def test_reproject_gcps(self):
    ds = gdal.Open(self.test_file)
    d = Domain(ds=ds)
    d.reproject_gcps(
        '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=75 +lon_0=10 +no_defs')
    gcp = d.vrt.dataset.GetGCPs()[0]
    self.assertTrue(gcp.GCPX > 636161)
    self.assertTrue(gcp.GCPY < -288344)
def test_get_pixelsize_meters(self):
    d = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    x, y = d.get_pixelsize_meters()
    self.assertEqual(int(x), 444)
    self.assertEqual(int(y), 723)

    d = Domain(ds=gdal.Open(self.test_file_projected))
    x, y = d.get_pixelsize_meters()
    self.assertEqual(int(x), 500)
    self.assertEqual(int(y), 500)
def test_export2thredds_arctic_long_lat(self):
    n = Nansat(self.test_file_arctic, logLevel=40)
    tmpfilename = os.path.join(ntd.tmp_data_path,
                               'nansat_export2thredds_arctic.nc')
    bands = {
        'Bristol': {'type': '>i2'},
        'Bootstrap': {'type': '>i2'},
        'UMass_AES': {'type': '>i2'},
    }
    n.export2thredds(tmpfilename, bands,
                     time=datetime.datetime(2016, 1, 20))
    self.assertTrue(os.path.exists(tmpfilename))
    g = gdal.Open(tmpfilename)
    metadata = g.GetMetadata_Dict()

    # Test that the long/lat values are set approximately correct
    ncg = 'NC_GLOBAL#'
    easternmost_longitude = metadata.get(ncg + 'easternmost_longitude')
    self.assertTrue(float(easternmost_longitude) > 179,
                    'easternmost_longitude is wrong:' + easternmost_longitude)
    westernmost_longitude = metadata.get(ncg + 'westernmost_longitude')
    self.assertTrue(float(westernmost_longitude) < -179,
                    'westernmost_longitude is wrong:' + westernmost_longitude)
    northernmost_latitude = metadata.get(ncg + 'northernmost_latitude')
    self.assertTrue(float(northernmost_latitude) > 89.999,
                    'northernmost_latitude is wrong:' + northernmost_latitude)
    southernmost_latitude = metadata.get(ncg + 'southernmost_latitude')
    self.assertTrue(float(southernmost_latitude) < 54,
                    'southernmost_latitude is wrong:' + southernmost_latitude)
    self.assertTrue(float(southernmost_latitude) > 53,
                    'southernmost_latitude is wrong:' + southernmost_latitude)
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
    """Create VRT"""
    try:
        ice_folder_name = kwargs['iceFolder']
    except KeyError:
        # ice_folder_name = '/vol/istjenesten/data/metnoCharts/'
        ice_folder_name = '/vol/data/metnoCharts/'

    keyword_base = 'metno_local_hires_seaice'
    if filename[0:len(keyword_base)] != keyword_base:
        raise WrongMapperError

    keyword_time = filename[len(keyword_base) + 1:]
    requested_time = datetime.strptime(keyword_time, '%Y%m%d')
    # Search for nearest available file, within the closest 3 days
    found_dataset = False
    for delta_day in [0, -1, 1, -2, 2, -3, 3]:
        valid_time = (requested_time + timedelta(days=delta_day) +
                      timedelta(hours=15))
        filename = (ice_folder_name + 'ice_conc_svalbard_' +
                    valid_time.strftime('%Y%m%d1500.nc'))
        if os.path.exists(filename):
            print('Found file:')
            print(filename)
            gdal_dataset = gdal.Open(filename)
            gdal_metadata = gdal_dataset.GetMetadata()
            mg.Mapper.__init__(self, filename, gdal_dataset, gdal_metadata)
            found_dataset = True
            # Modify GeoTransform from netCDF file
            # - otherwise a shift is seen!
            self.dataset.SetGeoTransform(
                (-1243508 - 1000, 1000, 0, -210526 - 7000, 0, -1000))
            break  # Data is found for this day

    if found_dataset is False:
        raise AttributeError("No local Svalbard-ice files available")
def __init__(self, fileName, gdalDataset, gdalMetadata, GCP_COUNT=30, **kwargs): ''' Create MODIS_L1 VRT ''' #list of available modis names:resolutions modisResolutions = { 'MYD02QKM': 250, 'MOD02QKM': 250, 'MYD02HKM': 500, 'MOD02HKM': 500, 'MYD021KM': 1000, 'MOD021KM': 1000 } #should raise error in case of not MODIS_L1 try: mResolution = modisResolutions[gdalMetadata["SHORTNAME"]] except: raise WrongMapperError # get 1st subdataset and parse to VRT.__init__() # for retrieving geo-metadata try: gdalSubDataset = gdal.Open(gdalDataset.GetSubDatasets()[0][0]) except (AttributeError, IndexError): raise WrongMapperError # create empty VRT dataset with geolocation only VRT.__init__(self, gdalSubDataset) subDsString = 'HDF4_EOS:EOS_SWATH:"%s":MODIS_SWATH_Type_L1B:%s' #provide all mappings metaDict250SF = ['EV_250_RefSB'] metaDict250 = [{ 'src': { 'SourceFilename': subDsString % (fileName, 'EV_250_RefSB'), 'SourceBand': 1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '645' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_250_RefSB'), 'SourceBand': 2 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '858' } }] metaDict500SF = ['EV_250_Aggr500_RefSB', 'EV_500_RefSB'] metaDict500 = [{ 'src': { 'SourceFilename': subDsString % (fileName, 'EV_250_Aggr500_RefSB'), 'SourceBand': 1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '645' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_250_Aggr500_RefSB'), 'SourceBand': 2 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '858' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'), 'SourceBand': 1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '469' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'), 'SourceBand': 2 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '555' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'), 'SourceBand': 3 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '1240' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'), 'SourceBand': 4 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '1640' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'), 'SourceBand': 5 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '2130' } }] metaDict1000SF = [ 'EV_250_Aggr1km_RefSB', 'EV_500_Aggr1km_RefSB', 'EV_1KM_RefSB', 'EV_1KM_Emissive' ] metaDict1000 = [{ 'src': { 'SourceFilename': subDsString % (fileName, 'EV_250_Aggr1km_RefSB'), 'SourceBand': 1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '645' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_250_Aggr1km_RefSB'), 'SourceBand': 2 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '858' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_Aggr1km_RefSB'), 'SourceBand': 1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '469' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_Aggr1km_RefSB'), 'SourceBand': 2 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '555' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_Aggr1km_RefSB'), 'SourceBand': 3 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '1240' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_Aggr1km_RefSB'), 'SourceBand': 4 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 
'wavelength': '1640' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_500_Aggr1km_RefSB'), 'SourceBand': 5 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '2130' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '412' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 2 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '443' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 3 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '488' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 4 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '531' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 5 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '551' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 6 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '667' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 7 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '667' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 8 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '678' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 9 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '678' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 10 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '748' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 11 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '869' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 12 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '905' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 13 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '936' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 14 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '940' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'), 'SourceBand': 15 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '1375' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '3750' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 2 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '3959' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 3 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '3959' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 4 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '4050' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 5 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '4465' } }, { 'src': { 
'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 6 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '4515' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 7 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '6715' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 8 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '7325' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 9 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '8550' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 10 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '9730' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 11 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '11030' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 12 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '12020' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 13 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '13335' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 14 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '13635' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 15 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '13935' } }, { 'src': { 'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'), 'SourceBand': 16 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': '14235' } }] # get proper mapping depending on resolution metaDict = { 250: metaDict250, 500: metaDict500, 1000: metaDict1000, }[mResolution] # get proper mapping depending on resolution metaDictSF = { 250: metaDict250SF, 500: metaDict500SF, 1000: metaDict1000SF, }[mResolution] # read all scales/offsets rScales = {} rOffsets = {} for sf in metaDictSF: dsName = subDsString % (fileName, sf) ds = gdal.Open(dsName) rScales[dsName] = map( float, ds.GetMetadataItem('radiance_scales').split(',')) rOffsets[dsName] = map( float, ds.GetMetadataItem('radiance_offsets').split(',')) self.logger.debug('radiance_scales: %s' % str(rScales)) # add 'band_name' to 'parameters' for bandDict in metaDict: SourceFilename = bandDict['src']['SourceFilename'] SourceBand = bandDict['src']['SourceBand'] bandDict['dst']['suffix'] = bandDict['dst']['wavelength'] scale = rScales[SourceFilename][SourceBand - 1] offset = rOffsets[SourceFilename][SourceBand - 1] self.logger.debug('band, scale, offset: %s_%d %s %s' % (SourceFilename, SourceBand, scale, offset)) bandDict['src']['ScaleRatio'] = scale bandDict['src']['ScaleOffset'] = offset # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) productDate = gdalMetadata["RANGEBEGINNINGDATE"] productTime = gdalMetadata["RANGEBEGINNINGTIME"] self.remove_geolocationArray() # set required metadata self.dataset.SetMetadataItem( 'time_coverage_start', (parse(gdalMetadata["RANGEBEGINNINGDATE"] + ' ' + gdalMetadata["RANGEBEGINNINGTIME"]).isoformat())) self.dataset.SetMetadataItem( 'time_coverage_end', (parse(gdalMetadata["RANGEENDINGDATE"] + ' ' + gdalMetadata["RANGEENDINGTIME"]).isoformat())) instrumentName = 
self.find_metadata(gdalMetadata, 'ASSOCIATEDINSTRUMENTSHORTNAME', 'MODIS') platformName = self.find_metadata(gdalMetadata, 'ASSOCIATEDPLATFORMSHORTNAME', 'AQUA') mm = pti.get_gcmd_instrument(instrumentName) ee = pti.get_gcmd_platform(platformName) self.dataset.SetMetadataItem('instrument', json.dumps(mm)) self.dataset.SetMetadataItem('platform', json.dumps(ee)) lonSubdataset = [ subdatasetName[0] for subdatasetName in gdalDataset.GetSubDatasets() if 'Longitude' in subdatasetName[1] ][0] latSubdataset = [ subdatasetName[0] for subdatasetName in gdalDataset.GetSubDatasets() if 'Latitude' in subdatasetName[1] ][0] lons = gdal.Open(lonSubdataset).ReadAsArray() lats = gdal.Open(latSubdataset).ReadAsArray() gcps = [] rows = range(0, lons.shape[0], lons.shape[0] / GCP_COUNT) cols = range(0, lons.shape[1], lons.shape[1] / GCP_COUNT) factor = self.dataset.RasterYSize / lons.shape[0] for r in rows: for c in cols: gcps.append( gdal.GCP(float(lons[r, c]), float(lats[r, c]), 0, factor * c + 0.5, factor * r + 0.5)) self.dataset.SetGCPs(gcps, self.dataset.GetGCPProjection()) self.tps = True
def __init__(self, fileName, gdalDataset, gdalMetadata, manifestonly=False, **kwargs): if zipfile.is_zipfile(fileName): zz = zipfile.PyZipFile(fileName) # Assuming the file names are consistent, the polarization # dependent data should be sorted equally such that we can use the # same indices consistently for all the following lists # THIS IS NOT THE CASE... mdsFiles = ['/vsizip/%s/%s' % (fileName, fn) for fn in zz.namelist() if 'measurement/s1a' in fn] calFiles = ['/vsizip/%s/%s' % (fileName, fn) for fn in zz.namelist() if 'annotation/calibration/calibration-s1a' in fn] noiseFiles = ['/vsizip/%s/%s' % (fileName, fn) for fn in zz.namelist() if 'annotation/calibration/noise-s1a' in fn] annotationFiles = ['/vsizip/%s/%s' % (fileName, fn) for fn in zz.namelist() if 'annotation/s1a' in fn] manifestFile = ['/vsizip/%s/%s' % (fileName, fn) for fn in zz.namelist() if 'manifest.safe' in fn] zz.close() else: mdsFiles = glob.glob('%s/measurement/s1a*' % fileName) calFiles = glob.glob('%s/annotation/calibration/calibration-s1a*' % fileName) noiseFiles = glob.glob('%s/annotation/calibration/noise-s1a*' % fileName) annotationFiles = glob.glob('%s/annotation/s1a*' % fileName) manifestFile = glob.glob('%s/manifest.safe' % fileName) if (not mdsFiles or not calFiles or not noiseFiles or not annotationFiles or not manifestFile): raise WrongMapperError mdsDict = {} for ff in mdsFiles: mdsDict[ os.path.splitext(os.path.basename(ff))[0].split('-')[3]] = ff self.calXMLDict = {} for ff in calFiles: self.calXMLDict[ os.path.splitext( os.path.basename(ff))[0].split('-')[4]] = self.read_xml(ff) self.noiseXMLDict = {} for ff in noiseFiles: self.noiseXMLDict[ os.path.splitext( os.path.basename(ff))[0].split('-')[4]] = self.read_xml(ff) self.annotationXMLDict = {} for ff in annotationFiles: self.annotationXMLDict[ os.path.splitext( os.path.basename(ff))[0].split('-')[3]] = self.read_xml(ff) self.manifestXML = self.read_xml(manifestFile[0]) # very fast constructor without any bands if manifestonly: self.init_from_manifest_only(self.manifestXML, self.annotationXMLDict[ self.annotationXMLDict.keys()[0]]) return gdalDatasets = {} for key in mdsDict.keys(): # Open data files gdalDatasets[key] = gdal.Open(mdsDict[key]) if not gdalDatasets: raise WrongMapperError('No Sentinel-1 datasets found') # Check metadata to confirm it is Sentinel-1 L1 for key in gdalDatasets: metadata = gdalDatasets[key].GetMetadata() break if not 'TIFFTAG_IMAGEDESCRIPTION' in metadata.keys(): raise WrongMapperError if (not 'Sentinel-1' in metadata['TIFFTAG_IMAGEDESCRIPTION'] and not 'L1' in metadata['TIFFTAG_IMAGEDESCRIPTION']): raise WrongMapperError warnings.warn('Sentinel-1 level-1 mapper is not yet adapted to ' 'complex data. In addition, the band names should be ' 'updated for multi-swath data - ' 'and there might be other issues.') # create empty VRT dataset with geolocation only for key in gdalDatasets: VRT.__init__(self, gdalDatasets[key]) break # Read annotation, noise and calibration xml-files pol = {} it = 0 for key in self.annotationXMLDict: xml = Node.create(self.annotationXMLDict[key]) pol[key] = (xml.node('product'). 
node('adsHeader')['polarisation'].upper()) it += 1 if it == 1: # Get incidence angle pi = xml.node('generalAnnotation').node('productInformation') self.dataset.SetMetadataItem('ORBIT_DIRECTION', str(pi['pass'])) (X, Y, lon, lat, inc, ele, numberOfSamples, numberOfLines) = self.read_geolocation_lut( self.annotationXMLDict[key]) X = np.unique(X) Y = np.unique(Y) lon = np.array(lon).reshape(len(Y), len(X)) lat = np.array(lat).reshape(len(Y), len(X)) inc = np.array(inc).reshape(len(Y), len(X)) ele = np.array(ele).reshape(len(Y), len(X)) incVRT = VRT(array=inc, lat=lat, lon=lon) eleVRT = VRT(array=ele, lat=lat, lon=lon) incVRT = incVRT.get_resized_vrt(self.dataset.RasterXSize, self.dataset.RasterYSize, eResampleAlg=2) eleVRT = eleVRT.get_resized_vrt(self.dataset.RasterXSize, self.dataset.RasterYSize, eResampleAlg=2) self.bandVRTs['incVRT'] = incVRT self.bandVRTs['eleVRT'] = eleVRT for key in self.calXMLDict: calibration_LUT_VRTs, longitude, latitude = ( self.get_LUT_VRTs(self.calXMLDict[key], 'calibrationVectorList', ['sigmaNought', 'betaNought', 'gamma', 'dn'] )) self.bandVRTs['LUT_sigmaNought_VRT_'+pol[key]] = ( calibration_LUT_VRTs['sigmaNought']. get_resized_vrt(self.dataset.RasterXSize, self.dataset.RasterYSize, eResampleAlg=1)) self.bandVRTs['LUT_betaNought_VRT_'+pol[key]] = ( calibration_LUT_VRTs['betaNought']. get_resized_vrt(self.dataset.RasterXSize, self.dataset.RasterYSize, eResampleAlg=1)) self.bandVRTs['LUT_gamma_VRT'] = calibration_LUT_VRTs['gamma'] self.bandVRTs['LUT_dn_VRT'] = calibration_LUT_VRTs['dn'] for key in self.noiseXMLDict: noise_LUT_VRT = self.get_LUT_VRTs(self.noiseXMLDict[key], 'noiseVectorList', ['noiseLut'])[0] self.bandVRTs['LUT_noise_VRT_'+pol[key]] = ( noise_LUT_VRT['noiseLut'].get_resized_vrt( self.dataset.RasterXSize, self.dataset.RasterYSize, eResampleAlg=1)) metaDict = [] bandNumberDict = {} bnmax = 0 for key in gdalDatasets.keys(): dsPath, dsName = os.path.split(mdsDict[key]) name = 'DN_%s' % pol[key] # A dictionary of band numbers is needed for the pixel function # bands further down. This is not the best solution. It would be # better to have a function in VRT that returns the number given a # band name. This function exists in Nansat but could perhaps be # moved to VRT? The existing nansat function could just call the # VRT one... bandNumberDict[name] = bnmax + 1 bnmax = bandNumberDict[name] band = gdalDatasets[key].GetRasterBand(1) dtype = band.DataType metaDict.append({ 'src': { 'SourceFilename': mdsDict[key], 'SourceBand': 1, 'DataType': dtype, }, 'dst': { 'name': name, #'SourceTransferType': gdal.GetDataTypeName(dtype), #'dataType': 6, }, }) # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) ''' Calibration should be performed as s0 = DN^2/sigmaNought^2, where sigmaNought is from e.g. annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml, and DN is the Digital Numbers in the tiff files. Also the noise should be subtracted. 
See https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT ''' # Get look direction sat_heading = initial_bearing(longitude[:-1, :], latitude[:-1, :], longitude[1:, :], latitude[1:, :]) look_direction = scipy.ndimage.interpolation.zoom( np.mod(sat_heading + 90, 360), (np.shape(longitude)[0] / (np.shape(longitude)[0]-1.), 1)) # Decompose, to avoid interpolation errors around 0 <-> 360 look_direction_u = np.sin(np.deg2rad(look_direction)) look_direction_v = np.cos(np.deg2rad(look_direction)) look_u_VRT = VRT(array=look_direction_u, lat=latitude, lon=longitude) look_v_VRT = VRT(array=look_direction_v, lat=latitude, lon=longitude) lookVRT = VRT(lat=latitude, lon=longitude) lookVRT._create_band([{'SourceFilename': look_u_VRT.fileName, 'SourceBand': 1}, {'SourceFilename': look_v_VRT.fileName, 'SourceBand': 1}], {'PixelFunctionType': 'UVToDirectionTo'} ) # Blow up to full size lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize, self.dataset.RasterYSize, eResampleAlg=1) # Store VRTs so that they are accessible later self.bandVRTs['look_u_VRT'] = look_u_VRT self.bandVRTs['look_v_VRT'] = look_v_VRT self.bandVRTs['lookVRT'] = lookVRT metaDict = [] # Add bands to full size VRT for key in pol: name = 'LUT_sigmaNought_%s' % pol[key] bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] metaDict.append( {'src': {'SourceFilename': (self.bandVRTs['LUT_sigmaNought_VRT_' + pol[key]].fileName), 'SourceBand': 1 }, 'dst': {'name': name } }) name = 'LUT_noise_%s' % pol[key] bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] metaDict.append({ 'src': { 'SourceFilename': self.bandVRTs['LUT_noise_VRT_' + pol[key]].fileName, 'SourceBand': 1 }, 'dst': { 'name': name } }) name = 'look_direction' bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] metaDict.append({ 'src': { 'SourceFilename': self.bandVRTs['lookVRT'].fileName, 'SourceBand': 1 }, 'dst': { 'wkv': 'sensor_azimuth_angle', 'name': name } }) for key in gdalDatasets.keys(): dsPath, dsName = os.path.split(mdsDict[key]) name = 'sigma0_%s' % pol[key] bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] metaDict.append( {'src': [{'SourceFilename': self.fileName, 'SourceBand': bandNumberDict['DN_%s' % pol[key]], }, {'SourceFilename': (self.bandVRTs['LUT_sigmaNought_VRT_%s' % pol[key]].fileName), 'SourceBand': 1 } ], 'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave', 'PixelFunctionType': 'Sentinel1Calibration', 'polarization': pol[key], 'suffix': pol[key], }, }) name = 'beta0_%s' % pol[key] bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] metaDict.append( {'src': [{'SourceFilename': self.fileName, 'SourceBand': bandNumberDict['DN_%s' % pol[key]] }, {'SourceFilename': (self.bandVRTs['LUT_betaNought_VRT_%s' % pol[key]].fileName), 'SourceBand': 1 } ], 'dst': {'wkv': 'surface_backwards_brightness_coefficient_of_radar_wave', 'PixelFunctionType': 'Sentinel1Calibration', 'polarization': pol[key], 'suffix': pol[key], }, }) self._create_bands(metaDict) # Add incidence angle as band name = 'incidence_angle' bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] src = {'SourceFilename': self.bandVRTs['incVRT'].fileName, 'SourceBand': 1} dst = {'wkv': 'angle_of_incidence', 'name': name} self._create_band(src, dst) self.dataset.FlushCache() # Add elevation angle as band name = 'elevation_angle' bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] src = {'SourceFilename': self.bandVRTs['eleVRT'].fileName, 'SourceBand': 1} dst = 
{'wkv': 'angle_of_elevation', 'name': name} self._create_band(src, dst) self.dataset.FlushCache() # Add sigma0_VV pp = [pol[key] for key in pol] if 'VV' not in pp and 'HH' in pp: name = 'sigma0_VV' bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] src = [{'SourceFilename': self.fileName, 'SourceBand': bandNumberDict['DN_HH'], }, {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_HH']. fileName), 'SourceBand': 1 }, {'SourceFilename': (self.bandVRTs['LUT_sigmaNought_VRT_HH']. fileName), 'SourceBand': 1, }, {'SourceFilename': self.bandVRTs['incVRT'].fileName, 'SourceBand': 1} ] dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave', 'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV', 'polarization': 'VV', 'suffix': 'VV'} self._create_band(src, dst) self.dataset.FlushCache() # set time as acquisition start time n = Node.create(self.manifestXML) meta = n.node('metadataSection') for nn in meta.children: if nn.getAttribute('ID') == u'acquisitionPeriod': # set valid time self.dataset.SetMetadataItem( 'time_coverage_start', parse((nn.node('metadataWrap'). node('xmlData'). node('safe:acquisitionPeriod')['safe:startTime']) ).isoformat()) self.dataset.SetMetadataItem( 'time_coverage_end', parse((nn.node('metadataWrap'). node('xmlData'). node('safe:acquisitionPeriod')['safe:stopTime']) ).isoformat()) # Get dictionary describing the instrument and platform according to # the GCMD keywords mm = pti.get_gcmd_instrument('sar') ee = pti.get_gcmd_platform('sentinel-1a') # TODO: Validate that the found instrument and platform are indeed what we # want.... self.dataset.SetMetadataItem('instrument', json.dumps(mm)) self.dataset.SetMetadataItem('platform', json.dumps(ee))
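# The calibration note embedded above ("s0 = DN^2/sigmaNought^2, ... the noise
# should be subtracted") can be summarised in a short numpy sketch. This is an
# illustration only, not part of the mapper: the array names (dn,
# sigma_nought_lut, noise_lut) are hypothetical stand-ins for the digital
# numbers and the LUTs after resampling to the full image grid; in the mapper
# itself the per-pixel maths is done by the Sentinel1Calibration pixel function
# referenced in metaDict.
import numpy as np

def sigma0_from_dn(dn, sigma_nought_lut, noise_lut=None):
    """sigma0 = (DN**2 - noise) / sigmaNought**2; noise subtraction optional."""
    dn2 = dn.astype('float32') ** 2
    if noise_lut is not None:
        dn2 = np.clip(dn2 - noise_lut, 0.0, None)  # keep backscatter non-negative
    return dn2 / sigma_nought_lut.astype('float32') ** 2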
def __init__(self, filename, gdalDataset, gdalMetadata, emrange='VNIR', **kwargs): ''' Create MODIS_L1 VRT ''' # check mapper try: INSTRUMENTSHORTNAME = gdalMetadata['INSTRUMENTSHORTNAME'] except: raise WrongMapperError if INSTRUMENTSHORTNAME != 'ASTER': raise WrongMapperError try: SHORTNAME = gdalMetadata['SHORTNAME'] except: raise WrongMapperError if SHORTNAME != 'ASTL1B': raise WrongMapperError # set up metadict for data with various resolution subDSString = 'HDF4_EOS:EOS_SWATH:"%s":%s:%s' metaDictVNIR = [ { 'src': { 'SourceFilename': subDSString % (filename, 'VNIR_Swath', 'ImageData1') }, 'dst': { 'wavelength': '560' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'VNIR_Swath', 'ImageData2') }, 'dst': { 'wavelength': '660' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'VNIR_Swath', 'ImageData3N') }, 'dst': { 'wavelength': '820' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'VNIR_Swath', 'ImageData3B') }, 'dst': { 'wavelength': '820' } }, ] metaDictSWIR = [ { 'src': { 'SourceFilename': subDSString % (filename, 'SWIR_Swath', 'ImageData4') }, 'dst': { 'wavelength': '1650' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'SWIR_Swath', 'ImageData5') }, 'dst': { 'wavelength': '2165' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'SWIR_Swath', 'ImageData6') }, 'dst': { 'wavelength': '2205' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'SWIR_Swath', 'ImageData7') }, 'dst': { 'wavelength': '2260' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'SWIR_Swath', 'ImageData8') }, 'dst': { 'wavelength': '2330' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'SWIR_Swath', 'ImageData9') }, 'dst': { 'wavelength': '2395' } }, ] metaDictTIR = [ { 'src': { 'SourceFilename': subDSString % (filename, 'TIR_Swath', 'ImageData10') }, 'dst': { 'wavelength': '8300' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'TIR_Swath', 'ImageData11') }, 'dst': { 'wavelength': '8650' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'TIR_Swath', 'ImageData12') }, 'dst': { 'wavelength': '9100' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'TIR_Swath', 'ImageData13') }, 'dst': { 'wavelength': '10600' } }, { 'src': { 'SourceFilename': subDSString % (filename, 'TIR_Swath', 'ImageData14') }, 'dst': { 'wavelength': '11300' } }, ] # select appropriate metaDict based on <emrange> parameter metaDict = { 'VNIR': metaDictVNIR, 'SWIR': metaDictSWIR, 'TIR': metaDictTIR, }[emrange] # get 1st EOS subdataset and parse to VRT.__init__() # for retrieving geo-metadata try: gdalSubDataset0 = gdal.Open(metaDict[0]['src']['SourceFilename']) except (AttributeError, IndexError): raise WrongMapperError # create empty VRT dataset with geolocation only self._init_from_gdal_dataset(gdalSubDataset0, metadata=gdalMetadata) # add source band, wkv and suffix for metaEntry in metaDict: metaEntry['src']['SourceBand'] = 1 metaEntry['dst']['wkv'] = 'toa_outgoing_spectral_radiance' metaEntry['dst']['suffix'] = metaEntry['dst']['wavelength'] if 'ImageData3N' in metaEntry['src']['SourceFilename']: metaEntry['dst']['suffix'] += 'N' if 'ImageData3B' in metaEntry['src']['SourceFilename']: metaEntry['dst']['suffix'] += 'B' # add scale and offset for metaEntry in metaDict: bandNo = metaEntry['src']['SourceFilename'].strip().split( ':')[-1].replace('ImageData', '') metaEntry['src']['ScaleRatio'] = float(gdalMetadata['INCL' + bandNo]) metaEntry['src']['ScaleOffset'] = float(gdalMetadata['OFFSET' + bandNo]) # add bands with metadata and 
corresponding values to the empty VRT self.create_bands(metaDict) # set time datetimeString = self.find_metadata(gdalMetadata, "SETTINGTIMEOFPOINTING") # Adding valid time to dataset self.dataset.SetMetadataItem('time_coverage_start', parse(datetimeString + '+00').isoformat()) self.dataset.SetMetadataItem('time_coverage_end', parse(datetimeString + '+00').isoformat()) mm = pti.get_gcmd_instrument('ASTER') ee = pti.get_gcmd_platform('TERRA') self.dataset.SetMetadataItem('instrument', json.dumps(mm)) self.dataset.SetMetadataItem('platform', json.dumps(ee)) self._remove_geolocation()
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30, rmMetadatas=[ 'NETCDF_VARNAME', '_Unsigned', 'ScaleRatio', 'ScaleOffset', 'dods_variable' ], **kwargs): # Remove 'NC_GLOBAL#' and 'GDAL_' and 'NANSAT_' # from keys in gdalDataset tmpGdalMetadata = {} geoMetadata = {} origin_is_nansat = False if not gdalMetadata: raise WrongMapperError for key in gdalMetadata.keys(): newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '') if 'NANSAT_' in newKey: geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key] origin_is_nansat = True else: tmpGdalMetadata[newKey] = gdalMetadata[key] gdalMetadata = tmpGdalMetadata fileExt = os.path.splitext(inputFileName)[1] # Get file names from dataset or subdataset subDatasets = gdalDataset.GetSubDatasets() if len(subDatasets) == 0: fileNames = [inputFileName] else: fileNames = [f[0] for f in subDatasets] # add bands with metadata and corresponding values to the empty VRT metaDict = [] xDatasetSource = '' yDatasetSource = '' firstXSize = 0 firstYSize = 0 for _, fileName in enumerate(fileNames): subDataset = gdal.Open(fileName) # choose the first dataset whith grid if (firstXSize == 0 and firstYSize == 0 and subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1): firstXSize = subDataset.RasterXSize firstYSize = subDataset.RasterYSize firstSubDataset = subDataset # get projection from the first subDataset projection = firstSubDataset.GetProjection() # take bands whose sizes are same as the first band. if (subDataset.RasterXSize == firstXSize and subDataset.RasterYSize == firstYSize): if projection == '': projection = subDataset.GetProjection() if ('GEOLOCATION_X_DATASET' in fileName or 'longitude' in fileName): xDatasetSource = fileName elif ('GEOLOCATION_Y_DATASET' in fileName or 'latitude' in fileName): yDatasetSource = fileName else: for iBand in range(subDataset.RasterCount): subBand = subDataset.GetRasterBand(iBand + 1) bandMetadata = subBand.GetMetadata_Dict() if 'PixelFunctionType' in bandMetadata: bandMetadata.pop('PixelFunctionType') sourceBands = iBand + 1 # sourceBands = i*subDataset.RasterCount + iBand + 1 # generate src metadata src = { 'SourceFilename': fileName, 'SourceBand': sourceBands } # set scale ratio and scale offset scaleRatio = bandMetadata.get( 'ScaleRatio', bandMetadata.get( 'scale', bandMetadata.get('scale_factor', ''))) if len(scaleRatio) > 0: src['ScaleRatio'] = scaleRatio scaleOffset = bandMetadata.get( 'ScaleOffset', bandMetadata.get( 'offset', bandMetadata.get('add_offset', ''))) if len(scaleOffset) > 0: src['ScaleOffset'] = scaleOffset # sate DataType src['DataType'] = subBand.DataType # generate dst metadata # get all metadata from input band dst = bandMetadata # set wkv and bandname dst['wkv'] = bandMetadata.get('standard_name', '') # first, try the name metadata if 'name' in bandMetadata: bandName = bandMetadata['name'] else: # if it doesn't exist get name from NETCDF_VARNAME bandName = bandMetadata.get('NETCDF_VARNAME', '') if len(bandName) == 0: bandName = bandMetadata.get( 'dods_variable', '') # remove digits added by gdal in # exporting to netcdf... 
if (len(bandName) > 0 and origin_is_nansat and fileExt == '.nc'): if bandName[-1:].isdigit(): bandName = bandName[:-1] if bandName[-1:].isdigit(): bandName = bandName[:-1] # if still no bandname, create one if len(bandName) == 0: bandName = 'band_%03d' % iBand dst['name'] = bandName # remove non-necessary metadata from dst for rmMetadata in rmMetadatas: if rmMetadata in dst: dst.pop(rmMetadata) # append band with src and dst dictionaries metaDict.append({'src': src, 'dst': dst}) # create empty VRT dataset with geolocation only VRT.__init__(self, firstSubDataset, srcMetadata=gdalMetadata) # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) # Create complex data bands from 'xxx_real' and 'xxx_imag' bands # using pixelfunctions rmBands = [] for iBandNo in range(self.dataset.RasterCount): iBand = self.dataset.GetRasterBand(iBandNo + 1) iBandName = iBand.GetMetadataItem('name') # find real data band if iBandName.find("_real") != -1: realBandNo = iBandNo realBand = self.dataset.GetRasterBand(realBandNo + 1) realDtype = realBand.GetMetadataItem('DataType') bandName = iBandName.replace(iBandName.split('_')[-1], '')[0:-1] for jBandNo in range(self.dataset.RasterCount): jBand = self.dataset.GetRasterBand(jBandNo + 1) jBandName = jBand.GetMetadataItem('name') # find an imaginary data band corresponding to the real # data band and create complex data band from the bands if jBandName.find(bandName + '_imag') != -1: imagBandNo = jBandNo imagBand = self.dataset.GetRasterBand(imagBandNo + 1) imagDtype = imagBand.GetMetadataItem('DataType') dst = imagBand.GetMetadata() dst['name'] = bandName dst['PixelFunctionType'] = 'ComplexData' dst['dataType'] = 10 src = [{ 'SourceFilename': fileNames[realBandNo], 'SourceBand': 1, 'DataType': realDtype }, { 'SourceFilename': fileNames[imagBandNo], 'SourceBand': 1, 'DataType': imagDtype }] self._create_band(src, dst) self.dataset.FlushCache() rmBands.append(realBandNo + 1) rmBands.append(imagBandNo + 1) # Delete real and imaginary bands if len(rmBands) != 0: self.delete_bands(rmBands) if len(projection) == 0: # projection was not set automatically # get projection from GCPProjection projection = geoMetadata.get('GCPProjection', '') if len(projection) == 0: # no projection was found in dataset or metadata: # generate WGS84 by default projection = NSR().wkt # fix problem with MET.NO files where a, b given in m and XC/YC in km if ('UNIT["kilometre"' in projection and ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]' in projection): projection = projection.replace( ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]', '') # set projection self.dataset.SetProjection(self.repare_projection(projection)) # check if GCPs were added from input dataset gcps = firstSubDataset.GetGCPs() gcpProjection = firstSubDataset.GetGCPProjection() # if no GCPs in input dataset: try to add GCPs from metadata if not gcps: gcps = self.add_gcps_from_metadata(geoMetadata) # if yet no GCPs: try to add GCPs from variables if not gcps: gcps = self.add_gcps_from_variables(inputFileName) if gcps: if len(gcpProjection) == 0: # get GCP projection and repare gcpProjection = self.repare_projection( geoMetadata.get('GCPProjection', '')) # add GCPs to dataset self.dataset.SetGCPs(gcps, gcpProjection) self.dataset.SetProjection('') self._remove_geotransform() # Find proper bands and insert GEOLOCATION ARRAY into dataset if len(xDatasetSource) > 0 and len(yDatasetSource) > 0: self.add_geolocationArray( GeolocationArray(xDatasetSource, yDatasetSource)) elif not 
gcps: # if no GCPs found and not GEOLOCATION ARRAY set: # Set Nansat Geotransform if it is not set automatically geoTransform = self.dataset.GetGeoTransform() if len(geoTransform) == 0: geoTransformStr = geoMetadata.get('GeoTransform', '(0|1|0|0|0|0|1)') geoTransform = eval(geoTransformStr.replace('|', ',')) self.dataset.SetGeoTransform(geoTransform) subMetadata = firstSubDataset.GetMetadata() ### GET START TIME from METADATA time_coverage_start = None if 'start_time' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['start_time']) elif 'start_date' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['start_date']) elif 'time_coverage_start' in gdalMetadata: time_coverage_start = parse_time( gdalMetadata['time_coverage_start']) ### GET END TIME from METADATA time_coverage_end = None if 'stop_time' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['stop_time']) elif 'stop_date' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['stop_date']) elif 'time_coverage_stop' in gdalMetadata: time_coverage_start = parse_time( gdalMetadata['time_coverage_stop']) elif 'end_time' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['end_time']) elif 'end_date' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['end_date']) elif 'time_coverage_end' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['time_coverage_end']) ### GET start time from time variable if (time_coverage_start is None and cfunitsInstalled and 'time#standard_name' in subMetadata and subMetadata['time#standard_name'] == 'time' and 'time#units' in subMetadata and 'time#calendar' in subMetadata): # get data from netcdf data ncFile = netcdf_file(inputFileName, 'r') timeLength = ncFile.variables['time'].shape[0] timeValueStart = ncFile.variables['time'][0] timeValueEnd = ncFile.variables['time'][-1] ncFile.close() try: timeDeltaStart = Units.conform( timeValueStart, Units(subMetadata['time#units'], calendar=subMetadata['time#calendar']), Units('days since 1950-01-01')) except ValueError: self.logger.error('calendar units are wrong: %s' % subMetadata['time#calendar']) else: time_coverage_start = ( datetime.datetime(1950, 1, 1) + datetime.timedelta(float(timeDeltaStart))) if timeLength > 1: timeDeltaEnd = Units.conform( timeValueStart, Units(subMetadata['time#units'], calendar=subMetadata['time#calendar']), Units('days since 1950-01-01')) else: timeDeltaEnd = timeDeltaStart + 1 time_coverage_end = (datetime.datetime(1950, 1, 1) + datetime.timedelta(float(timeDeltaEnd))) ## finally set values of time_coverage start and end if available if time_coverage_start is not None: self.dataset.SetMetadataItem('time_coverage_start', time_coverage_start.isoformat()) if time_coverage_end is not None: self.dataset.SetMetadataItem('time_coverage_end', time_coverage_end.isoformat()) if 'sensor' not in gdalMetadata: self.dataset.SetMetadataItem('sensor', 'unknown') if 'satellite' not in gdalMetadata: self.dataset.SetMetadataItem('satellite', 'unknown') if 'source_type' not in gdalMetadata: self.dataset.SetMetadataItem('source_type', 'unknown') if 'platform' not in gdalMetadata: self.dataset.SetMetadataItem('platform', 'unknown') if 'instrument' not in gdalMetadata: self.dataset.SetMetadataItem('instrument', 'unknown') self.logger.info('Use generic mapper - OK!')
def __init__(self, fileName, gdalDataset, gdalMetadata, logLevel=30, **kwargs): if fileName[0:len(keywordBase)] != keywordBase: raise WrongMapperError( __file__, "Not Nora10 data converted from felt to netCDF") requestedTime = datetime.strptime(fileName[len(keywordBase) + 1:], '%Y%m%d%H%M') # For correct rounding fileTime = requestedTime + timedelta(minutes=30) fileTime = fileTime - timedelta(minutes=fileTime.minute) nc_file = (baseFolder + 'windspeed_10m' + fileTime.strftime('/%Y/%m/') + 'windspeed_' + fileTime.strftime('%Y%m%d%H.nc')) nc_file_winddir = (baseFolder + 'winddir_10m' + fileTime.strftime('/%Y/%m/') + 'winddir_' + fileTime.strftime('%Y%m%d%H.nc')) # Would prefer to use geotransform, but ob_tran # (General Oblique Transformation) is not supported by GDAL # Keeping lines below for potential future use: #proj4 = gdalDataset.GetMetadataItem('projection_rotated_ll#proj4') #proj4 = '+proj=ob_tran +o_proj=longlat +lon_0=-40 +o_lat_p=22 +a=6367470 +e=0' #rlatmin = -13.25; rlatmax = 26.65; deltarlat = 0.1 #rlonmin = 5.75; rlonmax = 30.45; deltarlon = 0.1 # Needed due to precence of time dimension in netCDF file gdal.SetConfigOption('GDAL_NETCDF_BOTTOMUP', 'No') # Read relevant arrays into memory g = gdal.Open('NETCDF:"' + nc_file + '":' + 'windspeed_10m') ws_10m = np.flipud(g.GetRasterBand(1).ReadAsArray()) g = gdal.Open(nc_file_winddir) wd_10m = np.flipud(g.GetRasterBand(1).ReadAsArray()) g = gdal.Open('NETCDF:"' + nc_file + '":' + 'latitude') lat = np.flipud(g.GetRasterBand(1).ReadAsArray()) g = gdal.Open('NETCDF:"' + nc_file + '":' + 'longitude') lon = np.flipud(g.GetRasterBand(1).ReadAsArray()) u10 = ws_10m * np.cos(np.deg2rad(wd_10m)) v10 = ws_10m * np.sin(np.deg2rad(wd_10m)) VRT_u10 = VRT(array=u10, lat=lat, lon=lon) VRT_v10 = VRT(array=v10, lat=lat, lon=lon) # Store bandVRTs so that they are available after reprojection etc self.bandVRTs = {'u_VRT': VRT_u10, 'v_VRT': VRT_v10} metaDict = [] metaDict.append({ 'src': { 'SourceFilename': VRT_u10.fileName, 'SourceBand': 1 }, 'dst': { 'wkv': 'eastward_wind', 'name': 'eastward_wind' } }) metaDict.append({ 'src': { 'SourceFilename': VRT_v10.fileName, 'SourceBand': 1 }, 'dst': { 'wkv': 'northward_wind', 'name': 'northward_wind' } }) # Add pixel function with wind speed metaDict.append({ 'src': [{ 'SourceFilename': self.bandVRTs['u_VRT'].fileName, 'SourceBand': 1, 'DataType': 6 }, { 'SourceFilename': self.bandVRTs['v_VRT'].fileName, 'SourceBand': 1, 'DataType': 6 }], 'dst': { 'wkv': 'wind_speed', 'name': 'windspeed', 'height': '10 m', 'PixelFunctionType': 'UVToMagnitude' } }) # Add pixel function with wind direction metaDict.append({ 'src': [{ 'SourceFilename': self.bandVRTs['u_VRT'].fileName, 'SourceBand': 1, 'DataType': 6 }, { 'SourceFilename': self.bandVRTs['v_VRT'].fileName, 'SourceBand': 1, 'DataType': 6 }], 'dst': { 'wkv': 'wind_to_direction', 'name': 'winddir', 'height': '10 m', 'PixelFunctionType': 'UVToDirectionTo' } }) # create empty VRT dataset with geolocation only VRT.__init__(self, lat=lat, lon=lon) # add bands with metadata and corresponding values # to the empty VRT self._create_bands(metaDict) # Add time self._set_time(fileTime)
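# Illustration only (not part of the mapper above): the Nora10 mapper builds
# the components as u10 = ws*cos(wd) and v10 = ws*sin(wd), so speed and the
# same direction angle can be recovered from u/v as sketched below. Nansat's
# UVToMagnitude and UVToDirectionTo pixel functions do the equivalent
# per-pixel in the VRT; the helper name and the angle convention used here
# are assumptions made for this sketch.
import numpy as np

def speed_and_direction(u10, v10):
    """Return wind speed and the direction angle used in the decomposition."""
    speed = np.hypot(u10, v10)                       # sqrt(u^2 + v^2)
    direction = np.degrees(np.arctan2(v10, u10)) % 360.0
    return speed, direction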
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
             **kwargs):
    # check if mapper fits
    if not gdalMetadata:
        raise WrongMapperError
    if not os.path.splitext(inputFileName)[1] == '.mnt':
        raise WrongMapperError
    try:
        mbNorthLatitude = float(gdalMetadata['NC_GLOBAL#mbNorthLatitude'])
        mbSouthLatitude = float(gdalMetadata['NC_GLOBAL#mbSouthLatitude'])
        mbEastLongitude = float(gdalMetadata['NC_GLOBAL#mbEastLongitude'])
        mbWestLongitude = float(gdalMetadata['NC_GLOBAL#mbWestLongitude'])
        mbProj4String = gdalMetadata['NC_GLOBAL#mbProj4String']
        Number_lines = int(gdalMetadata['NC_GLOBAL#Number_lines'])
        Number_columns = int(gdalMetadata['NC_GLOBAL#Number_columns'])
        Element_x_size = float(gdalMetadata['NC_GLOBAL#Element_x_size'])
        Element_y_size = float(gdalMetadata['NC_GLOBAL#Element_y_size'])
    except:
        raise WrongMapperError

    # find subdataset with DEPTH
    subDatasets = gdalDataset.GetSubDatasets()
    dSourceFile = None
    for subDataset in subDatasets:
        if subDataset[0].endswith('.mnt":DEPTH'):
            dSourceFile = subDataset[0]
    if dSourceFile is None:
        raise WrongMapperError

    dSubDataset = gdal.Open(dSourceFile)
    dMetadata = dSubDataset.GetMetadata()
    try:
        scale_factor = dMetadata['DEPTH#scale_factor']
        add_offset = dMetadata['DEPTH#add_offset']
    except:
        raise WrongMapperError

    geoTransform = [mbWestLongitude, Element_x_size, 0,
                    mbNorthLatitude, 0, -Element_y_size]

    # create empty VRT dataset with geolocation only
    VRT.__init__(self, srcGeoTransform=geoTransform,
                 srcMetadata=gdalMetadata,
                 srcProjection=NSR(mbProj4String).wkt,
                 srcRasterXSize=Number_columns,
                 srcRasterYSize=Number_lines)

    metaDict = [{'src': {'SourceFilename': dSourceFile,
                         'SourceBand': 1,
                         'ScaleRatio': scale_factor,
                         'ScaleOffset': add_offset},
                 'dst': {'wkv': 'depth'}}]

    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)
def __init__(self, fileName, gdalDataset, gdalMetadata, logLevel=30, **kwargs): if not gdalMetadata: raise WrongMapperError isHirlam = False for key in gdalMetadata.keys(): if 'creation by fimex from file' in gdalMetadata[key]: isHirlam = True if not isHirlam: raise WrongMapperError #GeolocMetaDict = [{'src': # {'SourceFilename': 'NETCDF:"' + fileName + '":longitude', # 'SourceBand': 1, # 'ScaleRatio': 1, # 'ScaleOffset': 0}, # 'dst': {}}, # {'src': # {'SourceFilename': 'NETCDF:"' + fileName + '":latitude', # 'SourceBand': 1, # 'ScaleRatio': 1, # 'ScaleOffset': 0}, # 'dst': {}}] subDataset = gdal.Open('NETCDF:"' + fileName + '":x_wind_10m') #self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize, # srcRasterYSize=subDataset.RasterYSize) #self.GeolocVRT._create_bands(GeolocMetaDict) #GeolocObject = GeolocationArray(xVRT=self.GeolocVRT, # yVRT=self.GeolocVRT, # xBand=1, yBand=2, # lineOffset=0, pixelOffset=0, # lineStep=1, pixelStep=1) ## create empty VRT dataset with geolocation only #VRT.__init__(self, srcRasterXSize = subDataset.RasterXSize, # srcRasterYSize = subDataset.RasterYSize, # geolocationArray = GeolocObject, # srcProjection = GeolocObject.d['SRS']) lon = gdal.Open('NETCDF:"' + fileName + '":longitude"').ReadAsArray() lat = gdal.Open('NETCDF:"' + fileName + '":latitude"').ReadAsArray() VRT.__init__(self, lat=lat, lon=lon) # Add bands with wind components metaDict = [{ 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":x_wind_10m'), 'NODATA': -32767 }, 'dst': { 'name': 'U', 'wkv': 'eastward_wind' } }, { 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":y_wind_10m'), 'NODATA': -32767 }, 'dst': { 'name': 'V', 'wkv': 'northward_wind' } }] # Add pixel function with wind speed metaDict.append({ 'src': [{ 'SourceFilename': ('NETCDF:"' + fileName + '":x_wind_10m'), 'SourceBand': 1, 'DataType': 6 }, { 'SourceFilename': ('NETCDF:"' + fileName + '":y_wind_10m'), 'SourceBand': 1, 'DataType': 6 }], 'dst': { 'wkv': 'wind_speed', 'name': 'windspeed', 'height': '10 m', 'PixelFunctionType': 'UVToMagnitude', 'NODATA': 9999 } }) # add bands with metadata and corresponding values # to the empty VRT self._create_bands(metaDict) # Add time validTime = datetime.datetime.utcfromtimestamp( int(subDataset.GetRasterBand(1).GetMetadata()['NETCDF_DIM_time'])) self._set_time(validTime)
def __init__(self, fileName, gdalDataset, gdalMetadata, resolution='low', **kwargs): ''' Create LANDSAT VRT from multiple tif files or single tar.gz file''' mtlFileName = '' bandFileNames = [] bandSizes = [] bandDatasets = [] fname = os.path.split(fileName)[1] if (fileName.endswith('.tar') or fileName.endswith('.tar.gz') or fileName.endswith('.tgz')): # try to open .tar or .tar.gz or .tgz file with tar try: tarFile = tarfile.open(fileName) except: raise WrongMapperError # collect names of bands and corresponding sizes # into bandsInfo dict and bandSizes list tarNames = sorted(tarFile.getnames()) for tarName in tarNames: # check if TIF files inside TAR qualify if (tarName[0] in ['L', 'M'] and os.path.splitext(tarName)[1] in ['.TIF', '.tif']): # open TIF file from TAR using VSI sourceFilename = '/vsitar/%s/%s' % (fileName, tarName) gdalDatasetTmp = gdal.Open(sourceFilename) # keep name, GDALDataset and size bandFileNames.append(sourceFilename) bandSizes.append(gdalDatasetTmp.RasterXSize) bandDatasets.append(gdalDatasetTmp) elif (tarName.endswith('MTL.txt') or tarName.endswith('MTL.TXT')): # get mtl file mtlFileName = tarName elif ((fname.startswith('L') or fname.startswith('M')) and (fname.endswith('.tif') or fname.endswith('.TIF') or fname.endswith('._MTL.txt'))): # try to find TIF/tif files with the same name as input file path, coreName = os.path.split(fileName) coreName = os.path.splitext(coreName)[0].split('_')[0] coreNameMask = coreName + '*[tT][iI][fF]' tifNames = sorted(glob.glob(os.path.join(path, coreNameMask))) for tifName in tifNames: sourceFilename = tifName gdalDatasetTmp = gdal.Open(sourceFilename) # keep name, GDALDataset and size bandFileNames.append(sourceFilename) bandSizes.append(gdalDatasetTmp.RasterXSize) bandDatasets.append(gdalDatasetTmp) # get mtl file mtlFiles = glob.glob(coreName + '*[mM][tT][lL].[tT][xX][tT]') if len(mtlFiles) > 0: mtlFileName = mtlFiles[0] else: raise WrongMapperError # if not TIF files found - not appropriate mapper if not bandFileNames: raise WrongMapperError # get appropriate band size based on number of unique size and # required resoltuion if resolution == 'low': bandXSise = min(bandSizes) elif resolution in ['high', 'hi']: bandXSise = max(bandSizes) else: raise OptionError('Wrong resolution %s for file %s' % (resolution, fileName)) # find bands with appropriate size and put to metaDict metaDict = [] for bandFileName, bandSize, bandDataset in zip(bandFileNames, bandSizes, bandDatasets): if bandSize == bandXSise: # let last part of file name be suffix bandSuffix = os.path.splitext(bandFileName)[0].split('_')[-1] metaDict.append({ 'src': { 'SourceFilename': bandFileName, 'SourceBand': 1, 'ScaleRatio': 0.1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'suffix': bandSuffix } }) gdalDataset4Use = bandDataset # create empty VRT dataset with geolocation only VRT.__init__(self, gdalDataset4Use) # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) if len(mtlFileName) > 0: mtlFileName = os.path.join( os.path.split(bandFileNames[0])[0], mtlFileName) mtlFileLines = [ line.strip() for line in self.read_xml(mtlFileName).split('\n') ] dateString = [ line.split('=')[1].strip() for line in mtlFileLines if ('DATE_ACQUIRED' in line or 'ACQUISITION_DATE' in line) ][0] timeStr = [ line.split('=')[1].strip() for line in mtlFileLines if ('SCENE_CENTER_TIME' in line or 'SCENE_CENTER_SCAN_TIME' in line) ][0] time_start = parse_time(dateString + 'T' + timeStr).isoformat() time_end = (parse_time(dateString + 'T' + 
timeStr) + datetime.timedelta(microseconds=60000000)).isoformat() self.dataset.SetMetadataItem('time_coverage_start', time_start) self.dataset.SetMetadataItem('time_coverage_end', time_end) # set platform platform = 'LANDSAT' if fname[2].isdigit(): platform += '-' + fname[2] ee = pti.get_gcmd_platform(platform) self.dataset.SetMetadataItem('platform', json.dumps(ee)) # set instrument instrument = { 'LANDSAT': 'MSS', 'LANDSAT-1': 'MSS', 'LANDSAT-2': 'MSS', 'LANDSAT-3': 'MSS', 'LANDSAT-4': 'TM', 'LANDSAT-5': 'TM', 'LANDSAT-7': 'ETM+', 'LANDSAT-8': 'OLI' }[platform] ee = pti.get_gcmd_instrument(instrument) self.dataset.SetMetadataItem('instrument', json.dumps(ee))
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    ''' Create AMSR2-L3 VRT '''
    # test the product
    try:
        assert gdalMetadata['PlatformShortName'] == 'GCOM-W1'
        assert gdalMetadata['SensorShortName'] == 'AMSR2'
        assert gdalMetadata['ProductName'] == 'AMSR2-L3'
    except:
        raise WrongMapperError

    # get list of similar (same date, A/D orbit) files in the directory
    iDir, iFile = os.path.split(fileName)
    iFileMask = iFile[:30] + '%02d' + iFile[32:]
    simFiles = []
    for freq in self.freqs:
        simFile = os.path.join(iDir, iFileMask % freq)
        if os.path.exists(simFile):
            simFiles.append(simFile)

    metaDict = []
    for freq in self.freqs:
        simFile = os.path.join(iDir, iFileMask % freq)
        if simFile not in simFiles:
            continue
        # open file, get metadata and get parameter name
        simSupDataset = gdal.Open(simFile)
        if simSupDataset is None:
            # skip this similar file
            continue
        # get subdatasets from the similar file
        simSubDatasets = simSupDataset.GetSubDatasets()
        for simSubDataset in simSubDatasets:
            if 'Brightness_Temperature' in simSubDataset[0]:
                # get SourceFilename from subdataset
                metaEntry = {
                    'src': {'SourceFilename': simSubDataset[0],
                            'SourceBand': 1,
                            'ScaleRatio': 0.0099999998,
                            'ScaleOffset': 0},
                    'dst': {'wkv': 'brightness_temperature',
                            'frequency': '%02d' % freq,
                            'polarisation': simSubDataset[0][-2:-1],
                            'suffix': ('%02d%s' %
                                       (freq, simSubDataset[0][-2:-1]))}
                }
                metaDict.append(metaEntry)

    # initiate VRT for the NSIDC 10 km grid
    VRT.__init__(self,
                 srcGeoTransform=(-3850000, 10000, 0.0,
                                  5850000, 0.0, -10000),
                 srcProjection=NSR(3411).wkt,
                 srcRasterXSize=760,
                 srcRasterYSize=1120)

    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)

    # Adding valid time to dataset
    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse(gdalMetadata['ObservationStartDateTime']).isoformat())
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        parse(gdalMetadata['ObservationStartDateTime']).isoformat())

    mm = pti.get_gcmd_instrument('AMSR2')
    ee = pti.get_gcmd_platform('GCOM-W1')
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))
def __init__(self, filename, gdalDataset, gdalMetadata, GCP_COUNT=10, bandNames=['VNIR_Band1', 'VNIR_Band2', 'VNIR_Band3N'], bandWaves=[560, 660, 820], **kwargs): ''' Create VRT Parameters ----------- GCP_COUNT : int number of GCPs along each dimention bandNames : list of string (band name) bandWaves : list of integer (waves corresponding to band name) Band name and waves -------------------- 'VNIR_Band3B' : 820, 'SWIR_Band4' : 1650, 'SWIR_Band5' : 2165, 'SWIR_Band6' : 2205, 'SWIR_Band7' : 2260, 'SWIR_Band8' : 2330, 'SWIR_Band9' : 2395, 'TIR_Band10' : 8300, 'TIR_Band11' : 8650, 'TIR_Band12' : 9100, 'TIR_Band13' : 10600, 'TIR_Band14' : 11300 ''' # check if it is ASTER L1A try: assert 'AST_L1A_' in filename shortName = gdalMetadata['INSTRUMENTSHORTNAME'] assert shortName == 'ASTER' except: raise WrongMapperError subDatasets = gdalDataset.GetSubDatasets() # find datasets for each band and generate metaDict metaDict = [] bandDatasetMask = 'HDF4_EOS:EOS_SWATH:"%s":%s:ImageData' for bandName, bandWave in zip(bandNames, bandWaves): metaEntry = { 'src': { 'SourceFilename': (bandDatasetMask % (filename, bandName)), 'SourceBand': 1, 'DataType': 6, }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'wavelength': str(bandWave), 'suffix': str(bandWave), } } metaDict.append(metaEntry) # create empty VRT dataset with geolocation only gdalSubDataset = gdal.Open(metaDict[0]['src']['SourceFilename']) self._init_from_gdal_dataset(gdalSubDataset, metadata=gdalSubDataset.GetMetadata()) # add bands with metadata and corresponding values to the empty VRT self.create_bands(metaDict) # find largest lon/lat subdatasets latShape0 = 0 for subDataset in subDatasets: if 'Latitude' in subDataset[1]: ls = int(subDataset[1].strip().split('[')[1].split('x')[0]) if ls >= latShape0: latShape0 = ls latSubDS = subDataset[0] if 'Longitude' in subDataset[1]: ls = int(subDataset[1].strip().split('[')[1].split('x')[0]) if ls >= latShape0: latShape0 = ls lonSubDS = subDataset[0] self.logger.debug(latSubDS) self.logger.debug(lonSubDS) # get lat/lon matrices xDataset = gdal.Open(lonSubDS) yDataset = gdal.Open(latSubDS) longitude = xDataset.ReadAsArray() latitude = yDataset.ReadAsArray() step0 = longitude.shape[0] / GCP_COUNT step1 = longitude.shape[1] / GCP_COUNT # estimate pixel/line step pixelStep = int( ceil( float(gdalSubDataset.RasterXSize) / float(xDataset.RasterXSize))) lineStep = int( ceil( float(gdalSubDataset.RasterYSize) / float(xDataset.RasterYSize))) self.logger.debug('steps: %d %d %d %d' % (step0, step1, pixelStep, lineStep)) # generate list of GCPs gcps = [] k = 0 for i0 in range(0, latitude.shape[0], step0): for i1 in range(0, latitude.shape[1], step1): # create GCP with X,Y,pixel,line from lat/lon matrices lon = float(longitude[i0, i1]) lat = float(latitude[i0, i1]) if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90): gcp = gdal.GCP(lon, lat, 0, i1 * pixelStep, i0 * lineStep) self.logger.debug( '%d %d %d %f %f' % (k, gcp.GCPPixel, gcp.GCPLine, gcp.GCPX, gcp.GCPY)) gcps.append(gcp) k += 1 # append GCPs and lat/lon projection to the vsiDataset self.dataset.SetGCPs(gcps, NSR().wkt) # Adding valid time to dataset self.dataset.SetMetadataItem( 'time_coverage_start', parse(gdalMetadata['FIRSTPACKETTIME']).isoformat()) self.dataset.SetMetadataItem( 'time_coverage_end', parse(gdalMetadata['LASTPACKETTIME']).isoformat()) mm = pti.get_gcmd_instrument('ASTER') ee = pti.get_gcmd_platform('TERRA') self.dataset.SetMetadataItem('instrument', json.dumps(mm)) self.dataset.SetMetadataItem('platform', json.dumps(ee))
def test_init_from_GDALDataset(self):
    ds = gdal.Open(self.test_file)
    d = Domain(ds=ds)
    self.assertEqual(type(d), Domain)
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30, rmMetadatas=['NETCDF_VARNAME', '_Unsigned', 'ScaleRatio', 'ScaleOffset', 'dods_variable'], **kwargs): # Remove 'NC_GLOBAL#' and 'GDAL_' and 'NANSAT_' # from keys in gdalDataset tmpGdalMetadata = {} geoMetadata = {} origin_is_nansat = False if not gdalMetadata: raise WrongMapperError for key in gdalMetadata.keys(): newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '') if 'NANSAT_' in newKey: geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key] origin_is_nansat = True else: tmpGdalMetadata[newKey] = gdalMetadata[key] gdalMetadata = tmpGdalMetadata fileExt = os.path.splitext(inputFileName)[1] # Get file names from dataset or subdataset subDatasets = gdalDataset.GetSubDatasets() if len(subDatasets) == 0: filenames = [inputFileName] else: filenames = [f[0] for f in subDatasets] # add bands with metadata and corresponding values to the empty VRT metaDict = [] xDatasetSource = '' yDatasetSource = '' firstXSize = 0 firstYSize = 0 for _, filename in enumerate(filenames): subDataset = gdal.Open(filename) # choose the first dataset whith grid if (firstXSize == 0 and firstYSize == 0 and subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1): firstXSize = subDataset.RasterXSize firstYSize = subDataset.RasterYSize firstSubDataset = subDataset # get projection from the first subDataset projection = firstSubDataset.GetProjection() # take bands whose sizes are same as the first band. if (subDataset.RasterXSize == firstXSize and subDataset.RasterYSize == firstYSize): if projection == '': projection = subDataset.GetProjection() if ('GEOLOCATION_X_DATASET' in filename or 'longitude' in filename): xDatasetSource = filename elif ('GEOLOCATION_Y_DATASET' in filename or 'latitude' in filename): yDatasetSource = filename else: for iBand in range(subDataset.RasterCount): subBand = subDataset.GetRasterBand(iBand+1) bandMetadata = subBand.GetMetadata_Dict() if 'PixelFunctionType' in bandMetadata: bandMetadata.pop('PixelFunctionType') sourceBands = iBand + 1 # sourceBands = i*subDataset.RasterCount + iBand + 1 # generate src metadata src = {'SourceFilename': filename, 'SourceBand': sourceBands} # set scale ratio and scale offset scaleRatio = bandMetadata.get( 'ScaleRatio', bandMetadata.get( 'scale', bandMetadata.get('scale_factor', ''))) if len(scaleRatio) > 0: src['ScaleRatio'] = scaleRatio scaleOffset = bandMetadata.get( 'ScaleOffset', bandMetadata.get( 'offset', bandMetadata.get( 'add_offset', ''))) if len(scaleOffset) > 0: src['ScaleOffset'] = scaleOffset # sate DataType src['DataType'] = subBand.DataType # generate dst metadata # get all metadata from input band dst = bandMetadata # set wkv and bandname dst['wkv'] = bandMetadata.get('standard_name', '') # first, try the name metadata if 'name' in bandMetadata: bandName = bandMetadata['name'] else: # if it doesn't exist get name from NETCDF_VARNAME bandName = bandMetadata.get('NETCDF_VARNAME', '') if len(bandName) == 0: bandName = bandMetadata.get( 'dods_variable', '' ) # remove digits added by gdal in # exporting to netcdf... 
if (len(bandName) > 0 and origin_is_nansat and fileExt == '.nc'): if bandName[-1:].isdigit(): bandName = bandName[:-1] if bandName[-1:].isdigit(): bandName = bandName[:-1] # if still no bandname, create one if len(bandName) == 0: bandName = 'band_%03d' % iBand dst['name'] = bandName # remove non-necessary metadata from dst for rmMetadata in rmMetadatas: if rmMetadata in dst: dst.pop(rmMetadata) # append band with src and dst dictionaries metaDict.append({'src': src, 'dst': dst}) # create empty VRT dataset with geolocation only self._init_from_gdal_dataset(firstSubDataset, metadata=gdalMetadata) # add bands with metadata and corresponding values to the empty VRT self.create_bands(metaDict) self._create_complex_bands(filenames) if len(projection) == 0: # projection was not set automatically # get projection from GCPProjection projection = geoMetadata.get('GCPProjection', '') if len(projection) == 0: # no projection was found in dataset or metadata: # generate WGS84 by default projection = NSR().wkt # fix problem with MET.NO files where a, b given in m and XC/YC in km if ('UNIT["kilometre"' in projection and ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]' in projection): projection = projection.replace( ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]', '') # set projection self.dataset.SetProjection(self.repare_projection(projection)) # check if GCPs were added from input dataset gcps = firstSubDataset.GetGCPs() gcpProjection = firstSubDataset.GetGCPProjection() # if no GCPs in input dataset: try to add GCPs from metadata if not gcps: gcps = self.add_gcps_from_metadata(geoMetadata) # if yet no GCPs: try to add GCPs from variables if not gcps: gcps = self.add_gcps_from_variables(inputFileName) if gcps: if len(gcpProjection) == 0: # get GCP projection and repare gcpProjection = self.repare_projection(geoMetadata. 
get('GCPProjection', '')) # add GCPs to dataset self.dataset.SetGCPs(gcps, gcpProjection) self.dataset.SetProjection('') self._remove_geotransform() # Find proper bands and insert GEOLOCATION ARRAY into dataset if len(xDatasetSource) > 0 and len(yDatasetSource) > 0: self._add_geolocation(Geolocation.from_filenames(xDatasetSource, yDatasetSource)) elif not gcps: # if no GCPs found and not GEOLOCATION ARRAY set: # Set Nansat Geotransform if it is not set automatically geoTransform = self.dataset.GetGeoTransform() if len(geoTransform) == 0: geoTransformStr = geoMetadata.get('GeoTransform', '(0|1|0|0|0|0|1)') geoTransform = eval(geoTransformStr.replace('|', ',')) self.dataset.SetGeoTransform(geoTransform) subMetadata = firstSubDataset.GetMetadata() ### GET START TIME from METADATA time_coverage_start = None if 'start_time' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['start_time']) elif 'start_date' in gdalMetadata: time_coverage_start = parse_time(gdalMetadata['start_date']) elif 'time_coverage_start' in gdalMetadata: time_coverage_start = parse_time( gdalMetadata['time_coverage_start']) ### GET END TIME from METADATA time_coverage_end = None if 'stop_time' in gdalMetadata: time_coverage_end = parse_time(gdalMetadata['stop_time']) elif 'stop_date' in gdalMetadata: time_coverage_end = parse_time(gdalMetadata['stop_date']) elif 'time_coverage_stop' in gdalMetadata: time_coverage_end = parse_time( gdalMetadata['time_coverage_stop']) elif 'end_time' in gdalMetadata: time_coverage_end = parse_time(gdalMetadata['end_time']) elif 'end_date' in gdalMetadata: time_coverage_end = parse_time(gdalMetadata['end_date']) elif 'time_coverage_end' in gdalMetadata: time_coverage_end = parse_time( gdalMetadata['time_coverage_end']) ### GET start time from time variable if (time_coverage_start is None and 'time#standard_name' in subMetadata and subMetadata['time#standard_name'] == 'time' and 'time#units' in subMetadata): # get data from netcdf data ncFile = Dataset(inputFileName, 'r') time_var = ncFile.variables['time'] t0 = time_var[0] if len(time_var) == 1: t1 = t0 + 1 else: t1 = time_var[-1] time_units_start = parse(time_var.units, fuzzy=True, ignoretz=True) time_units_to_seconds = {'second' : 1.0, 'hour' : 60 * 60.0, 'day' : 24 * 60 * 60.0} for key in time_units_to_seconds: if key in time_var.units: factor = time_units_to_seconds[key] break time_coverage_start = time_units_start + datetime.timedelta(seconds=t0 * factor) time_coverage_end = time_units_start + datetime.timedelta(seconds=t1 * factor) ## finally set values of time_coverage start and end if available if time_coverage_start is not None: self.dataset.SetMetadataItem('time_coverage_start', time_coverage_start.isoformat()) if time_coverage_end is not None: self.dataset.SetMetadataItem('time_coverage_end', time_coverage_end.isoformat()) if 'sensor' not in gdalMetadata: self.dataset.SetMetadataItem('sensor', 'unknown') if 'satellite' not in gdalMetadata: self.dataset.SetMetadataItem('satellite', 'unknown') if 'source_type' not in gdalMetadata: self.dataset.SetMetadataItem('source_type', 'unknown') if 'platform' not in gdalMetadata: self.dataset.SetMetadataItem('platform', 'unknown') if 'instrument' not in gdalMetadata: self.dataset.SetMetadataItem('instrument', 'unknown') self.logger.info('Use generic mapper - OK!')
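The fallback above decodes the netCDF time axis by parsing the reference date out of a CF-style units string ('seconds/hours/days since ...') and scaling the first and last values to seconds. A condensed sketch of that logic (assuming netCDF4 and dateutil are available, as the mapper already does):

import datetime
from dateutil.parser import parse
from netCDF4 import Dataset

def time_coverage_from_nc(nc_path):
    time_var = Dataset(nc_path, 'r').variables['time']
    epoch = parse(time_var.units, fuzzy=True, ignoretz=True)
    # unit keyword -> factor to seconds; default to seconds if no keyword matches
    factors = {'second': 1.0, 'hour': 3600.0, 'day': 86400.0}
    factor = next((v for k, v in factors.items() if k in time_var.units), 1.0)
    start = epoch + datetime.timedelta(seconds=float(time_var[0]) * factor)
    end = epoch + datetime.timedelta(seconds=float(time_var[-1]) * factor)
    return start, end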
def __init__(self, filename, gdalDataset, gdalMetadata, GCP_COUNT=10, **kwargs): ''' Create VRT Parameters ---------- GCP_COUNT : int number of GCPs along each dimention ''' # extension must be .nc if os.path.splitext(filename)[1] != '.nc': raise WrongMapperError # file must contain navigation_data/longitude try: ds = gdal.Open('HDF5:"%s"://navigation_data/longitude' % filename) except RuntimeError: raise WrongMapperError else: dsMetadata = ds.GetMetadata() # title value must be known if dsMetadata.get('title', '') not in self.titles: raise WrongMapperError # get geophysical data variables subDatasets = gdal.Open(filename).GetSubDatasets() metaDict = [] for subDataset in subDatasets: groupName = subDataset[0].split('/')[-2] if groupName not in ['geophysical_data', 'navigation_data']: continue varName = subDataset[0].split('/')[-1] subds = gdal.Open(subDataset[0]) b = subds.GetRasterBand(1) bMetadata = b.GetMetadata() # set SRC/DST parameters metaEntry = { 'src': { 'SourceFilename': subDataset[0], 'sourceBand': 1, 'DataType': b.DataType }, 'dst': { 'name': varName } } # replace datatype for l2_flags if varName == 'l2_flags': metaEntry['src']['DataType'] = 4 metaEntry['src']['SourceType'] = 'SimpleSource' # set scale if exist metaKey = '%s_%s_scale_factor' % (groupName, varName) if metaKey in bMetadata: metaEntry['src']['ScaleRatio'] = bMetadata[metaKey] # set offset if exist metaKey = '%s_%s_add_offset' % (groupName, varName) if metaKey in bMetadata: metaEntry['src']['ScaleOffset'] = bMetadata[metaKey] # set standard_name if exists metaKey = '%s_%s_standard_name' % (groupName, varName) if metaKey in bMetadata: metaEntry['dst']['wkv'] = bMetadata[metaKey] # set other metadata for metaKey in bMetadata: newMetaKey = metaKey.replace('%s_%s_' % (groupName, varName), '') if newMetaKey not in [ 'scale_factor', 'add_offset', 'DIMENSION_LIST', '_FillValue' ]: metaEntry['dst'][newMetaKey] = bMetadata[metaKey] metaDict.append(metaEntry) # make GCPs # get lat/lon grids longitude = gdal.Open('HDF5:"%s"://navigation_data/longitude' % filename).ReadAsArray() latitude = gdal.Open('HDF5:"%s"://navigation_data/latitude' % filename).ReadAsArray() rasterYSize, rasterXSize = longitude.shape step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT)) step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT)) gcps = [] k = 0 center_lon = 0 center_lat = 0 for i0 in range(0, latitude.shape[0], step0): for i1 in range(0, latitude.shape[1], step1): # create GCP with X,Y,pixel,line from lat/lon matrices lon = float(longitude[i0, i1]) lat = float(latitude[i0, i1]) if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90): gcp = gdal.GCP(lon, lat, 0, i1 + 0.5, i0 + 0.5) gcps.append(gcp) center_lon += lon center_lat += lat k += 1 time_coverage_start = dsMetadata['time_coverage_start'] time_coverage_end = dsMetadata['time_coverage_end'] # create VRT # x_size, y_size, geo_transform, projection, gcps=None, gcp_projection='', **kwargs self._init_from_dataset_params(rasterXSize, rasterYSize, (0, 1, 0, rasterYSize, 0, -1), NSR().wkt, gcps, NSR().wkt) # add bands self.create_bands(metaDict) # reproject GCPs center_lon /= k center_lat /= k srs = '+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=%f +lat_0=%f +no_defs' % ( center_lon, center_lat) self.reproject_GCPs(srs) ### BAD, BAd, bad ... 
self.dataset.SetProjection(self.dataset.GetGCPProjection()) # use TPS for reprojection self.tps = True # add NansenCloud metadata self.dataset.SetMetadataItem('time_coverage_start', str(time_coverage_start)) self.dataset.SetMetadataItem('time_coverage_end', str(time_coverage_end)) self.dataset.SetMetadataItem('source_type', 'Satellite') self.dataset.SetMetadataItem('mapper', 'obpg_l2_nc') platform = { 'Orbview-2': 'SEASTAR' }.get(dsMetadata.get('platform'), dsMetadata.get('platform')) mm = pti.get_gcmd_instrument(dsMetadata.get('instrument')) ee = pti.get_gcmd_platform(platform) self.dataset.SetMetadataItem('instrument', json.dumps(mm)) self.dataset.SetMetadataItem('platform', json.dumps(ee))
def __init__(self, filename, gdalDataset, gdalMetadata, GCP_STEP=20, MAX_LAT=90, MIN_LAT=50, resolution='low', **kwargs): ''' Create VRT Parameters ---------- GCP_COUNT : int number of GCPs along each dimention ''' ifile = os.path.split(filename)[1] if not ifile.startswith('GW1AM2_') or not ifile.endswith('.h5'): raise WrongMapperError try: ProductName = gdalMetadata['ProductName'] PlatformShortName = gdalMetadata['PlatformShortName'] SensorShortName = gdalMetadata['SensorShortName'] except: raise WrongMapperError if (not ProductName == 'AMSR2-L1R' or not PlatformShortName == 'GCOM-W1' or not SensorShortName == 'AMSR2'): raise WrongMapperError if resolution == 'low': subDatasetWidth = 243 else: subDatasetWidth = 486 # get GCPs from lon/lat grids latGrid = gdal.Open('HDF5:"%s"://Latitude_of_Observation_Point_for_89A' % filename).ReadAsArray() lonGrid = gdal.Open('HDF5:"%s"://Longitude_of_Observation_Point_for_89A' % filename).ReadAsArray() if subDatasetWidth == 243: latGrid = latGrid[:, ::2] lonGrid = lonGrid[:, ::2] dx = .5 dy = .5 gcps = [] k = 0 maxY = 0 minY = latGrid.shape[0] for i0 in range(0, latGrid.shape[0], GCP_STEP): for i1 in range(0, latGrid.shape[1], GCP_STEP): # create GCP with X,Y,pixel,line from lat/lon matrices lon = float(lonGrid[i0, i1]) lat = float(latGrid[i0, i1]) if (lon >= -180 and lon <= 180 and lat >= MIN_LAT and lat <= MAX_LAT): gcp = gdal.GCP(lon, lat, 0, i1 + dx, i0 + dy) gcps.append(gcp) k += 1 maxY = max(maxY, i0) minY = min(minY, i0) yOff = minY ySize = maxY - minY # remove Y-offset from gcps for gcp in gcps: gcp.GCPLine -= yOff metaDict = [] subDatasets = gdalDataset.GetSubDatasets() metadata = gdalDataset.GetMetadata() for subDataset in subDatasets: # select subdatasets fro that resolution (width) if (subDatasetWidth == int(subDataset[1].split(']')[0].split('x')[-1]) and 'Latitude' not in subDataset[0] and 'Longitude' not in subDataset[0]): name = subDataset[0].split('/')[-1] # find scale scale = 1 for meta in metadata: if name + '_SCALE' in meta: scale = float(metadata[meta]) # create meta entry metaEntry = {'src': {'SourceFilename': subDataset[0], 'sourceBand': 1, 'ScaleRatio': scale, 'ScaleOffset': 0, 'yOff': yOff, 'ySize': ySize,}, 'dst': {'name': name} } metaDict.append(metaEntry) # create VRT from one of the subdatasets gdalSubDataset = gdal.Open(metaEntry['src']['SourceFilename']) self._init_from_dataset_params(subDatasetWidth, ySize, (1,0,0,ySize,0,-1), NSR().wkt) # add bands with metadata and corresponding values to the empty VRT self.create_bands(metaDict) self.dataset.SetMetadataItem('time_coverage_start', parse_time(gdalMetadata['ObservationStartDateTime']).isoformat()) self.dataset.SetMetadataItem('time_coverage_end', parse_time(gdalMetadata['ObservationEndDateTime']).isoformat()) # append GCPs and lat/lon projection to the vsiDataset self.dataset.SetGCPs(gcps, NSR().wkt) self.reproject_gcps('+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs') self.tps = True mm = pti.get_gcmd_instrument('AMSR2') ee = pti.get_gcmd_platform('GCOM-W1') self.dataset.SetMetadataItem('instrument', json.dumps(mm)) self.dataset.SetMetadataItem('platform', json.dumps(ee))
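The AMSR2 L1R mapper above keeps only GCPs between MIN_LAT and MAX_LAT and then shifts their line coordinates by the first kept row, so the cropped VRT starts at line 0. A small sketch of that re-anchoring step (names are illustrative):

def crop_gcps_to_rows(gcps, min_row, max_row):
    y_off = min_row
    y_size = max_row - min_row
    for gcp in gcps:
        gcp.GCPLine -= y_off   # re-anchor each GCP to the cropped raster
    # y_off and y_size are later reused as the yOff/ySize of every band source
    return y_off, y_size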
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): ''' OBPG L3 VRT ''' try: assert 'Level-3 Standard Mapped Image' in gdalMetadata['Title'] except: raise WrongMapperError # get list of similar (same date) files in the directory iDir, iFile = os.path.split(fileName) iFileName, iFileExt = os.path.splitext(iFile) simFilesMask = os.path.join(iDir, iFileName) simFiles = glob.glob(simFilesMask + iFileExt[0:6] + '*') #print 'simFilesMask, simFiles', simFilesMask, simFiles metaDict = [] for simFile in simFiles: #print 'simFile', simFile # open file, get metadata and get parameter name simSupDataset = gdal.Open(simFile) if simSupDataset is None: # skip this similar file #print 'No dataset: %s not a supported SMI file' % simFile continue # get subdatasets from the similar file simSubDatasets = simSupDataset.GetSubDatasets() if len(simSubDatasets) > 0: for simSubDataset in simSubDatasets: #print 'simSubDataset', simSubDataset if 'l3m_data' in simSubDataset[1]: # get SourceFilename from subdataset tmpSourceFilename = simSubDataset[0] break else: # get SourceFilename from dataset tmpSourceFilename = simFile # open subdataset with GDAL #print 'tmpSourceFilename', tmpSourceFilename tmpGdalDataset = gdal.Open(tmpSourceFilename) try: # get metadata, get 'Parameter' tmpGdalMetadata = tmpGdalDataset.GetMetadata() simParameter = tmpGdalMetadata['Parameter'] except: print 'No parameter: %s not a supported SMI file' % simFile continue else: # set params of the similar file simSourceFilename = tmpSourceFilename simGdalDataset = tmpGdalDataset simGdalMetadata = tmpGdalMetadata # get WKV from the similar file #print 'simParameter', simParameter for param in self.param2wkv: #print 'param', param if param in simParameter: simWKV = self.param2wkv[param] break # generate entry to metaDict metaEntry = { 'src': { 'SourceFilename': simSourceFilename, 'SourceBand': 1, 'ScaleRatio': float(simGdalMetadata['Slope']), 'ScaleOffset': float(simGdalMetadata['Intercept']) }, 'dst': { 'wkv': simWKV } } # add wavelength and BandName if ' at ' in simParameter and ' nm' in simParameter: simWavelength = simParameter.split(' at ')[1].split(' nm')[0] metaEntry['dst']['suffix'] = simWavelength metaEntry['dst']['wavelength'] = simWavelength # add band with Rrsw metaEntry2 = None if simWKV == 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air': metaEntry2 = {'src': [metaEntry['src']]} metaEntry2['dst'] = { 'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_water', 'suffix': simWavelength, 'wavelength': simWavelength, 'PixelFunctionType': 'NormReflectanceToRemSensReflectance', } # append entry to metaDict metaDict.append(metaEntry) if metaEntry2 is not None: metaDict.append(metaEntry2) #get array with data and make 'mask' a = simGdalDataset.ReadAsArray() mask = np.zeros(a.shape, 'uint8') + 64 mask[a < -32000] = 1 self.bandVRTs = {'mask': VRT(array=mask)} metaDict.append({ 'src': { 'SourceFilename': self.bandVRTs['mask'].fileName, 'SourceBand': 1 }, 'dst': { 'name': 'mask' } }) # create empty VRT dataset with geolocation only # print 'simGdalMetadata', simGdalMetadata latitudeStep = float( simGdalMetadata.get('Latitude Step', simGdalMetadata.get('Latitude_Step', 1))) longitudeStep = float( simGdalMetadata.get('Longitude Step', simGdalMetadata.get('Longitude_Step', 1))) numberOfColumns = int( simGdalMetadata.get('Number of Columns', simGdalMetadata.get('Number_of_Columns', 1))) numberOfLines = int( simGdalMetadata.get('Number of 
Lines', simGdalMetadata.get('Number_of_Lines', 1)))
        #longitudeStep = float(simGdalMetadata['Longitude Step'])
        VRT.__init__(self, srcGeoTransform=(-180.0, longitudeStep, 0.0,
                                            90.0, 0.0, -longitudeStep),
                     srcProjection=NSR().wkt,
                     srcRasterXSize=numberOfColumns,
                     srcRasterYSize=numberOfLines)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # Add valid time
        startYear = int(simGdalMetadata.get('Start Year',
                                            simGdalMetadata.get('Start_Year', 1)))
        startDay = int(simGdalMetadata.get('Start Day',
                                           simGdalMetadata.get('Start_Day', 1)))
        self._set_time(datetime.datetime(startYear, 1, 1) +
                       datetime.timedelta(startDay))
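The SMI grid above is a regular global lon/lat raster anchored at the upper-left corner (-180, 90); the mapper reuses longitudeStep for the Y direction, which equals -latitudeStep for this square global grid. Two tiny helpers restating that, plus the start-date arithmetic (helper names are illustrative):

import datetime

def smi_geotransform(longitude_step, latitude_step):
    # GDAL geotransform: (x_origin, x_step, 0, y_origin, 0, -y_step)
    return (-180.0, longitude_step, 0.0, 90.0, 0.0, -latitude_step)

def smi_start_date(start_year, start_day):
    # day-of-year offset from 1 January, exactly as the mapper computes it
    return datetime.datetime(start_year, 1, 1) + datetime.timedelta(start_day)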
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): ''' Create LANDSAT VRT ''' # try to open .tar or .tar.gz or .tgz file with tar try: tarFile = tarfile.open(fileName) except: raise WrongMapperError tarNames = tarFile.getnames() #print tarNames metaDict = [] for tarName in tarNames: if ((tarName[0] == 'L' or tarName[0] == 'M') and (tarName[-4:] == '.TIF' or tarName[-4:] == '.tif')): #print tarName bandNo = tarName[-6:-4] metaDict.append({ 'src': { 'SourceFilename': '/vsitar/%s/%s' % (fileName, tarName), 'SourceBand': 1 }, 'dst': { 'wkv': 'toa_outgoing_spectral_radiance', 'suffix': bandNo } }) if not metaDict: raise WrongMapperError #print metaDict sizeDiffBands = [] for iFile in range(len(metaDict)): tmpName = metaDict[iFile]['src']['SourceFilename'] gdalDatasetTmp = gdal.Open(tmpName) if iFile == 0: gdalDatasetTmp0 = gdalDatasetTmp xSize = gdalDatasetTmp.RasterXSize ySize = gdalDatasetTmp.RasterYSize elif (xSize != gdalDatasetTmp.RasterXSize or ySize != gdalDatasetTmp.RasterYSize): sizeDiffBands.append(iFile) # create empty VRT dataset with geolocation only VRT.__init__(self, gdalDatasetTmp0) # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) # 8th band of LANDSAT8 is a double size band. # Reduce the size to same as the 1st band. if len(sizeDiffBands) != 0: vrtXML = self.read_xml() node0 = Node.create(vrtXML) for iBand in sizeDiffBands: iBandNode = node0.nodeList('VRTRasterBand')[iBand] iNodeDstRect = iBandNode.node('DstRect') iNodeDstRect.replaceAttribute('xSize', str(xSize)) iNodeDstRect.replaceAttribute('ySize', str(ySize)) self.write_xml(node0.rawxml())
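The Landsat mapper above shrinks the oversized band by rewriting the DstRect attributes in the VRT XML, which makes GDAL resample it to the size of band 1. A sketch of the same edit using xml.etree as a stand-in for Nansat's Node helper (the function name is illustrative):

import xml.etree.ElementTree as ET

def shrink_bands(vrt_xml, band_indices, x_size, y_size):
    root = ET.fromstring(vrt_xml)
    bands = root.findall('VRTRasterBand')
    for i in band_indices:
        # DstRect sits inside the source element of each raster band
        for rect in bands[i].iter('DstRect'):
            rect.set('xSize', str(x_size))
            rect.set('ySize', str(y_size))
    return ET.tostring(root, encoding='unicode')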
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs): ''' GLOBCOLOR L3M VRT ''' try: print("=>%s<=" % gdalMetadata['NC_GLOBAL#title']) except (TypeError, KeyError): raise WrongMapperError if 'GlobColour' not in gdalMetadata['NC_GLOBAL#title']: raise WrongMapperError # get list of similar (same date) files in the directory iDir, iFile = os.path.split(filename) iFileName, iFileExt = os.path.splitext(iFile) print('idir:', iDir, iFile, iFileName[0:30], iFileExt[0:8]) simFilesMask = os.path.join(iDir, iFileName[0:30] + '*.nc') simFiles = glob.glob(simFilesMask) print('simFilesMask, simFiles', simFilesMask, simFiles) metaDict = [] for simFile in simFiles: print('simFile', simFile) # open file, get metadata and get parameter name simSupDataset = gdal.Open(simFile) simSubDatasets = simSupDataset.GetSubDatasets() simWKV = None for simSubDataset in simSubDatasets: if '_mean' in simSubDataset[0]: simValidSupDataset = simSupDataset simGdalDataset = gdal.Open(simSubDataset[0]) simBand = simGdalDataset.GetRasterBand(1) simBandMetadata = simBand.GetMetadata() simVarname = simBandMetadata['NETCDF_VARNAME'] # get WKV print(' simVarname', simVarname) if simVarname in self.varname2wkv: simWKV = self.varname2wkv[simVarname] break # skipp adding this similar file if it is not valid if simWKV is None: continue metaEntry = { 'src': { 'SourceFilename': simSubDataset[0], 'SourceBand': 1 }, 'dst': { 'wkv': simWKV, 'original_name': simVarname } } # add wavelength and name longName = simBandMetadata['long_name'] if 'Fully normalised water leaving radiance' in longName: simWavelength = simVarname.split('L')[1].split('_mean')[0] metaEntry['dst']['suffix'] = simWavelength metaEntry['dst']['wavelength'] = simWavelength # add band with rrsw metaEntry2 = None if simWKV == 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water': solarIrradiance = simBandMetadata['solar_irradiance'] metaEntry2 = {'src': metaEntry['src']} metaEntry2['dst'] = { 'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea' '_water_to_downwelling_radiative_flux_in_water', 'suffix': simWavelength, 'wavelength': simWavelength, # 'expression': 'self["nLw_%s"] / %s / (0.52 + 1.7 * self["nLw # _%s"] / %s)' % (simWavelength, solarIrradiance, # simWavelength, solarIrradiance), 'expression': 'self["nLw_%s"] / %s' % (simWavelength, solarIrradiance) } print(' metaEntry', metaEntry) metaDict.append(metaEntry) if metaEntry2 is not None: print(' metaEntry2', metaEntry2) metaDict.append(metaEntry2) print('simSubDatasets', simValidSupDataset.GetSubDatasets()) for simSubDataset in simValidSupDataset.GetSubDatasets(): print('simSubDataset', simSubDataset) if '_flags ' in simSubDataset[1]: print(' mask simSubDataset', simSubDataset[1]) flags = gdal.Open(simSubDataset[0]).ReadAsArray() mask = np.ones(flags.shape) * 64 mask[np.bitwise_and(flags, np.power(2, 0)) > 0] = 1 mask[np.bitwise_and(flags, np.power(2, 3)) > 0] = 2 self.band_vrts = {'maskVRT': VRT(array=mask)} metaDict.append({ 'src': { 'SourceFilename': self.band_vrts['maskVRT'].filename, 'SourceBand': 1 }, 'dst': { 'name': 'mask' } }) # create empty VRT dataset with geolocation only simGdalDataset.SetProjection( 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,' '298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG",' '"6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4326"]]') self._init_from_gdal_dataset(simGdalDataset) # add bands with metadata and corresponding values to the 
empty VRT self.create_bands(metaDict) # Add valid time startYear = int(gdalMetadata['Start Year']) startDay = int(gdalMetadata['Start Day']) # Adding valid time to dataset self.dataset.SetMetadataItem( 'time_coverage_start', (datetime.datetime(startYear, 1, 1) + datetime.timedelta(startDay)).isoformat()) self.dataset.SetMetadataItem( 'time_coverage_end', (datetime.datetime(startYear, 1, 1) + datetime.timedelta(startDay)).isoformat())
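The mask band above is derived from the _flags layer with bitwise tests: 64 marks valid data, while pixels with flag bit 0 or bit 3 set are marked 1 and 2 respectively (the meaning of those bits is not spelled out in the mapper). A compact restatement:

import numpy as np

def flags_to_mask(flags):
    mask = np.ones(flags.shape, dtype='uint8') * 64
    mask[np.bitwise_and(flags, 1 << 0) > 0] = 1
    mask[np.bitwise_and(flags, 1 << 3) > 0] = 2
    return mask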
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    ''' Ocean Productivity website VRT '''
    try:
        assert 'IDL' in gdalMetadata['Projection Category']
        assert '-9999' in gdalMetadata['Hole Value']
    except:
        raise WrongMapperError

    print('Ocean Productivity website data')

    # get list of similar (same date) files in the directory
    iDir, iFile = os.path.split(fileName)
    iFileName, iFileExt = os.path.splitext(iFile)
    simFilesMask = os.path.join(iDir, '*' + iFileName[4:11] + iFileExt)
    simFiles = glob.glob(simFilesMask)

    metaDict = []
    for simFile in simFiles:
        # open subdataset with GDAL
        tmpSourceFilename = simFile
        tmpGdalDataset = gdal.Open(tmpSourceFilename)

        # get metadata, get 'Parameter'
        tmpGdalMetadata = tmpGdalDataset.GetMetadata()
        iDir, ifileName = os.path.split(tmpSourceFilename)
        simParameter = ifileName[0:3]

        # set params of the similar file
        simSourceFilename = tmpSourceFilename
        simGdalDataset = tmpGdalDataset
        simGdalMetadata = tmpGdalMetadata

        # get WKV from the similar file
        for param in self.param2wkv:
            if param in simParameter:
                simWKV = self.param2wkv[param]
                break

        # generate entry to metaDict
        metaEntry = {
            'src': {'SourceFilename': simSourceFilename,
                    'SourceBand': 1,
                    'ScaleRatio': float(simGdalMetadata['Slope']),
                    'ScaleOffset': float(simGdalMetadata['Intercept'])},
            'dst': {'wkv': simWKV,
                    'name': self.bandNames[simWKV],
                    'Parameter': simParameter}
        }
        # append entry to metaDict
        metaDict.append(metaEntry)

    # get array with data and make 'mask'
    a = simGdalDataset.ReadAsArray()
    mask = np.zeros(a.shape, 'uint8') + 128
    mask[a < -9990] = 1
    self.bandVRTs = {'maskVRT': VRT(array=mask)}
    metaDict.append({'src': {'SourceFilename': self.bandVRTs['maskVRT'].fileName,
                             'SourceBand': 1},
                     'dst': {'name': 'mask'}})

    # create empty VRT dataset with geolocation only
    latitudeStep = 0.08333334
    longitudeStep = 0.08333334
    numberOfColumns = 4320
    numberOfLines = 2160
    #longitudeStep = float(simGdalMetadata['Longitude Step'])
    VRT.__init__(self,
                 srcGeoTransform=(-180.0, longitudeStep, 0.0,
                                  90.0, 0.0, -longitudeStep),
                 srcProjection='GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
                 srcRasterXSize=numberOfColumns,
                 srcRasterYSize=numberOfLines)

    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)

    # Add valid time
    startYear = int(iFile[4:8])
    startDay = int(iFile[8:11])
    self.dataset.SetMetadataItem(
        'time_coverage_start',
        (datetime.datetime(startYear, 1, 1) +
         datetime.timedelta(startDay)).isoformat())
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): ''' Create Radarsat2 VRT ''' fPathName, fExt = os.path.splitext(fileName) if zipfile.is_zipfile(fileName): # Open zip file using VSI fPath, fName = os.path.split(fPathName) fileName = '/vsizip/%s/%s' % (fileName, fName) if not 'RS' in fName[0:2]: raise WrongMapperError('Provided data is not Radarsat-2') gdalDataset = gdal.Open(fileName) gdalMetadata = gdalDataset.GetMetadata() #if it is not RADARSAT-2, return if (not gdalMetadata or not 'SATELLITE_IDENTIFIER' in gdalMetadata.keys()): raise WrongMapperError elif gdalMetadata['SATELLITE_IDENTIFIER'] != 'RADARSAT-2': raise WrongMapperError # read product.xml productXmlName = os.path.join(fileName, 'product.xml') productXml = self.read_xml(productXmlName) # Get additional metadata from product.xml rs2_0 = Node.create(productXml) rs2_1 = rs2_0.node('sourceAttributes') rs2_2 = rs2_1.node('radarParameters') if rs2_2['antennaPointing'].lower() == 'right': antennaPointing = 90 else: antennaPointing = -90 rs2_3 = rs2_1.node('orbitAndAttitude').node('orbitInformation') passDirection = rs2_3['passDirection'] # create empty VRT dataset with geolocation only VRT.__init__(self, gdalDataset) #define dictionary of metadata and band specific parameters pol = [] metaDict = [] # Get the subdataset with calibrated sigma0 only for dataset in gdalDataset.GetSubDatasets(): if dataset[1] == 'Sigma Nought calibrated': s0dataset = gdal.Open(dataset[0]) s0datasetName = dataset[0][:] band = s0dataset.GetRasterBand(1) s0datasetPol = band.GetMetadata()['POLARIMETRIC_INTERP'] for i in range(1, s0dataset.RasterCount + 1): iBand = s0dataset.GetRasterBand(i) polString = iBand.GetMetadata()['POLARIMETRIC_INTERP'] suffix = polString # The nansat data will be complex # if the SAR data is of type 10 dtype = iBand.DataType if dtype == 10: # add intensity band metaDict.append({ 'src': { 'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:' + fileName + '/product.xml'), 'SourceBand': i, 'DataType': dtype }, 'dst': { 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave', 'PixelFunctionType': 'intensity', 'SourceTransferType': gdal.GetDataTypeName(dtype), 'suffix': suffix, 'polarization': polString, 'dataType': 6 } }) # modify suffix for adding the compled band below suffix = polString + '_complex' pol.append(polString) metaDict.append({ 'src': { 'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:' + fileName + '/product.xml'), 'SourceBand': i, 'DataType': dtype }, 'dst': { 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave', 'suffix': suffix, 'polarization': polString } }) if dataset[1] == 'Beta Nought calibrated': b0dataset = gdal.Open(dataset[0]) b0datasetName = dataset[0][:] for j in range(1, b0dataset.RasterCount + 1): jBand = b0dataset.GetRasterBand(j) polString = jBand.GetMetadata()['POLARIMETRIC_INTERP'] if polString == s0datasetPol: b0datasetBand = j ############################### # Add SAR look direction ############################### d = Domain(ds=gdalDataset) lon, lat = d.get_geolocation_grids(100) ''' (GDAL?) Radarsat-2 data is stored with maximum latitude at first element of each column and minimum longitude at first element of each row (e.g. np.shape(lat)=(59,55) -> latitude maxima are at lat[0,:], and longitude minima are at lon[:,0]) In addition, there is an interpolation error for direct estimate along azimuth. We therefore estimate the heading along range and add 90 degrees to get the "satellite" heading. 
''' if str(passDirection).upper() == 'DESCENDING': sat_heading = initial_bearing(lon[:, :-1], lat[:, :-1], lon[:, 1:], lat[:, 1:]) + 90 elif str(passDirection).upper() == 'ASCENDING': sat_heading = initial_bearing(lon[:, 1:], lat[:, 1:], lon[:, :-1], lat[:, :-1]) + 90 else: print 'Can not decode pass direction: ' + str(passDirection) # Calculate SAR look direction SAR_look_direction = sat_heading + antennaPointing # Interpolate to regain lost row SAR_look_direction = np.mod(SAR_look_direction, 360) SAR_look_direction = scipy.ndimage.interpolation.zoom( SAR_look_direction, (1, 11. / 10.)) # Decompose, to avoid interpolation errors around 0 <-> 360 SAR_look_direction_u = np.sin(np.deg2rad(SAR_look_direction)) SAR_look_direction_v = np.cos(np.deg2rad(SAR_look_direction)) look_u_VRT = VRT(array=SAR_look_direction_u, lat=lat, lon=lon) look_v_VRT = VRT(array=SAR_look_direction_v, lat=lat, lon=lon) # Note: If incidence angle and look direction are stored in # same VRT, access time is about twice as large lookVRT = VRT(lat=lat, lon=lon) lookVRT._create_band([{ 'SourceFilename': look_u_VRT.fileName, 'SourceBand': 1 }, { 'SourceFilename': look_v_VRT.fileName, 'SourceBand': 1 }], {'PixelFunctionType': 'UVToDirectionTo'}) # Blow up to full size lookVRT = lookVRT.get_resized_vrt(gdalDataset.RasterXSize, gdalDataset.RasterYSize) # Store VRTs so that they are accessible later self.bandVRTs['look_u_VRT'] = look_u_VRT self.bandVRTs['look_v_VRT'] = look_v_VRT self.bandVRTs['lookVRT'] = lookVRT # Add band to full sized VRT lookFileName = self.bandVRTs['lookVRT'].fileName metaDict.append({ 'src': { 'SourceFilename': lookFileName, 'SourceBand': 1 }, 'dst': { 'wkv': 'sensor_azimuth_angle', 'name': 'SAR_look_direction' } }) ############################### # Create bands ############################### self._create_bands(metaDict) ################################################### # Add derived band (incidence angle) calculated # using pixel function "BetaSigmaToIncidence": ################################################### src = [{ 'SourceFilename': b0datasetName, 'SourceBand': b0datasetBand, 'DataType': dtype }, { 'SourceFilename': s0datasetName, 'SourceBand': 1, 'DataType': dtype }] dst = { 'wkv': 'angle_of_incidence', 'PixelFunctionType': 'BetaSigmaToIncidence', 'SourceTransferType': gdal.GetDataTypeName(dtype), '_FillValue': -10000, # NB: this is also hard-coded in # pixelfunctions.c 'dataType': 6, 'name': 'incidence_angle' } self._create_band(src, dst) self.dataset.FlushCache() ################################################################### # Add sigma0_VV - pixel function of sigma0_HH and beta0_HH # incidence angle is calculated within pixel function # It is assummed that HH is the first band in sigma0 and # beta0 sub datasets ################################################################### if 'VV' not in pol and 'HH' in pol: s0datasetNameHH = pol.index('HH') + 1 src = [{ 'SourceFilename': s0datasetName, 'SourceBand': s0datasetNameHH, 'DataType': 6 }, { 'SourceFilename': b0datasetName, 'SourceBand': b0datasetBand, 'DataType': 6 }] dst = { 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave', 'PixelFunctionType': 'Sigma0HHBetaToSigma0VV', 'polarization': 'VV', 'suffix': 'VV' } self._create_band(src, dst) self.dataset.FlushCache() ############################################ # Add SAR metadata ############################################ if antennaPointing == 90: self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT') if antennaPointing == -90: 
self.dataset.SetMetadataItem('ANTENNA_POINTING', 'LEFT') self.dataset.SetMetadataItem('ORBIT_DIRECTION', str(passDirection).upper()) # Set time validTime = gdalDataset.GetMetadata()['ACQUISITION_START_TIME'] self.logger.info('Valid time: %s', str(validTime)) self._set_time(parse(validTime)) # set SADCAT specific metadata self.dataset.SetMetadataItem( 'start_date', (parse(gdalMetadata['FIRST_LINE_TIME']).isoformat())) self.dataset.SetMetadataItem( 'stop_date', (parse(gdalMetadata['LAST_LINE_TIME']).isoformat())) self.dataset.SetMetadataItem('sensor', 'SAR') self.dataset.SetMetadataItem('satellite', 'Radarsat2') self.dataset.SetMetadataItem('mapper', 'radarsat2')
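The SAR look-direction band above is built by interpolating the sine and cosine of the angle separately and recombining them, so values near the 0/360 wrap are not averaged across the discontinuity. A standalone sketch of that trick (the zoom factor and names are illustrative):

import numpy as np
from scipy.ndimage import zoom

def zoom_direction(direction_deg, zoom_factor):
    u = np.sin(np.deg2rad(direction_deg))
    v = np.cos(np.deg2rad(direction_deg))
    u2, v2 = zoom(u, zoom_factor), zoom(v, zoom_factor)
    # recombine the interpolated components into an angle in [0, 360)
    return np.mod(np.degrees(np.arctan2(u2, v2)), 360)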
def test_dont_init_if_gdal_AutoCreateWarpedVRT_fails(self, mock_gdal):
    mock_gdal.AutoCreateWarpedVRT.return_value = None
    with self.assertRaises(NansatProjectionError):
        Domain(ds=gdal.Open(self.test_file),
               srs="+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs")
def __init__(self, fileName, gdalDataset, gdalMetadata, product_type='RVL',
             GCP_COUNT=10, **kwargs):
    '''
    Parameters
    ----------
    product_type: string
        Sentinel-1 level-2 ocean product type/component, i.e. ocean swell
        spectra (OSW), ocean wind field (OWI), or radial surface velocity
        (RVL) (RVL is the default)
    GCP_COUNT : int
        number of GCPs along each dimension
    '''
    fPathName, fExt = os.path.splitext(fileName)

    # List of Sentinel-1 level-2 components
    unwanted_product_components = ['osw', 'owi', 'rvl']
    # Remove requested 'product_type' from list of unwanted
    unwanted_product_components.pop(
        unwanted_product_components.index(product_type.lower()))

    # Check if it is Sentinel-1 (or ASAR) level-2 (in S1 data format)
    if not gdalMetadata or 'NC_GLOBAL#TITLE' not in gdalMetadata:
        raise WrongMapperError
    else:
        title = gdalMetadata['NC_GLOBAL#TITLE']

    # Raise error if it is not Sentinel-1 format
    if 'Sentinel-1' not in title and 'ASA' not in title:
        raise WrongMapperError

    metadata = {}
    for key, val in gdalMetadata.items():
        new_key = key.split('#')[-1]
        metadata[new_key] = val

    subDatasets = gdalDataset.GetSubDatasets()
    fileNames = [f[0] for f in subDatasets]

    rm_bands = []
    # Find all data that is not relevant for the selected product type
    # and get bands of longitude, latitude and zero doppler time
    for i, f in enumerate(fileNames):
        if f.split(':')[-1][:3] in unwanted_product_components:
            rm_bands.append(i)
        if 'Lon' in f.split(':')[-1]:
            lon_ds = gdal.Open(f)
            rm_bands.append(i)
        if 'Lat' in f.split(':')[-1]:
            lat_ds = gdal.Open(f)
            rm_bands.append(i)
        if 'ZeroDopplerTime' in f.split(':')[-1]:
            zdt_ds = gdal.Open(f)
            rm_bands.append(i)
    # Remove bands in rm_bands from the list of bands to add to the Nansat
    # object
    fileNames = [f for i, f in enumerate(fileNames) if i not in rm_bands]
    # ( 'Lon' in f.split(':')[-1] or
    #   'Lat' in f.split(':')[-1] or
    #   'ZeroDopplerTime' in f.split(':')[-1] )]

    # create empty VRT dataset
    VRT.__init__(self, gdal.Open(subDatasets[0][0]), srcMetadata=metadata)

    # The zero Doppler time grid is 3-dimensional - the last dimension is a
    # char array with the time as year, month, day, etc.
    # Will not bother with it yet...
#for iBand in range(zdt_ds.RasterCount): # subBand = zdt_ds.GetRasterBand(iBand+1) XSize = lon_ds.RasterXSize YSize = lon_ds.RasterYSize # get projection from the lon and lat datasets longitude = lon_ds.ReadAsArray() latitude = lat_ds.ReadAsArray() # estimate step of GCPs step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT)) step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT)) self.logger.debug('gcpCount: >%s<, %d %d %f %d %d', title, latitude.shape[0], latitude.shape[1], GCP_COUNT, step0, step1) # estimate pixel/line step of the geolocation arrays pixelStep = 1 lineStep = 1 self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep)) # generate list of GCPs dx = .5 dy = .5 gcps = [] k = 0 for i0 in range(0, latitude.shape[0], step0): for i1 in range(0, latitude.shape[1], step1): # create GCP with X,Y,pixel,line from lat/lon matrices lon = float(longitude[i0, i1]) lat = float(latitude[i0, i1]) if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90): gcp = gdal.GCP(lon, lat, 0, i1 * pixelStep + dx, i0 * lineStep + dy) self.logger.debug('%d %d %d %f %f', k, gcp.GCPPixel, gcp.GCPLine, gcp.GCPX, gcp.GCPY) gcps.append(gcp) k += 1 # append GCPs and lat/lon projection to the vsiDataset self.dataset.SetGCPs(gcps, NSR().wkt) # define band specific parameters metaDict = [] geoFileDict = {} xDatasetSource = '' yDatasetSource = '' for i, fileName in enumerate(fileNames): band = gdal.Open(fileName) # check that the band size is the same size as the latitude and # longitude grids if (band.RasterXSize != XSize or band.RasterYSize != YSize): raise IndexError(('Size of sub-dataset is different from size ' 'of longitude and latitude grids')) bandMetadata = band.GetMetadata() # generate src metadata src = {'SourceFilename': fileName, 'SourceBand': 1} # Generate dst metadata short_name = fileName.split(':')[-1] dst = { 'name': short_name, 'short_name': short_name, 'long_name': bandMetadata[short_name + '#long_name'], 'units': bandMetadata[short_name + '#units'], #'wkv': , } # append band with src and dst dictionaries metaDict.append({'src': src, 'dst': dst}) # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) metaDict = [] for i in range(self.dataset.RasterCount): if 'Nrcs' in self.dataset.GetRasterBand(i + 1).GetMetadata()['name']: metaDict.append({ 'src': { 'SourceFilename': (self.dataset.GetRasterBand( i + 1).GetMetadata()['SourceFilename']), 'SourceBand': 1 }, 'dst': { 'short_name': 'sigma0', 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave', 'PixelFunctionType': 'dB2pow', 'polarization': (self.dataset.GetMetadata()['POLARISATION']), 'suffix': self.dataset.GetMetadata()['POLARISATION'], 'dataType': 6, } }) # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) # set time self.dataset.SetMetadataItem( 'time_coverage_start', parse(self.dataset.GetMetadata() ['SOURCE_ACQUISITION_UTC_TIME']).isoformat())
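The sigma0 band above is produced by the 'dB2pow' pixel function applied to the Nrcs band; assuming that function does what its name suggests, it converts the level-2 NRCS from dB to linear power. A one-line numpy equivalent of that assumed conversion:

import numpy as np

def db2pow(nrcs_db):
    # dB -> linear power
    return 10.0 ** (np.asarray(nrcs_db, dtype='float32') / 10.0)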
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): ''' Create CSKS VRT ''' if fileName.split('/')[-1][0:4] != "CSKS": raise WrongMapperError # Get coordinates metadata = gdalMetadata['Estimated_Bottom_Left_Geodetic_Coordinates'] bottom_left_lon = float(metadata.split(' ')[1]) bottom_left_lat = float(metadata.split(' ')[0]) metadata = gdalMetadata['Estimated_Bottom_Right_Geodetic_Coordinates'] bottom_right_lon = float(metadata.split(' ')[1]) bottom_right_lat = float(metadata.split(' ')[0]) metadata = gdalMetadata['Estimated_Top_Left_Geodetic_Coordinates'] top_left_lon = float(metadata.split(' ')[1]) top_left_lat = float(metadata.split(' ')[0]) metadata = gdalMetadata['Estimated_Top_Right_Geodetic_Coordinates'] top_right_lon = float(metadata.split(' ')[1]) top_right_lat = float(metadata.split(' ')[0]) metadata = gdalMetadata['Scene_Centre_Geodetic_Coordinates'] center_lon = float(metadata.split(' ')[1]) center_lat = float(metadata.split(' ')[0]) # Get sub-datasets subDatasets = gdalDataset.GetSubDatasets() # Get file names from dataset or subdataset if subDatasets.__len__() == 1: fileNames = [fileName] else: fileNames = [f[0] for f in subDatasets] for i, elem in enumerate(fileNames): if fileNames[i][-3:] == 'QLK': fileNames.pop(i) #print fileNames subDataset = gdal.Open(fileNames[0]) # generate list of GCPs gcps = [] # create GCP with X,Y,Z(?),pixel,line from lat/lon matrices gcp = gdal.GCP(float(bottom_left_lon), float(bottom_left_lat), 0, 0, 0) gcps.append(gcp) #self.logger.debug('%d %d %d %f %f', 0, gcp.GCPPixel, gcp.GCPLine, # gcp.GCPX, gcp.GCPY) gcp = gdal.GCP(float(bottom_right_lon), float(bottom_right_lat), 0, subDataset.RasterXSize, 0) gcps.append(gcp) #self.logger.debug('%d %d %d %f %f', 1, gcp.GCPPixel, gcp.GCPLine, # gcp.GCPX, gcp.GCPY) gcp = gdal.GCP(float(top_left_lon), float(top_left_lat), 0, 0, subDataset.RasterYSize) gcps.append(gcp) #self.logger.debug('%d %d %d %f %f', 2, gcp.GCPPixel, gcp.GCPLine, # gcp.GCPX, gcp.GCPY) gcp = gdal.GCP(float(top_right_lon), float(top_right_lat), 0, subDataset.RasterXSize, subDataset.RasterYSize) gcps.append(gcp) #self.logger.debug('%d %d %d %f %f', 3, gcp.GCPPixel, gcp.GCPLine, # gcp.GCPX, gcp.GCPY) gcp = gdal.GCP(float(center_lon), float(center_lat), 0, int(np.round(subDataset.RasterXSize / 2.)), int(round(subDataset.RasterYSize / 2.))) gcps.append(gcp) #self.logger.debug('%d %d %d %f %f', 4, gcp.GCPPixel, gcp.GCPLine, # gcp.GCPX, gcp.GCPY) # append GCPs and lat/lon projection to the vsiDataset latlongSRS = osr.SpatialReference() latlongSRS.ImportFromProj4( "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs") latlongSRSWKT = latlongSRS.ExportToWkt() # create empty VRT dataset with geolocation only VRT.__init__(self, srcRasterXSize=subDataset.RasterXSize, srcRasterYSize=subDataset.RasterYSize, srcGCPs=gcps, srcGCPProjection=latlongSRSWKT) #print self.fileName # Read all bands later #band='S01' #res='SBI' # Use only full size "original" datasets for i, elem in enumerate(fileNames): if fileNames[i][-3:] == 'SBI': # Add real and imaginary raw counts as bands src = { 'SourceFilename': fileNames[i], 'SourceBand': 1, 'DataType': gdal.GDT_Int16 } dst = { 'dataType': gdal.GDT_Float32, 'name': 'RawCounts_%s_real' % gdalMetadata[fileNames[i][-7:-4] + '_Polarisation'] } self._create_band(src, dst) src = { 'SourceFilename': fileNames[i], 'SourceBand': 2, 'DataType': gdal.GDT_Int16 } dst = { 'dataType': gdal.GDT_Float32, 'name': 'RawCounts_%s_imaginary' % gdalMetadata[fileNames[i][-7:-4] + '_Polarisation'] } self._create_band(src, dst) 
self.dataset.FlushCache() for i, elem in enumerate(fileNames): if fileNames[i][-3:] == 'SBI': # Calculate sigma0 scaling factor Rref = float(gdalMetadata['Reference_Slant_Range']) Rexp = float(gdalMetadata['Reference_Slant_Range_Exponent']) alphaRef = float(gdalMetadata['Reference_Incidence_Angle']) F = float(gdalMetadata['Rescaling_Factor']) K = float(gdalMetadata[fileNames[i][-7:-4] + '_Calibration_Constant']) Ftot = Rref**(2. * Rexp) Ftot *= np.sin(alphaRef * np.pi / 180.0) Ftot /= F**2. Ftot /= K #print Ftot src = [{ 'SourceFilename': self.fileName, 'DataType': gdal.GDT_Float32, 'SourceBand': 2 * i + 1, 'ScaleRatio': np.sqrt(Ftot) }, { 'SourceFilename': self.fileName, 'DataType': gdal.GDT_Float32, 'SourceBand': 2 * i + 2, 'ScaleRatio': np.sqrt(Ftot) }] dst = { 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave', 'PixelFunctionType': 'RawcountsToSigma0_CosmoSkymed_SBI', 'polarisation': gdalMetadata[fileNames[i][-7:-4] + '_Polarisation'], 'name': 'sigma0_%s' % gdalMetadata[fileNames[i][-7:-4] + '_Polarisation'], 'SatelliteID': gdalMetadata['Satellite_ID'], 'dataType': gdal.GDT_Float32 } #'pass': gdalMetadata[''] # - I can't find this in the metadata... self._create_band(src, dst) self.dataset.FlushCache()
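The Cosmo-SkyMed calibration above folds the metadata constants into a single factor Ftot and applies sqrt(Ftot) as a ScaleRatio to the real and imaginary raw counts before the 'RawcountsToSigma0_CosmoSkymed_SBI' pixel function squares and sums them. A numpy sketch of the end-to-end arithmetic under that reading (function and argument names are illustrative):

import numpy as np

def csk_sigma0(i_counts, q_counts, rref, rexp, alpha_ref_deg, rescaling_factor, cal_constant):
    # Ftot from the product metadata, as computed in the mapper
    ftot = (rref ** (2.0 * rexp)) * np.sin(np.deg2rad(alpha_ref_deg))
    ftot /= rescaling_factor ** 2.0
    ftot /= cal_constant
    # sigma0 = |scaled complex raw counts|^2 = (I^2 + Q^2) * Ftot
    return (np.asarray(i_counts, dtype='float64') ** 2 +
            np.asarray(q_counts, dtype='float64') ** 2) * ftot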
def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None, mask='', **kwargs): ''' Create VRT Parameters ----------- fileName : string gdalDataset : gdal dataset gdalMetadata : gdal metadata latlonGrid : numpy 2 layered 2D array with lat/lons of desired grid ''' # test if input files is ASCAT iDir, iFile = os.path.split(fileName) iFileName, iFileExt = os.path.splitext(iFile) try: assert iFileName[0:6] == 'ascat_' and iFileExt == '.nc' except: raise WrongMapperError # Create geolocation subDataset = gdal.Open('NETCDF:"' + fileName + '":lat') self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize, srcRasterYSize=subDataset.RasterYSize) GeolocMetaDict = [{ 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":lon'), 'SourceBand': 1, 'ScaleRatio': 0.00001, 'ScaleOffset': -360 }, 'dst': {} }, { 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":lat'), 'SourceBand': 1, 'ScaleRatio': 0.00001, 'ScaleOffset': 0 }, 'dst': {} }] self.GeolocVRT._create_bands(GeolocMetaDict) GeolocObject = GeolocationArray( xVRT=self.GeolocVRT, yVRT=self.GeolocVRT, # x = lon, y = lat xBand=1, yBand=2, lineOffset=0, pixelOffset=0, lineStep=1, pixelStep=1) # create empty VRT dataset with geolocation only VRT.__init__(self, srcRasterXSize=subDataset.RasterXSize, srcRasterYSize=subDataset.RasterYSize, gdalDataset=subDataset, geolocationArray=GeolocObject, srcProjection=GeolocObject.d['SRS']) # Scale and NODATA should ideally be taken directly from raw file metaDict = [{ 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":wind_speed'), 'ScaleRatio': 0.01, 'NODATA': -32767 }, 'dst': { 'name': 'wind_speed', 'wkv': 'wind_speed' } }, { 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":wind_dir'), 'ScaleRatio': 0.1, 'NODATA': -32767 }, 'dst': { 'name': 'wind_direction', 'wkv': 'wind_direction' } }] self._create_bands(metaDict) # This should not be necessary # - should be provided by GeolocationArray! self.dataset.SetProjection(GeolocObject.d['SRS']) # Add time startTime = datetime.datetime(int(iFileName[6:10]), int(iFileName[10:12]), int(iFileName[12:14]), int(iFileName[15:17]), int(iFileName[17:19]), int(iFileName[19:21])) # Adding valid time to dataset self.dataset.SetMetadataItem('time_coverage_start', startTime.isoformat()) self.dataset.SetMetadataItem('time_coverage_end', startTime.isoformat()) # set SADCAT specific metadata self.dataset.SetMetadataItem('sensor', 'ASCAT') self.dataset.SetMetadataItem('satellite', 'Metop-A') warnings.warn("Setting satellite to Metop-A - update mapper if it is" \ " e.g. Metop-B") self.dataset.SetMetadataItem('mapper', 'ascat_nasa')
def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None, mask='', **kwargs): ''' Create VRT Parameters ----------- fileName : string gdalDataset : gdal dataset gdalMetadata : gdal metadata latlonGrid : numpy 2 layered 2D array with lat/lons of desired grid ''' # test if input files is ASCAT iDir, iFile = os.path.split(fileName) iFileName, iFileExt = os.path.splitext(iFile) try: assert iFileName[0:6] == 'ascat_' and iFileExt == '.nc' except: raise WrongMapperError # Create geolocation subDataset = gdal.Open('NETCDF:"' + fileName + '":lat') self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize, srcRasterYSize=subDataset.RasterYSize) GeolocMetaDict = [{ 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":lon'), 'SourceBand': 1, 'ScaleRatio': 0.00001, 'ScaleOffset': -360 }, 'dst': {} }, { 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":lat'), 'SourceBand': 1, 'ScaleRatio': 0.00001, 'ScaleOffset': 0 }, 'dst': {} }] self.GeolocVRT._create_bands(GeolocMetaDict) GeolocObject = GeolocationArray( xVRT=self.GeolocVRT, yVRT=self.GeolocVRT, # x = lon, y = lat xBand=1, yBand=2, lineOffset=0, pixelOffset=0, lineStep=1, pixelStep=1) # create empty VRT dataset with geolocation only VRT.__init__(self, srcRasterXSize=subDataset.RasterXSize, srcRasterYSize=subDataset.RasterYSize, gdalDataset=subDataset, geolocationArray=GeolocObject, srcProjection=GeolocObject.d['SRS']) # Scale and NODATA should ideally be taken directly from raw file metaDict = [{ 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":wind_speed'), 'ScaleRatio': 0.01, 'NODATA': -32767 }, 'dst': { 'name': 'windspeed', 'wkv': 'wind_speed' } }, { 'src': { 'SourceFilename': ('NETCDF:"' + fileName + '":wind_dir'), 'ScaleRatio': 0.1, 'NODATA': -32767 }, 'dst': { 'name': 'winddirection', 'wkv': 'wind_from_direction' } }] self._create_bands(metaDict) # This should not be necessary # - should be provided by GeolocationArray! self.dataset.SetProjection(GeolocObject.d['SRS']) # Add time startTime = datetime.datetime(int(iFileName[6:10]), int(iFileName[10:12]), int(iFileName[12:14]), int(iFileName[15:17]), int(iFileName[17:19]), int(iFileName[19:21])) # Adding valid time to dataset self.dataset.SetMetadataItem('time_coverage_start', startTime.isoformat()) self.dataset.SetMetadataItem('time_coverage_end', startTime.isoformat()) # Get dictionary describing the instrument and platform according to # the GCMD keywords mm = pti.get_gcmd_instrument('ascat') ee = pti.get_gcmd_platform('metop-a') # TODO: Validate that the found instrument and platform are indeed what # we want.... self.dataset.SetMetadataItem('instrument', json.dumps(mm)) self.dataset.SetMetadataItem('platform', json.dumps(ee))
def __init__(self, filename, gdalDataset, gdalMetadata, fast=False, **kwargs): if kwargs.get('manifestonly', False): fast = True NansatFutureWarning( 'manifestonly option will be deprecated. Use: fast=True') if not os.path.split(filename.rstrip('/'))[1][:3] in ['S1A', 'S1B']: raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename) if not IMPORT_SCIPY: raise NansatReadError( 'Sentinel-1 data cannot be read because scipy is not installed' ) if zipfile.is_zipfile(filename): zz = zipfile.PyZipFile(filename) # Assuming the file names are consistent, the polarization # dependent data should be sorted equally such that we can use the # same indices consistently for all the following lists # THIS IS NOT THE CASE... mds_files = [ '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist() if 'measurement/s1' in fn ] calibration_files = [ '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist() if 'annotation/calibration/calibration-s1' in fn ] noise_files = [ '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist() if 'annotation/calibration/noise-s1' in fn ] annotation_files = [ '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist() if 'annotation/s1' in fn ] manifest_files = [ '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist() if 'manifest.safe' in fn ] zz.close() else: mds_files = glob.glob('%s/measurement/s1*' % filename) calibration_files = glob.glob( '%s/annotation/calibration/calibration-s1*' % filename) noise_files = glob.glob('%s/annotation/calibration/noise-s1*' % filename) annotation_files = glob.glob('%s/annotation/s1*' % filename) manifest_files = glob.glob('%s/manifest.safe' % filename) if (not mds_files or not calibration_files or not noise_files or not annotation_files or not manifest_files): raise WrongMapperError(filename) # convert list of MDS files into dictionary. Keys - polarizations in upper case. 
mds_files = { os.path.basename(ff).split('-')[3].upper(): ff for ff in mds_files } polarizations = list(mds_files.keys()) # read annotation files annotation_data = self.read_annotation(annotation_files) if not fast: annotation_data = Mapper.correct_geolocation_data(annotation_data) # read manifest file manifest_data = self.read_manifest_data(manifest_files[0]) # very fast constructor without any bands only with some metadata and geolocation self._init_empty(manifest_data, annotation_data) # skip adding bands in the fast mode and RETURN if fast: return # Open data files with GDAL gdalDatasets = {} for pol in polarizations: gdalDatasets[pol] = gdal.Open(mds_files[pol]) if not gdalDatasets[pol]: raise WrongMapperError('%s: No Sentinel-1 datasets found' % mds_files[pol]) # Check metadata to confirm it is Sentinel-1 L1 metadata = gdalDatasets[polarizations[0]].GetMetadata() # create full size VRTs with incidenceAngle and elevationAngle annotation_vrts = self.vrts_from_arrays( annotation_data, ['incidenceAngle', 'elevationAngle']) self.band_vrts.update(annotation_vrts) # create full size VRTS with calibration LUT calibration_names = ['sigmaNought', 'betaNought'] calibration_list_tag = 'calibrationVectorList' for calibration_file in calibration_files: pol = '_' + os.path.basename(calibration_file).split( '-')[4].upper() xml = self.read_vsi(calibration_file) calibration_data = self.read_calibration(xml, calibration_list_tag, calibration_names, pol) calibration_vrts = self.vrts_from_arrays(calibration_data, calibration_names, pol, True, 1) self.band_vrts.update(calibration_vrts) # create full size VRTS with noise LUT for noise_file in noise_files: pol = '_' + os.path.basename(noise_file).split('-')[4].upper() xml = self.read_vsi(noise_file) if '<noiseVectorList' in xml: noise_list_tag = 'noiseVectorList' noise_name = 'noiseLut' elif '<noiseRangeVectorList' in xml: noise_list_tag = 'noiseRangeVectorList' noise_name = 'noiseRangeLut' noise_data = self.read_calibration(xml, noise_list_tag, [noise_name], pol) noise_vrts = self.vrts_from_arrays(noise_data, [noise_name], pol, True, 1) self.band_vrts.update(noise_vrts) #### Create metaDict: dict with metadata for all bands metaDict = [] bandNumberDict = {} bnmax = 0 for pol in polarizations: dsPath, dsName = os.path.split(mds_files[pol]) name = 'DN_%s' % pol # A dictionary of band numbers is needed for the pixel function # bands further down. This is not the best solution. It would be # better to have a function in VRT that returns the number given a # band name. This function exists in Nansat but could perhaps be # moved to VRT? The existing nansat function could just call the # VRT one... bandNumberDict[name] = bnmax + 1 bnmax = bandNumberDict[name] band = gdalDatasets[pol].GetRasterBand(1) dtype = band.DataType metaDict.append({ 'src': { 'SourceFilename': mds_files[pol], 'SourceBand': 1, 'DataType': dtype, }, 'dst': { 'name': name, }, }) # add bands with metadata and corresponding values to the empty VRT self.create_bands(metaDict) ''' Calibration should be performed as s0 = DN^2/sigmaNought^2, where sigmaNought is from e.g. annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml, and DN is the Digital Numbers in the tiff files. Also the noise should be subtracted. 
See https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT The noise correction/subtraction is implemented in an independent package "sentinel1denoised" See https://github.com/nansencenter/sentinel1denoised ''' # Get look direction longitude, latitude = self.transform_points( calibration_data['pixel'].flatten(), calibration_data['line'].flatten()) longitude.shape = calibration_data['pixel'].shape latitude.shape = calibration_data['pixel'].shape sat_heading = initial_bearing(longitude[:-1, :], latitude[:-1, :], longitude[1:, :], latitude[1:, :]) look_direction = scipy.ndimage.interpolation.zoom( np.mod(sat_heading + 90, 360), (np.shape(longitude)[0] / (np.shape(longitude)[0] - 1.), 1)) # Decompose, to avoid interpolation errors around 0 <-> 360 look_direction_u = np.sin(np.deg2rad(look_direction)) look_direction_v = np.cos(np.deg2rad(look_direction)) look_u_VRT = VRT.from_array(look_direction_u) look_v_VRT = VRT.from_array(look_direction_v) lookVRT = VRT.from_lonlat(longitude, latitude) lookVRT.create_band([{ 'SourceFilename': look_u_VRT.filename, 'SourceBand': 1 }, { 'SourceFilename': look_v_VRT.filename, 'SourceBand': 1 }], {'PixelFunctionType': 'UVToDirectionTo'}) # Blow up to full size lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize, self.dataset.RasterYSize, 1) # Store VRTs so that they are accessible later self.band_vrts['look_u_VRT'] = look_u_VRT self.band_vrts['look_v_VRT'] = look_v_VRT self.band_vrts['lookVRT'] = lookVRT metaDict = [] # Add bands to full size VRT for pol in polarizations: name = 'sigmaNought_%s' % pol bandNumberDict[name] = bnmax + 1 bnmax = bandNumberDict[name] metaDict.append({ 'src': { 'SourceFilename': (self.band_vrts[name].filename), 'SourceBand': 1 }, 'dst': { 'name': name } }) name = 'noise_%s' % pol bandNumberDict[name] = bnmax + 1 bnmax = bandNumberDict[name] metaDict.append({ 'src': { 'SourceFilename': self.band_vrts['%s_%s' % (noise_name, pol)].filename, 'SourceBand': 1 }, 'dst': { 'name': name } }) name = 'look_direction' bandNumberDict[name] = bnmax + 1 bnmax = bandNumberDict[name] metaDict.append({ 'src': { 'SourceFilename': self.band_vrts['lookVRT'].filename, 'SourceBand': 1 }, 'dst': { 'wkv': 'sensor_azimuth_angle', 'name': name } }) for pol in polarizations: dsPath, dsName = os.path.split(mds_files[pol]) name = 'sigma0_%s' % pol bandNumberDict[name] = bnmax + 1 bnmax = bandNumberDict[name] metaDict.append({ 'src': [{ 'SourceFilename': self.filename, 'SourceBand': bandNumberDict['DN_%s' % pol], }, { 'SourceFilename': self.band_vrts['sigmaNought_%s' % pol].filename, 'SourceBand': 1 }], 'dst': { 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave', 'PixelFunctionType': 'Sentinel1Calibration', 'polarization': pol, 'suffix': pol, }, }) name = 'beta0_%s' % pol bandNumberDict[name] = bnmax + 1 bnmax = bandNumberDict[name] metaDict.append({ 'src': [{ 'SourceFilename': self.filename, 'SourceBand': bandNumberDict['DN_%s' % pol] }, { 'SourceFilename': self.band_vrts['betaNought_%s' % pol].filename, 'SourceBand': 1 }], 'dst': { 'wkv': 'surface_backwards_brightness_coefficient_of_radar_wave', 'PixelFunctionType': 'Sentinel1Calibration', 'polarization': pol, 'suffix': pol, }, }) self.create_bands(metaDict) # Add incidence angle as band name = 'incidence_angle' bandNumberDict[name] = bnmax + 1 bnmax = bandNumberDict[name] src = { 'SourceFilename': self.band_vrts['incidenceAngle'].filename, 'SourceBand': 1 } dst = {'wkv': 'angle_of_incidence', 'name': name} 
        # Add incidence angle as band
        name = 'incidence_angle'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.band_vrts['incidenceAngle'].filename,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_incidence',
               'name': name}
        self.create_band(src, dst)
        self.dataset.FlushCache()

        # Add elevation angle as band
        name = 'elevation_angle'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.band_vrts['elevationAngle'].filename,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_elevation',
               'name': name}
        self.create_band(src, dst)
        self.dataset.FlushCache()

        # Add sigma0_VV
        if 'VV' not in polarizations and 'HH' in polarizations:
            name = 'sigma0_VV'
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            src = [{'SourceFilename': self.filename,
                    'SourceBand': bandNumberDict['DN_HH'],
                    },
                   {'SourceFilename': self.band_vrts['sigmaNought_HH'].filename,
                    'SourceBand': 1,
                    },
                   {'SourceFilename': self.band_vrts['incidenceAngle'].filename,
                    'SourceBand': 1}]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self.create_band(src, dst)
            self.dataset.FlushCache()
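

# Illustrative sketch only (not part of the mapper above): the same
# angle-decomposition trick used for the look direction, i.e. interpolating
# sin/cos components and recombining with arctan2 so that values near the
# 0/360 degree boundary are not averaged towards ~180. The function name and
# the use of scipy.ndimage.zoom in place of the VRT-based resizing are
# assumptions made for this example.
def _example_resize_direction(direction_deg, zoom_factor):
    import numpy as np
    from scipy.ndimage import zoom
    u = np.sin(np.deg2rad(direction_deg))
    v = np.cos(np.deg2rad(direction_deg))
    return np.mod(np.degrees(np.arctan2(zoom(u, zoom_factor),
                                        zoom(v, zoom_factor))), 360)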
    def __init__(self, fileName, gdalDataset, gdalMetadata, GCP_COUNT=10, **kwargs):
        ''' Create VRT

        Parameters
        ----------
        GCP_COUNT : int
            number of GCPs along each dimension
        '''
        titles = ['HMODISA Level-2 Data', 'MODISA Level-2 Data',
                  'MERIS Level-2 Data', 'GOCI Level-2 Data',
                  'VIIRSN Level-2 Data']

        # should raise error in case of not obpg_l2 file
        try:
            title = gdalMetadata["Title"]
        except:
            raise WrongMapperError

        if title not in titles:
            raise WrongMapperError

        # get subdataset and parse to VRT.__init__()
        # for retrieving geo-metadata
        # but NOT from longitude or latitude because it can be smaller!
        subDatasets = gdalDataset.GetSubDatasets()
        for subDataset in subDatasets:
            if ('longitude' not in subDataset[1] and
                    'latitude' not in subDataset[1]):
                gdalSubDataset = gdal.Open(subDataset[0])
                break

        if title == 'GOCI Level-2 Data':
            # set GOCI projection parameters
            rasterXSize = 5567
            rasterYSize = 5685
            proj4 = '+proj=ortho +lat_0=36 +lon_0=130 +units=m +ellps=WGS84 +datum=WGS84 +no_defs'
            srs = osr.SpatialReference()
            srs.ImportFromProj4(proj4)
            projection = srs.ExportToWkt()
            geoTransform = (-1391500.0, 500.0, 0.0, 1349500.0, 0.0, -500.0)

            # create empty VRT dataset with georeference only
            VRT.__init__(self,
                         srcGeoTransform=geoTransform,
                         srcProjection=projection,
                         srcRasterXSize=rasterXSize,
                         srcRasterYSize=rasterYSize)
        else:
            # create empty VRT dataset with geolocation only
            VRT.__init__(self, gdalSubDataset)

        # parts of dictionary for all Reflectances
        # dictRrs = {'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air', 'wavelength': '412'}

        # dictionary for all possible bands
        allBandsDict = {
            'Rrs': {
                'src': {},
                'dst': {
                    'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air'
                }
            },
            'Kd': {
                'src': {},
                'dst': {
                    'wkv': 'volume_attenuation_coefficient_of_downwelling_radiative_flux_in_sea_water'
                }
            },
            'chlor_a': {
                'src': {},
                'dst': {
                    'wkv': 'mass_concentration_of_chlorophyll_a_in_sea_water',
                    'case': 'I'
                }
            },
            'cdom_index': {
                'src': {},
                'dst': {
                    'wkv': 'volume_absorption_coefficient_of_radiative_flux_in_sea_water_due_to_dissolved_organic_matter',
                    'case': 'II'
                }
            },
            'sst': {
                'src': {},
                'dst': {
                    'wkv': 'sea_surface_temperature'
                }
            },
            'sst4': {
                'src': {},
                'dst': {
                    'wkv': 'sea_surface_temperature'
                }
            },
            'l2_flags': {
                'src': {
                    'SourceType': 'SimpleSource',
                    'DataType': 4
                },
                'dst': {
                    'wkv': 'quality_flags',
                    'dataType': 4
                }
            },
            'qual_sst': {
                'src': {
                    'SourceType': 'SimpleSource',
                    'DataType': 4
                },
                'dst': {
                    'wkv': 'quality_flags',
                    'name': 'qual_sst',
                    'dataType': 4
                }
            },
            'qual_sst4': {
                'src': {
                    'SourceType': 'SimpleSource',
                    'DataType': 4
                },
                'dst': {
                    'wkv': 'quality_flags',
                    'name': 'qual_sst4',
                    'dataType': 4
                }
            },
            'latitude': {
                'src': {},
                'dst': {
                    'wkv': 'latitude'
                }
            },
            'longitude': {
                'src': {},
                'dst': {
                    'wkv': 'longitude'
                }
            }
        }
        # loop through available bands and generate metaDict (non fixed)
        metaDict = []
        bandNo = 0
        for subDataset in subDatasets:
            # get sub dataset name
            subDatasetName = subDataset[1].split(' ')[1]
            self.logger.debug('Subdataset: %s' % subDataset[1])
            self.logger.debug('Subdataset name: "%s"' % subDatasetName)
            # get wavelength if applicable, get dataset name without wavelength
            try:
                wavelength = int(subDatasetName.split('_')[-1])
            except:
                wavelength = None
                subBandName = subDatasetName
            else:
                subBandName = subDatasetName.split('_')[0]

            self.logger.debug('subBandName, wavelength: %s %s' %
                              (subBandName, str(wavelength)))

            if subBandName in allBandsDict:
                # get name, slope, intercept
                self.logger.debug('name: %s' % subBandName)
                tmpSubDataset = gdal.Open(subDataset[0])
                tmpSubMetadata = tmpSubDataset.GetMetadata()
                slope = tmpSubMetadata.get('slope', '1')
                intercept = tmpSubMetadata.get('intercept', '0')
                self.logger.debug('slope, intercept: %s %s' % (slope, intercept))
                # create meta entry
                metaEntry = {
                    'src': {
                        'SourceFilename': subDataset[0],
                        'SourceBand': 1,
                        'ScaleRatio': slope,
                        'ScaleOffset': intercept
                    },
                    'dst': {}
                }
                # add more to src
                for srcKey in allBandsDict[subBandName]['src']:
                    metaEntry['src'][srcKey] = allBandsDict[subBandName]['src'][srcKey]
                # add dst from allBandsDict
                for dstKey in allBandsDict[subBandName]['dst']:
                    metaEntry['dst'][dstKey] = allBandsDict[subBandName]['dst'][dstKey]
                # add wavelength, band name to dst
                if wavelength is not None:
                    metaEntry['dst']['suffix'] = str(wavelength)
                    metaEntry['dst']['wavelength'] = str(wavelength)

                # append band metadata to metaDict
                self.logger.debug('metaEntry: %d => %s' % (bandNo, str(metaEntry)))
                metaDict.append(metaEntry)
                bandNo += 1

                if subBandName == 'Rrs':
                    metaEntryRrsw = {
                        'src': [{
                            'SourceFilename': subDataset[0],
                            'SourceBand': 1,
                            'ScaleRatio': slope,
                            'ScaleOffset': intercept,
                            'DataType': 6
                        }],
                        'dst': {
                            'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_water',
                            'suffix': str(wavelength),
                            'wavelength': str(wavelength),
                            'PixelFunctionType': 'NormReflectanceToRemSensReflectance',
                        }
                    }
                    # append band metadata to metaDict
                    self.logger.debug('metaEntry: %d => %s' % (bandNo, str(metaEntryRrsw)))
                    metaDict.append(metaEntryRrsw)
                    bandNo += 1

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # set TIME
        startYear = int(gdalMetadata['Start Year'])
        startDay = int(gdalMetadata['Start Day'])
        startMillisec = int(gdalMetadata['Start Millisec'])
        startDate = datetime(startYear, 1, 1) + timedelta(startDay - 1, 0, 0, startMillisec)
        self._set_time(startDate)

        # skip adding georeference for GOCI
        if title == 'GOCI Level-2 Data':
            return

        self._remove_geotransform()

        # add geolocation
        geoMeta = self.geolocationArray.d
        if len(geoMeta) > 0:
            self.dataset.SetMetadata(geoMeta, 'GEOLOCATION')

        # add GCPs
        geolocationMetadata = gdalSubDataset.GetMetadata('GEOLOCATION')
        xDatasetSource = geolocationMetadata['X_DATASET']
        xDataset = gdal.Open(xDatasetSource)
        yDatasetSource = geolocationMetadata['Y_DATASET']
        yDataset = gdal.Open(yDatasetSource)
        longitude = xDataset.ReadAsArray()
        latitude = yDataset.ReadAsArray()

        # estimate pixel/line step of the geolocation arrays
        pixelStep = int(ceil(float(gdalSubDataset.RasterXSize) /
                             float(xDataset.RasterXSize)))
        lineStep = int(ceil(float(gdalSubDataset.RasterYSize) /
                            float(xDataset.RasterYSize)))
        self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))

        # ==== ADD GCPs and Projection ====
        # estimate step of GCPs
        step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
        step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))
        if str(title) == 'VIIRSN Level-2 Data':
            step0 = 64
        self.logger.debug('gcpCount: >%s<, %d %d %f %d %d',
                          title, latitude.shape[0], latitude.shape[1],
                          GCP_COUNT, step0, step1)

        # generate list of GCPs
        dx = .5
        dy = .5
        gcps = []
        k = 0
        for i0 in range(0, latitude.shape[0], step0):
            for i1 in range(0, latitude.shape[1], step1):
                # create GCP with X,Y,pixel,line from lat/lon matrices
                lon = float(longitude[i0, i1])
                lat = float(latitude[i0, i1])
                if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                    gcp = gdal.GCP(lon, lat, 0, i1 * pixelStep + dx, i0 * lineStep + dy)
                    self.logger.debug('%d %d %d %f %f', k, gcp.GCPPixel,
                                      gcp.GCPLine, gcp.GCPX, gcp.GCPY)
                    gcps.append(gcp)
                    k += 1

        # append GCPs and lat/lon projection to the vsiDataset
        self.dataset.SetGCPs(gcps, NSR().wkt)
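

# Hypothetical sanity check (not part of the mapper above): once GCPs and the
# lat/lon spatial reference are attached with SetGCPs, a GDAL GCP-based
# transformer can map pixel/line coordinates back to longitude/latitude for
# spot checks of the geolocation. The helper name is illustrative only.
def _example_pixel_to_lonlat(dataset, pixel, line):
    from osgeo import gdal
    transformer = gdal.Transformer(dataset, None, ['METHOD=GCP_TPS'])
    success, (lon, lat, _) = transformer.TransformPoint(0, pixel, line)
    return (lon, lat) if success else None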