def add_gcps_from_variables(self, filename):
    ''' Get GCPs from GCPPixel, GCPLine, GCPX, GCPY, GCPZ variables

    Parameters
    ----------
    filename : str
        path to the input netCDF file

    Returns
    -------
    gcps : list of gdal.GCP, or None
        None if the file cannot be opened or lacks any of the GCP variables
    '''
    gcpVariables = ['GCPX', 'GCPY', 'GCPZ', 'GCPPixel', 'GCPLine']
    # open input netCDF file for reading GCPs
    try:
        ncFile = Dataset(filename, 'r')
    except (TypeError, IOError) as e:
        self.logger.info('%s' % e)
        return None
    try:
        # check if all GCP variables exist in the file
        if not all(var in ncFile.variables for var in gcpVariables):
            return None
        # get data from GCP variables into a (5, N) array
        varData = np.array([ncFile.variables[var][:] for var in gcpVariables])
    finally:
        # always close the input file; the original leaked the handle on the
        # missing-variable early return
        ncFile.close()
    # create list of GDAL GCPs, one per column: (x, y, z, pixel, line)
    return [gdal.GCP(float(x), float(y), float(z), float(pixel), float(line))
            for x, y, z, pixel, line in varData.T]
def add_gcps_from_metadata(self, geoMetadata):
    '''Get GCPs from strings in metadata and insert in dataset'''
    gcpNames = ['GCPPixel', 'GCPLine', 'GCPX', 'GCPY']
    gcpAllValues = []
    # gather the numeric values for each GCP coordinate in turn
    for gcpName in gcpNames:
        # count metadata entries belonging to this coordinate
        lineCount = sum(1 for item in geoMetadata if gcpName in item)
        # concatenate the numbered string chunks into a single string
        joined = ''.join(geoMetadata['%s_%03d' % (gcpName, n)]
                         for n in range(lineCount))
        # strip whitespace and split the '|'-separated tokens into floats
        joined = joined.strip().replace(' ', '')
        gcpAllValues.append([float(tok) for tok in joined.split('|') if tok])
    # assemble GDAL GCPs: (X, Y, Z=0, Pixel, Line)
    gcps = []
    for i in range(len(gcpAllValues[0])):
        gcps.append(gdal.GCP(gcpAllValues[2][i], gcpAllValues[3][i], 0,
                             gcpAllValues[0][i], gcpAllValues[1][i]))
    return gcps
def get_gcps(self, flip_gcp_line=False):
    """ Get Ground Control Points for the dataset.

    Note that OPeNDAP streams and netCDF files are read differently by gdal.
    The OPeNDAP streams are read by specifying the get parameters to the
    OPeNDAP url. The get parameters specify the reference dimensions, e.g.,
    x and y. Since these are specified, the raster data is correctly
    referenced to the GCPs. However, when gdal reads a raster band from
    netCDF, it reads it "blindly". This is risky, since the definition of
    origo may be different in gdal vs the original data (e.g., first line
    starts in upper left corner or in lower left corner). For Sentinel-1,
    the raster data is flipped in relation to the GCPs, so we need to flip
    the GCP line vector as well.
    """
    # the first two characters of the polarisation select the variable names
    pol = self.ds.polarisation[:2]
    lon = self.ds.variables['GCP_longitude_' + pol][:]
    lat = self.ds.variables['GCP_latitude_' + pol][:]
    line = self.ds.variables['GCP_line_' + pol][:]
    if flip_gcp_line:
        # Flip line vector
        line = self.ds.dimensions['y'].size - line
    pixel = self.ds.variables['GCP_pixel_' + pol][:]
    return [gdal.GCP(float(lon[i0]), float(lat[i0]), 0,
                     float(pixel[i0]), float(line[i0]))
            for i0 in range(self.ds.dimensions['gcp_index'].size)]
def init_from_xml(self, productXml):
    ''' Fast init from metadata in XML only '''
    # raster size from the raster attributes node
    rasterAttributes = productXml.node('imageAttributes').node('rasterAttributes')
    numberOfLines = int(rasterAttributes.node('numberOfLines').value)
    numberOfSamples = int(rasterAttributes.node('numberOfSamplesPerLine').value)
    VRT.__init__(self,
                 srcRasterXSize=numberOfSamples,
                 srcRasterYSize=numberOfLines)

    # collect GCPs from the geolocation grid
    geogrid = productXml.node('imageAttributes').node(
        'geographicInformation').node('geolocationGrid')
    gcps = []
    for child in geogrid.children:
        imageCoord = child.node('imageCoordinate')
        geodeticCoord = child.node('geodeticCoordinate')
        gcps.append(gdal.GCP(float(geodeticCoord.node('longitude').value),
                             float(geodeticCoord.node('latitude').value),
                             0,
                             float(imageCoord.node('pixel').value),
                             float(imageCoord.node('line').value)))
    self.dataset.SetGCPs(gcps, NSR().wkt)

    # time coverage is taken from the orbit state vector time stamps
    stateVectors = productXml.node('sourceAttributes').node(
        'orbitAndAttitude').node('orbitInformation').nodeList('stateVector')
    dates = [parse(sv.node('timeStamp').value) for sv in stateVectors]
    self.dataset.SetMetadataItem('time_coverage_start', min(dates).isoformat())
    self.dataset.SetMetadataItem('time_coverage_end', max(dates).isoformat())

    self.dataset.SetMetadataItem(
        'platform', json.dumps(pti.get_gcmd_platform('radarsat-2')))
    self.dataset.SetMetadataItem(
        'instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
    self.dataset.SetMetadataItem('Entry Title', 'Radarsat-2 SAR')
    self.dataset.SetMetadataItem('Data Center', 'CSA')
    self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
    self.dataset.SetMetadataItem('Summary', 'Radarsat-2 SAR data')
def create_gcps(x, y, z, p, l):
    """ Create GCPs from geolocation data

    Parameters
    ----------
    x, y, z, p, l
        N-D arrays with value of X, Y, Z, Pixel and Line coordinates.
        X and Y are typically lon, lat, Z - height.

    Returns
    -------
    gcps : list with GDAL GCPs
    """
    # iterate all five arrays element-wise in flat (row-major) order
    return [gdal.GCP(xi, yi, zi, pi, li)
            for xi, yi, zi, pi, li in zip(x.flat, y.flat, z.flat,
                                          p.flat, l.flat)]
def __init__(self, filename, gdalDataset, gdalMetadata, GCP_STEP=20,
             MAX_LAT=90, MIN_LAT=50, resolution='low', **kwargs):
    ''' Create VRT from an AMSR2-L1R HDF5 file

    Parameters
    ----------
    GCP_STEP : int
        step (in grid cells) between GCPs along each dimension
    MAX_LAT, MIN_LAT : float
        latitude band; grid cells outside it do not become GCPs
    resolution : str
        'low' selects the 243-pixel-wide subdatasets, anything else
        selects the 486-pixel-wide ones

    Raises
    ------
    WrongMapperError : if the file is not an AMSR2-L1R product
    '''
    ifile = os.path.split(filename)[1]
    if not ifile.startswith('GW1AM2_') or not ifile.endswith('.h5'):
        raise WrongMapperError
    try:
        ProductName = gdalMetadata['ProductName']
        PlatformShortName = gdalMetadata['PlatformShortName']
        SensorShortName = gdalMetadata['SensorShortName']
    except KeyError:
        # was a bare "except:"; only a missing key means "not our file"
        raise WrongMapperError
    if (ProductName != 'AMSR2-L1R'
            or PlatformShortName != 'GCOM-W1'
            or SensorShortName != 'AMSR2'):
        raise WrongMapperError

    if resolution == 'low':
        subDatasetWidth = 243
    else:
        subDatasetWidth = 486

    # get GCPs from lon/lat grids of the 89A channel
    latGrid = gdal.Open(
        'HDF5:"%s"://Latitude_of_Observation_Point_for_89A'
        % filename).ReadAsArray()
    lonGrid = gdal.Open(
        'HDF5:"%s"://Longitude_of_Observation_Point_for_89A'
        % filename).ReadAsArray()
    if subDatasetWidth == 243:
        # low resolution uses every second sample of the 89A grid
        latGrid = latGrid[:, ::2]
        lonGrid = lonGrid[:, ::2]

    # sample GCPs on a regular grid keeping only coordinates inside the
    # requested latitude band; track min/max rows actually used so the
    # dataset can be cropped in Y
    dx = .5
    dy = .5
    gcps = []
    maxY = 0
    minY = latGrid.shape[0]
    for i0 in range(0, latGrid.shape[0], GCP_STEP):
        for i1 in range(0, latGrid.shape[1], GCP_STEP):
            # create GCP with X,Y,pixel,line from lat/lon matrices
            lon = float(lonGrid[i0, i1])
            lat = float(latGrid[i0, i1])
            if -180 <= lon <= 180 and MIN_LAT <= lat <= MAX_LAT:
                gcps.append(gdal.GCP(lon, lat, 0, i1 + dx, i0 + dy))
                maxY = max(maxY, i0)
                minY = min(minY, i0)
    yOff = minY
    ySize = maxY - minY

    # remove Y-offset from gcps
    for gcp in gcps:
        gcp.GCPLine -= yOff

    metaDict = []
    subDatasets = gdalDataset.GetSubDatasets()
    metadata = gdalDataset.GetMetadata()
    for subDataset in subDatasets:
        # select subdatasets for that resolution (width), skipping the
        # lat/lon coordinate grids
        if (subDatasetWidth == int(
                subDataset[1].split(']')[0].split('x')[-1])
                and 'Latitude' not in subDataset[0]
                and 'Longitude' not in subDataset[0]):
            name = subDataset[0].split('/')[-1]
            # find scale factor for this band (default 1)
            scale = 1
            for meta in metadata:
                if name + '_SCALE' in meta:
                    scale = float(metadata[meta])
            # create meta entry
            metaEntry = {
                'src': {
                    'SourceFilename': subDataset[0],
                    'sourceBand': 1,
                    'ScaleRatio': scale,
                    'ScaleOffset': 0,
                    'yOff': yOff,
                    'ySize': ySize,
                },
                'dst': {
                    'name': name
                }
            }
            metaDict.append(metaEntry)

    # create VRT with the cropped Y-size and a flipped-Y geotransform
    self._init_from_dataset_params(subDatasetWidth, ySize,
                                   (1, 0, 0, ySize, 0, -1), NSR().wkt)
    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse_time(gdalMetadata['ObservationStartDateTime']).isoformat())
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        parse_time(gdalMetadata['ObservationEndDateTime']).isoformat())

    # append GCPs and lat/lon projection to the vsiDataset
    self.dataset.SetGCPs(gcps, NSR().wkt)
    self.reproject_gcps(
        '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs'
    )
    self.tps = True

    mm = pti.get_gcmd_instrument('AMSR2')
    ee = pti.get_gcmd_platform('GCOM-W1')
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))
def __init__(self, filename, gdalDataset, gdalMetadata, GCP_COUNT=10,
             **kwargs):
    ''' Create VRT from an OBPG level-2 netCDF file

    Parameters
    ----------
    GCP_COUNT : int
        number of GCPs along each dimension

    Raises
    ------
    WrongMapperError : if the file is not a supported OBPG L2 netCDF product
    '''
    # extension must be .nc
    if os.path.splitext(filename)[1] != '.nc':
        raise WrongMapperError
    # file must contain navigation_data/longitude
    try:
        ds = gdal.Open('HDF5:"%s"://navigation_data/longitude' % filename)
    except RuntimeError:
        raise WrongMapperError
    else:
        dsMetadata = ds.GetMetadata()

    # title value must be known
    if dsMetadata.get('title', '') not in self.titles:
        raise WrongMapperError

    # get geophysical data variables
    subDatasets = gdal.Open(filename).GetSubDatasets()
    metaDict = []
    for subDataset in subDatasets:
        # only bands from these two groups are mapped
        groupName = subDataset[0].split('/')[-2]
        if groupName not in ['geophysical_data', 'navigation_data']:
            continue
        varName = subDataset[0].split('/')[-1]
        subds = gdal.Open(subDataset[0])
        b = subds.GetRasterBand(1)
        bMetadata = b.GetMetadata()

        # set SRC/DST parameters
        metaEntry = {'src': {'SourceFilename': subDataset[0],
                             'sourceBand': 1,
                             'DataType': b.DataType},
                     'dst': {'name': varName}}

        # replace datatype for l2_flags
        if varName == 'l2_flags':
            metaEntry['src']['DataType'] = 4
            metaEntry['src']['SourceType'] = 'SimpleSource'

        # set scale if exist
        metaKey = '%s_%s_scale_factor' % (groupName, varName)
        if metaKey in bMetadata:
            metaEntry['src']['ScaleRatio'] = bMetadata[metaKey]

        # set offset if exist
        metaKey = '%s_%s_add_offset' % (groupName, varName)
        if metaKey in bMetadata:
            metaEntry['src']['ScaleOffset'] = bMetadata[metaKey]

        # set standard_name if exists
        metaKey = '%s_%s_standard_name' % (groupName, varName)
        if metaKey in bMetadata:
            metaEntry['dst']['wkv'] = bMetadata[metaKey]

        # copy remaining metadata to dst, stripping the group/var prefix
        # and skipping keys already handled above (or internal HDF5 keys)
        for metaKey in bMetadata:
            newMetaKey = metaKey.replace('%s_%s_' % (groupName, varName), '')
            if newMetaKey not in ['scale_factor', 'add_offset',
                                  'DIMENSION_LIST', '_FillValue']:
                metaEntry['dst'][newMetaKey] = bMetadata[metaKey]
        metaDict.append(metaEntry)

    # make GCPs
    # get lat/lon grids
    longitude = gdal.Open('HDF5:"%s"://navigation_data/longitude'
                          % filename).ReadAsArray()
    latitude = gdal.Open('HDF5:"%s"://navigation_data/latitude'
                         % filename).ReadAsArray()
    rasterYSize, rasterXSize = longitude.shape

    # step between GCPs so that roughly GCP_COUNT points cover each axis
    step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
    step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))

    gcps = []
    k = 0
    center_lon = 0
    center_lat = 0
    for i0 in range(0, latitude.shape[0], step0):
        for i1 in range(0, latitude.shape[1], step1):
            # create GCP with X,Y,pixel,line from lat/lon matrices;
            # skip fill values outside the valid lon/lat range
            lon = float(longitude[i0, i1])
            lat = float(latitude[i0, i1])
            if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                gcp = gdal.GCP(lon, lat, 0, i1 + 0.5, i0 + 0.5)
                gcps.append(gcp)
                center_lon += lon
                center_lat += lat
                k += 1
    # NOTE(review): if no valid lon/lat cell is found, k stays 0 and the
    # "center_lon /= k" below raises ZeroDivisionError

    time_coverage_start = dsMetadata['time_coverage_start']
    time_coverage_end = dsMetadata['time_coverage_end']

    # create VRT
    # x_size, y_size, geo_transform, projection, gcps=None, gcp_projection='', **kwargs
    self._init_from_dataset_params(rasterXSize, rasterYSize,
                                   (0, 1, 0, rasterYSize, 0, -1),
                                   NSR().wkt, gcps, NSR().wkt)
    # add bands
    self.create_bands(metaDict)

    # reproject GCPs to stereographic projection centered on the mean GCP
    center_lon /= k
    center_lat /= k
    srs = ('+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=%f +lat_0=%f '
           '+no_defs' % (center_lon, center_lat))
    self.reproject_gcps(srs)

    # force the dataset projection to the GCP projection
    # (acknowledged hack in the original: "BAD, BAd, bad ...")
    self.dataset.SetProjection(self.dataset.GetGCPProjection())

    # use TPS for reprojection
    self.tps = True

    # add NansenCloud metadata
    self.dataset.SetMetadataItem('time_coverage_start',
                                 str(time_coverage_start))
    self.dataset.SetMetadataItem('time_coverage_end',
                                 str(time_coverage_end))
    self.dataset.SetMetadataItem('source_type', 'Satellite')
    self.dataset.SetMetadataItem('mapper', 'obpg_l2_nc')

    # map the OBPG platform name to its GCMD name where they differ
    platform = {'Orbview-2': 'SEASTAR'}.get(dsMetadata.get('platform'),
                                            dsMetadata.get('platform'))
    mm = pti.get_gcmd_instrument(dsMetadata.get('instrument'))
    ee = pti.get_gcmd_platform(platform)
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))
def __init__(self, filename, gdalDataset, gdalMetadata, GCP_COUNT=10,
             **kwargs):
    ''' Create VRT from an OBPG level-2 (HDF4) product

    Parameters
    ----------
    GCP_COUNT : int
        number of GCPs along each dimension

    Raises
    ------
    WrongMapperError : if the file has no known OBPG L2 title
    '''
    # should raise error in case of not obpg_l2 file
    try:
        title = gdalMetadata["Title"]
    except (KeyError, TypeError):
        # was a bare "except:"; gdalMetadata may be None or lack "Title"
        raise WrongMapperError
    if title not in self.titles:
        raise WrongMapperError

    # get subdataset and parse to VRT.__init__()
    # for retrieving geo-metadata
    # but NOT from longitude or latitude because it can be smaller!
    subDatasets = gdalDataset.GetSubDatasets()
    for subDataset in subDatasets:
        if ('longitude' not in subDataset[1] and
                'latitude' not in subDataset[1]):
            gdalSubDataset = gdal.Open(subDataset[0])
            break

    # NOTE: "title is '...'" (string identity) was replaced with "==";
    # identity comparison of strings is implementation-dependent
    if title == 'GOCI Level-2 Data':
        # set GOCI projection parameters
        rasterXSize = 5567
        rasterYSize = 5685
        proj4 = '+proj=ortho +lat_0=36 +lon_0=130 units=m +ellps=WGS84 +datum=WGS84 +no_defs'
        srs = osr.SpatialReference()
        srs.ImportFromProj4(proj4)
        projection = srs.ExportToWkt()
        geoTransform = (-1391500.0, 500.0, 0.0, 1349500.0, 0.0, -500.0)
        # create empty VRT dataset with georeference only
        self._init_from_dataset_params(rasterXSize, rasterYSize,
                                       geoTransform, projection)
    else:
        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalSubDataset)

    # dictionary for all possible bands
    allBandsDict = {
        'Rrs': {
            'src': {},
            'dst': {'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air'}},
        'Kd': {
            'src': {},
            'dst': {'wkv': 'volume_attenuation_coefficient_of_downwelling_radiative_flux_in_sea_water'}},
        'chlor_a': {
            'src': {},
            'dst': {'wkv': 'mass_concentration_of_chlorophyll_a_in_sea_water',
                    'case': 'I'}},
        'cdom_index': {
            'src': {},
            'dst': {'wkv': 'volume_absorption_coefficient_of_radiative_flux_in_sea_water_due_to_dissolved_organic_matter',
                    'case': 'II'}},
        'sst': {
            'src': {},
            'dst': {'wkv': 'sea_surface_temperature'}},
        'sst4': {
            'src': {},
            'dst': {'wkv': 'sea_surface_temperature'}},
        'l2_flags': {
            'src': {'SourceType': 'SimpleSource',
                    'DataType': 4},
            'dst': {'wkv': 'quality_flags',
                    'dataType': 4}},
        'qual_sst': {
            'src': {'SourceType': 'SimpleSource',
                    'DataType': 4},
            'dst': {'wkv': 'quality_flags',
                    'name': 'qual_sst',
                    'dataType': 4}},
        'qual_sst4': {
            'src': {'SourceType': 'SimpleSource',
                    'DataType': 4},
            'dst': {'wkv': 'quality_flags',
                    'name': 'qual_sst',
                    'dataType': 4}},
        'latitude': {
            'src': {},
            'dst': {'wkv': 'latitude'}},
        'longitude': {
            'src': {},
            'dst': {'wkv': 'longitude'}},
        'par': {
            'src': {},
            'dst': {'wkv': 'downwelling_photosynthetic_photon_radiance_in_sea_water'}},
        'ipar': {
            'src': {},
            'dst': {'wkv': 'instantaneous_downwelling_photosynthetic_photon_radiance_in_sea_water'}},
    }

    # loop through available bands and generate metaDict (non fixed)
    metaDict = []
    bandNo = 0
    for subDataset in subDatasets:
        # get sub dataset name
        subDatasetName = subDataset[1].split(' ')[1]
        self.logger.debug('Subdataset: %s' % subDataset[1])
        self.logger.debug('Subdataset name: "%s"' % subDatasetName)
        # get wavelength if applicable, get dataset name without wavelength
        try:
            wavelength = int(subDatasetName.split('_')[-1])
        except ValueError:
            # was a bare "except:"; a non-numeric suffix means no wavelength
            wavelength = None
            subBandName = subDatasetName
        else:
            subBandName = subDatasetName.split('_')[0]

        self.logger.debug('subBandName, wavelength: %s %s'
                          % (subBandName, str(wavelength)))

        if subBandName in allBandsDict:
            # get name, slope, intercept
            self.logger.debug('name: %s' % subBandName)
            tmpSubDataset = gdal.Open(subDataset[0])
            tmpSubMetadata = tmpSubDataset.GetMetadata()
            slope = tmpSubMetadata.get('slope', '1')
            intercept = tmpSubMetadata.get('intercept', '0')
            self.logger.debug('slope, intercept: %s %s ' % (slope, intercept))
            # create meta entry
            metaEntry = {'src': {'SourceFilename': subDataset[0],
                                 'sourceBand': 1,
                                 'ScaleRatio': slope,
                                 'ScaleOffset': intercept},
                         'dst': {}}
            # add more to src
            for srcKey in allBandsDict[subBandName]['src']:
                metaEntry['src'][srcKey] = allBandsDict[subBandName]['src'][srcKey]
            # add dst from allBandsDict
            for dstKey in allBandsDict[subBandName]['dst']:
                metaEntry['dst'][dstKey] = allBandsDict[subBandName]['dst'][dstKey]
            # add wavelength, band name to dst
            if wavelength is not None:
                metaEntry['dst']['suffix'] = str(wavelength)
                metaEntry['dst']['wavelength'] = str(wavelength)
            # append band metadata to metaDict
            self.logger.debug('metaEntry: %d => %s' % (bandNo, str(metaEntry)))
            metaDict.append(metaEntry)
            bandNo += 1

            if subBandName == 'Rrs':
                # add a derived remote-sensing reflectance band via a
                # pixel function on the same source
                metaEntryRrsw = {
                    'src': [{'SourceFilename': subDataset[0],
                             'SourceBand': 1,
                             'ScaleRatio': slope,
                             'ScaleOffset': intercept,
                             'DataType': 6}],
                    'dst': {'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_water',
                            'suffix': str(wavelength),
                            'wavelength': str(wavelength),
                            'PixelFunctionType': 'NormReflectanceToRemSensReflectance',
                            }}
                # append band metadata to metaDict
                self.logger.debug('metaEntry: %d => %s'
                                  % (bandNo, str(metaEntryRrsw)))
                metaDict.append(metaEntryRrsw)
                bandNo += 1

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    # set TIME
    startYear = int(gdalMetadata['Start Year'])
    startDay = int(gdalMetadata['Start Day'])
    startMillisec = int(gdalMetadata['Start Millisec'])
    startDate = datetime(startYear, 1, 1) + timedelta(startDay - 1, 0, 0,
                                                      startMillisec)
    # NOTE(review): startDate appears unused below; kept because computing
    # it also validates the Start* metadata keys

    # skip adding georeference for GOCI
    if title == 'GOCI Level-2 Data':
        return

    self._remove_geotransform()

    # add geolocation
    geoMeta = self.geolocation.data
    if len(geoMeta) > 0:
        self.dataset.SetMetadata(geoMeta, 'GEOLOCATION')

    # add GCPs
    geolocationMetadata = gdalSubDataset.GetMetadata('GEOLOCATION')
    xDatasetSource = geolocationMetadata['X_DATASET']
    xDataset = gdal.Open(xDatasetSource)
    yDatasetSource = geolocationMetadata['Y_DATASET']
    yDataset = gdal.Open(yDatasetSource)
    longitude = xDataset.ReadAsArray()
    latitude = yDataset.ReadAsArray()

    # estimate pixel/line step of the geolocation arrays
    pixelStep = int(ceil(float(gdalSubDataset.RasterXSize) /
                         float(xDataset.RasterXSize)))
    lineStep = int(ceil(float(gdalSubDataset.RasterYSize) /
                        float(xDataset.RasterYSize)))
    self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))

    # ==== ADD GCPs and Projection ====
    # estimate step of GCPs
    step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
    step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))
    if str(title) == 'VIIRSN Level-2 Data':
        step0 = 64
    self.logger.debug('gcpCount: >%s<, %d %d %f %d %d',
                      title, latitude.shape[0], latitude.shape[1],
                      GCP_COUNT, step0, step1)

    # generate list of GCPs
    dx = .5
    dy = .5
    gcps = []
    k = 0
    center_lon = 0
    center_lat = 0
    for i0 in range(0, latitude.shape[0], step0):
        for i1 in range(0, latitude.shape[1], step1):
            # create GCP with X,Y,pixel,line from lat/lon matrices
            lon = float(longitude[i0, i1])
            lat = float(latitude[i0, i1])
            if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                gcp = gdal.GCP(lon, lat, 0,
                               i1 * pixelStep + dx, i0 * lineStep + dy)
                self.logger.debug('%d %d %d %f %f', k, gcp.GCPPixel,
                                  gcp.GCPLine, gcp.GCPX, gcp.GCPY)
                gcps.append(gcp)
                center_lon += gcp.GCPX
                center_lat += gcp.GCPY
                k += 1

    # append GCPs and lat/lon projection to the vsiDataset
    self.dataset.SetGCPs(gcps, NSR().wkt)
    self._remove_geolocation()

    # reproject GCPs to stereographic projection centered on the mean GCP
    center_lon /= k
    center_lat /= k
    srs = ('+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=%f +lat_0=%f '
           '+no_defs' % (center_lon, center_lat))
    self.reproject_gcps(srs)

    # use TPS for reprojection
    self.tps = True

    # add NansenCloud metadata
    self.dataset.SetMetadataItem(
        'time_coverage_start',
        (parse(gdalMetadata['time_coverage_start']).isoformat()))
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        (parse(gdalMetadata['time_coverage_stop']).isoformat()))

    # 'Sensor Name' is quoted and ends with the platform letter (A/T)
    instrument = gdalMetadata['Sensor Name'][1:-1]
    platform = {'A': 'AQUA', 'T': 'TERRA'}[gdalMetadata['Sensor Name'][-1]]
    mm = pti.get_gcmd_instrument(instrument)
    ee = pti.get_gcmd_platform(platform)
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
    ''' Create CSKS VRT

    Builds a VRT with corner + center GCPs from the Cosmo-SkyMed metadata,
    raw-counts bands and calibrated sigma0 bands for each SBI subdataset.

    Raises
    ------
    WrongMapperError : if the file name does not start with "CSKS"
    '''
    if filename.split('/')[-1][0:4] != "CSKS":
        raise WrongMapperError

    def _lonlat(key):
        # metadata value is "<lat> <lon> ..." -> return (lon, lat)
        parts = gdalMetadata[key].split(' ')
        return float(parts[1]), float(parts[0])

    # Get coordinates of the four corners and the scene center
    bottom_left_lon, bottom_left_lat = _lonlat(
        'Estimated_Bottom_Left_Geodetic_Coordinates')
    bottom_right_lon, bottom_right_lat = _lonlat(
        'Estimated_Bottom_Right_Geodetic_Coordinates')
    top_left_lon, top_left_lat = _lonlat(
        'Estimated_Top_Left_Geodetic_Coordinates')
    top_right_lon, top_right_lat = _lonlat(
        'Estimated_Top_Right_Geodetic_Coordinates')
    center_lon, center_lat = _lonlat('Scene_Centre_Geodetic_Coordinates')

    # Get sub-datasets
    subDatasets = gdalDataset.GetSubDatasets()

    # Get file names from dataset or subdataset
    if len(subDatasets) == 1:
        filenames = [filename]
    else:
        filenames = [f[0] for f in subDatasets]
    # Drop quick-look (QLK) datasets.  The original popped elements from
    # the list while iterating over it, which skips the element following
    # each removal; filtering into a new list removes all QLK entries.
    filenames = [f for f in filenames if f[-3:] != 'QLK']

    subDataset = gdal.Open(filenames[0])

    # generate list of corner/center GCPs (X=lon, Y=lat, Z=0, pixel, line)
    gcps = [
        gdal.GCP(float(bottom_left_lon), float(bottom_left_lat), 0,
                 0, 0),
        gdal.GCP(float(bottom_right_lon), float(bottom_right_lat), 0,
                 subDataset.RasterXSize, 0),
        gdal.GCP(float(top_left_lon), float(top_left_lat), 0,
                 0, subDataset.RasterYSize),
        gdal.GCP(float(top_right_lon), float(top_right_lat), 0,
                 subDataset.RasterXSize, subDataset.RasterYSize),
        gdal.GCP(float(center_lon), float(center_lat), 0,
                 int(np.round(subDataset.RasterXSize / 2.)),
                 int(round(subDataset.RasterYSize / 2.))),
    ]

    # lat/lon projection for the GCPs
    latlongSRS = osr.SpatialReference()
    latlongSRS.ImportFromProj4(
        "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")
    latlongSRSWKT = latlongSRS.ExportToWkt()

    # create empty VRT dataset with geolocation only
    # x_size, y_size, geo_transform, projection, gcps=None, gcp_projection='', **kwargs
    self._init_from_dataset_params(
        subDataset.RasterXSize, subDataset.RasterYSize,
        (0, 1, 0, subDataset.RasterYSize, 0, -1), latlongSRSWKT, gcps,
        latlongSRSWKT)

    # Use only full size "original" (SBI) datasets: add real and imaginary
    # raw counts as bands
    for i, elem in enumerate(filenames):
        if filenames[i][-3:] == 'SBI':
            polarisation = gdalMetadata[filenames[i][-7:-4] + '_Polarisation']
            src = {'SourceFilename': filenames[i],
                   'SourceBand': 1,
                   'DataType': gdal.GDT_Int16}
            dst = {'dataType': gdal.GDT_Float32,
                   'name': 'RawCounts_%s_real' % polarisation}
            self.create_band(src, dst)
            src = {'SourceFilename': filenames[i],
                   'SourceBand': 2,
                   'DataType': gdal.GDT_Int16}
            dst = {'dataType': gdal.GDT_Float32,
                   'name': 'RawCounts_%s_imaginary' % polarisation}
            self.create_band(src, dst)
            self.dataset.FlushCache()

    for i, elem in enumerate(filenames):
        if filenames[i][-3:] == 'SBI':
            # Calculate sigma0 scaling factor from the calibration metadata
            Rref = float(gdalMetadata['Reference_Slant_Range'])
            Rexp = float(gdalMetadata['Reference_Slant_Range_Exponent'])
            alphaRef = float(gdalMetadata['Reference_Incidence_Angle'])
            F = float(gdalMetadata['Rescaling_Factor'])
            K = float(gdalMetadata[filenames[i][-7:-4] +
                                   '_Calibration_Constant'])
            Ftot = Rref ** (2. * Rexp)
            Ftot *= np.sin(alphaRef * np.pi / 180.0)
            Ftot /= F ** 2.
            Ftot /= K
            src = [{'SourceFilename': self.filename,
                    'DataType': gdal.GDT_Float32,
                    'SourceBand': 2 * i + 1,
                    'ScaleRatio': np.sqrt(Ftot)},
                   {'SourceFilename': self.filename,
                    'DataType': gdal.GDT_Float32,
                    'SourceBand': 2 * i + 2,
                    'ScaleRatio': np.sqrt(Ftot)}]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'RawcountsToSigma0_CosmoSkymed_SBI',
                   'polarisation': gdalMetadata[filenames[i][-7:-4] +
                                                '_Polarisation'],
                   'name': 'sigma0_%s' % gdalMetadata[filenames[i][-7:-4] +
                                                      '_Polarisation'],
                   'SatelliteID': gdalMetadata['Satellite_ID'],
                   'dataType': gdal.GDT_Float32}
            # NOTE(review): 'pass' direction is not available in the metadata
            self.create_band(src, dst)
            self.dataset.FlushCache()

    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse_time(gdalMetadata['Scene_Sensing_Start_UTC']).isoformat())
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        parse_time(gdalMetadata['Scene_Sensing_Stop_UTC']).isoformat())
def __init__(self, filename, gdalDataset, gdalMetadata,
             product_type='RVL', GCP_COUNT=10, **kwargs):
    ''' Create VRT from a Sentinel-1 (or ASAR in S1 format) level-2 product

    Parameters
    ----------
    product_type: string
        Sentinel-1 level-2 ocean product type/component, i.e. ocean swell
        spectra (OSW), ocean wind field (OWI), or radial surface velocity
        (RVL) (RVL is the default)
    GCP_COUNT : int
        number of GCPs along each dimension

    Raises
    ------
    WrongMapperError : if the file is not an S1-format level-2 product
    '''
    # List of Sentinel-1 level-2 components
    unwanted_product_components = ['osw', 'owi', 'rvl']
    # Remove requested 'product_type' from list of unwanted
    unwanted_product_components.remove(product_type.lower())

    # Check if it is Sentinel-1 (or ASAR) level-2 (in S1 data format)
    if not gdalMetadata or 'NC_GLOBAL' not in gdalMetadata:
        raise WrongMapperError(filename)
    else:
        title = gdalMetadata['NC_GLOBAL#TITLE']

    # Raise error if it is neither Sentinel-1 nor ASAR in S1 format.
    # The original "if not 'Sentinel-1' or 'ASA' in title" parsed as
    # "(not 'Sentinel-1') or ('ASA' in title)", which rejected exactly
    # the ASAR products this mapper is documented to support.
    if not ('Sentinel-1' in title or 'ASA' in title):
        raise WrongMapperError(filename)

    # strip the netCDF group prefix ("...#") from the metadata keys
    # (.iteritems() was Python-2 only and raises AttributeError on Python 3)
    metadata = {}
    for key, val in gdalMetadata.items():
        new_key = key.split('#')[-1]
        metadata[new_key] = val

    subDatasets = gdalDataset.GetSubDatasets()
    filenames = [f[0] for f in subDatasets]

    rm_bands = []
    # Find all data that is not relevant for the selected product type
    # and get bands of longitude, latitude and zero doppler time
    for i, f in enumerate(filenames):
        if f.split(':')[-1][:3] in unwanted_product_components:
            rm_bands.append(i)
        if 'Lon' in f.split(':')[-1]:
            lon_ds = gdal.Open(f)
            rm_bands.append(i)
        if 'Lat' in f.split(':')[-1]:
            lat_ds = gdal.Open(f)
            rm_bands.append(i)
        if 'ZeroDopplerTime' in f.split(':')[-1]:
            zdt_ds = gdal.Open(f)
            rm_bands.append(i)
    # Remove bands in rm_bands from the list of bands to add to the
    # Nansat object
    filenames = [f for i, f in enumerate(filenames) if i not in rm_bands]

    # create empty VRT dataset
    self._init_from_gdal_dataset(gdal.Open(subDatasets[0][0]),
                                 metadata=metadata)

    # The zero Doppler time grid is 3-dimensional - the last dimension is a
    # char array with the time as year, month, day, etc.
    # Will not bother with it yet...
    XSize = lon_ds.RasterXSize
    YSize = lon_ds.RasterYSize

    # get projection from the lon and lat datasets
    longitude = lon_ds.ReadAsArray()
    latitude = lat_ds.ReadAsArray()

    # estimate step of GCPs
    step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
    step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))
    self.logger.debug('gcpCount: >%s<, %d %d %f %d %d',
                      title, latitude.shape[0], latitude.shape[1],
                      GCP_COUNT, step0, step1)

    # estimate pixel/line step of the geolocation arrays
    pixelStep = 1
    lineStep = 1
    self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))

    # generate list of GCPs
    dx = .5
    dy = .5
    gcps = []
    k = 0
    for i0 in range(0, latitude.shape[0], step0):
        for i1 in range(0, latitude.shape[1], step1):
            # create GCP with X,Y,pixel,line from lat/lon matrices
            lon = float(longitude[i0, i1])
            lat = float(latitude[i0, i1])
            if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                gcp = gdal.GCP(lon, lat, 0,
                               i1 * pixelStep + dx, i0 * lineStep + dy)
                self.logger.debug('%d %d %d %f %f', k, gcp.GCPPixel,
                                  gcp.GCPLine, gcp.GCPX, gcp.GCPY)
                gcps.append(gcp)
                k += 1

    # append GCPs and lat/lon projection to the vsiDataset
    self.dataset.SetGCPs(gcps, NSR().wkt)

    # define band specific parameters
    metaDict = []
    for i, filename in enumerate(filenames):
        band = gdal.Open(filename)
        # check that the band size is the same size as the latitude and
        # longitude grids
        if band.RasterXSize != XSize or band.RasterYSize != YSize:
            raise IndexError(('Size of sub-dataset is different from size '
                              'of longitude and latitude grids'))
        bandMetadata = band.GetMetadata()
        # generate src metadata
        src = {'SourceFilename': filename, 'SourceBand': 1}
        # Generate dst metadata
        short_name = filename.split(':')[-1]
        dst = {'name': short_name,
               'short_name': short_name,
               'long_name': bandMetadata[short_name + '#long_name'],
               'units': bandMetadata[short_name + '#units'],
               }
        # append band with src and dst dictionaries
        metaDict.append({'src': src, 'dst': dst})

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    # derive sigma0 from each Nrcs band (stored in dB) via pixel function
    metaDict = []
    for i in range(self.dataset.RasterCount):
        if 'Nrcs' in self.dataset.GetRasterBand(i + 1).GetMetadata()['name']:
            metaDict.append({
                'src': {
                    'SourceFilename': (self.dataset.GetRasterBand(
                        i + 1).GetMetadata()['SourceFilename']),
                    'SourceBand': 1
                },
                'dst': {
                    'short_name': 'sigma0',
                    'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                    'PixelFunctionType': 'dB2pow',
                    'polarization': (self.dataset.GetMetadata()['POLARISATION']),
                    'suffix': self.dataset.GetMetadata()['POLARISATION'],
                    'dataType': 6,
                }
            })

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    # set time
    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse(self.dataset.GetMetadata()
              ['SOURCE_ACQUISITION_UTC_TIME']).isoformat())
def __init__(self, filename, gdalDataset, gdalMetadata,
             GCP_COUNT0=5, GCP_COUNT1=20, pixelStep=1, lineStep=1,
             **kwargs):
    '''Create VIIRS VRT.

    Parameters
    -----------
    filename : str
        path to a GMTCO_npp_* geolocation file; radiance files
        (SVM??_npp_d*_obpg_ops.h5) are looked up in the same directory
    GCP_COUNT0 : int
        approximate number of GCPs along rows
    GCP_COUNT1 : int
        approximate number of GCPs along columns
    pixelStep, lineStep : int
        scaling between geolocation-grid indices and raster pixel/line

    Raises
    -------
    WrongMapperError : if the file is not a VIIRS GMTCO geolocation file
    NansatReadError : if scipy (needed for gaussian_filter) is missing
    '''
    if 'GMTCO_npp_' not in filename:
        raise WrongMapperError(filename)
    ifiledir = os.path.split(filename)[0]
    # BUGFIX: use os.path.join so the glob pattern gets a path separator;
    # plain concatenation produced '<dir>SVM??...' which never matched
    # when the input file had a directory component
    ifiles = sorted(glob.glob(os.path.join(ifiledir,
                                           'SVM??_npp_d*_obpg_ops.h5')))

    if not IMPORT_SCIPY:
        raise NansatReadError(
            'VIIRS data cannot be read because scipy is not installed')

    # central wavelengths (nm) of VIIRS bands M1-M16; index 0 is unused so
    # that band number N maps directly to viirsWavelengths[N]
    viirsWavelengths = [None, 412, 445, 488, 555, 672, 746, 865, 1240,
                        1378, 1610, 2250, 3700, 4050, 8550, 10736, 12013]

    # create empty VRT dataset with geolocation only
    xDatasetSource = (
        'HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Longitude' % filename)
    xDataset = gdal.Open(xDatasetSource)
    self._init_from_gdal_dataset(xDataset)

    # one radiance band per SVM?? file, annotated with its wavelength
    metaDict = []
    for ifile in ifiles:
        ifilename = os.path.split(ifile)[1]
        # band number is encoded in characters 3:5 of the file name
        bNumber = int(ifilename[3:5])
        bWavelength = viirsWavelengths[bNumber]
        SourceFilename = (
            'HDF5:"%s"://All_Data/VIIRS-M%d-SDR_All/Radiance'
            % (ifile, bNumber))
        # debug output through the logger instead of stray print() calls
        self.logger.debug('%s band %d wavelength %s: %s',
                          ifilename, bNumber, bWavelength, SourceFilename)
        metaEntry = {
            'src': {
                'SourceFilename': SourceFilename,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': str(bWavelength),
                'suffix': str(bWavelength)
            }
        }
        metaDict.append(metaEntry)

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    # smooth the geolocation grids before deriving GCPs
    xVRTArray = xDataset.ReadAsArray()
    xVRTArray = gaussian_filter(xVRTArray, 5).astype('float32')
    xVRT = VRT.from_array(xVRTArray)

    yDatasetSource = (
        'HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Latitude' % filename)
    yDataset = gdal.Open(yDatasetSource)
    yVRTArray = yDataset.ReadAsArray()
    yVRTArray = gaussian_filter(yVRTArray, 5).astype('float32')
    yVRT = VRT.from_array(yVRTArray)

    # estimate pixel/line step
    self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))

    # ==== ADD GCPs and Projection ====
    # get lat/lon matrices
    longitude = xVRT.dataset.GetRasterBand(1).ReadAsArray()
    latitude = yVRT.dataset.GetRasterBand(1).ReadAsArray()

    # estimate step of GCPs (at least 1 so range() always advances)
    step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT0))
    step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT1))
    self.logger.debug('gcpCount: %d %d %d %d, %d %d',
                      latitude.shape[0], latitude.shape[1],
                      GCP_COUNT0, GCP_COUNT1, step0, step1)

    # generate list of GCPs, skipping out-of-range lat/lon values
    gcps = []
    k = 0
    for i0 in range(0, latitude.shape[0], step0):
        for i1 in range(0, latitude.shape[1], step1):
            # create GCP with X,Y,pixel,line from lat/lon matrices
            lon = float(longitude[i0, i1])
            lat = float(latitude[i0, i1])
            if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                gcp = gdal.GCP(lon, lat, 0,
                               i1 * pixelStep, i0 * lineStep)
                self.logger.debug('%d %d %d %f %f', k, gcp.GCPPixel,
                                  gcp.GCPLine, gcp.GCPX, gcp.GCPY)
                gcps.append(gcp)
                k += 1

    # append GCPs and lat/lon projection to the vsiDataset
    self.dataset.SetGCPs(gcps, NSR().wkt)

    # remove geolocation array
    self._remove_geolocation()
def __init__(self, filename, gdalDataset, gdalMetadata, GCP_COUNT=30,
             **kwargs):
    '''Create MODIS_L1 VRT.

    Parameters
    -----------
    filename : str
        path to a MODIS L1B HDF4 granule
    gdalDataset : gdal.Dataset
        the opened granule (used for subdatasets and geolocation)
    gdalMetadata : dict
        granule metadata; must contain SHORTNAME, RANGE*DATE/TIME keys
    GCP_COUNT : int
        approximate number of GCPs along each dimension

    Raises
    -------
    WrongMapperError : if the granule is not a known MODIS L1B product
    '''
    # available MODIS L1B products and their ground resolution (m)
    modisResolutions = {
        'MYD02QKM': 250,
        'MOD02QKM': 250,
        'MYD02HKM': 500,
        'MOD02HKM': 500,
        'MYD021KM': 1000,
        'MOD021KM': 1000
    }

    # should raise error in case of not MODIS_L1
    # (narrowed from a bare except: TypeError when gdalMetadata is None,
    # KeyError when SHORTNAME is absent or unknown)
    try:
        mResolution = modisResolutions[gdalMetadata["SHORTNAME"]]
    except (TypeError, KeyError):
        raise WrongMapperError

    # get 1st subdataset and parse to VRT.__init__()
    # for retrieving geo-metadata
    try:
        gdalSubDataset = gdal.Open(gdalDataset.GetSubDatasets()[0][0])
    except (AttributeError, IndexError):
        raise WrongMapperError

    # create empty VRT dataset with geolocation only
    self._init_from_gdal_dataset(gdalSubDataset)

    subDsString = 'HDF4_EOS:EOS_SWATH:"%s":MODIS_SWATH_Type_L1B:%s'

    def _radiance_bands(subdataset, wavelengths):
        # build one src/dst band dict per wavelength; band numbers are
        # 1-based and follow the order of the wavelength list
        return [{'src': {'SourceFilename': subDsString % (filename,
                                                          subdataset),
                         'SourceBand': band + 1},
                 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                         'wavelength': str(wave)}}
                for band, wave in enumerate(wavelengths)]

    # band mappings per resolution: subdataset names and the central
    # wavelength (nm) of each band inside them
    metaDict250SF = ['EV_250_RefSB']
    metaDict250 = _radiance_bands('EV_250_RefSB', [645, 858])

    metaDict500SF = ['EV_250_Aggr500_RefSB', 'EV_500_RefSB']
    metaDict500 = (
        _radiance_bands('EV_250_Aggr500_RefSB', [645, 858]) +
        _radiance_bands('EV_500_RefSB', [469, 555, 1240, 1640, 2130]))

    metaDict1000SF = ['EV_250_Aggr1km_RefSB', 'EV_500_Aggr1km_RefSB',
                      'EV_1KM_RefSB', 'EV_1KM_Emissive']
    metaDict1000 = (
        _radiance_bands('EV_250_Aggr1km_RefSB', [645, 858]) +
        _radiance_bands('EV_500_Aggr1km_RefSB',
                        [469, 555, 1240, 1640, 2130]) +
        _radiance_bands('EV_1KM_RefSB',
                        [412, 443, 488, 531, 551, 667, 667, 678, 678,
                         748, 869, 905, 936, 940, 1375]) +
        _radiance_bands('EV_1KM_Emissive',
                        [3750, 3959, 3959, 4050, 4465, 4515, 6715, 7325,
                         8550, 9730, 11030, 12020, 13335, 13635, 13935,
                         14235]))

    # get proper mapping depending on resolution
    metaDict = {
        250: metaDict250,
        500: metaDict500,
        1000: metaDict1000,
    }[mResolution]
    # get proper mapping depending on resolution
    metaDictSF = {
        250: metaDict250SF,
        500: metaDict500SF,
        1000: metaDict1000SF,
    }[mResolution]

    # read all scales/offsets
    rScales = {}
    rOffsets = {}
    for sf in metaDictSF:
        dsName = subDsString % (filename, sf)
        ds = gdal.Open(dsName)
        rScales[dsName] = list(
            map(float, ds.GetMetadataItem('radiance_scales').split(',')))
        rOffsets[dsName] = list(
            map(float, ds.GetMetadataItem('radiance_offsets').split(',')))
        self.logger.debug('radiance_scales: %s' % str(rScales))

    # add 'band_name' to 'parameters' and per-band scaling
    for bandDict in metaDict:
        SourceFilename = bandDict['src']['SourceFilename']
        SourceBand = bandDict['src']['SourceBand']
        bandDict['dst']['suffix'] = bandDict['dst']['wavelength']
        scale = rScales[SourceFilename][SourceBand - 1]
        offset = rOffsets[SourceFilename][SourceBand - 1]
        self.logger.debug('band, scale, offset: %s_%d %s %s' %
                          (SourceFilename, SourceBand, scale, offset))
        bandDict['src']['ScaleRatio'] = scale
        bandDict['src']['ScaleOffset'] = offset

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    self._remove_geolocation()

    # set required metadata
    self.dataset.SetMetadataItem(
        'time_coverage_start',
        (parse(gdalMetadata["RANGEBEGINNINGDATE"] + ' ' +
               gdalMetadata["RANGEBEGINNINGTIME"]).isoformat()))
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        (parse(gdalMetadata["RANGEENDINGDATE"] + ' ' +
               gdalMetadata["RANGEENDINGTIME"]).isoformat()))

    instrumentName = self.find_metadata(gdalMetadata,
                                        'ASSOCIATEDINSTRUMENTSHORTNAME',
                                        'MODIS')
    platformName = self.find_metadata(gdalMetadata,
                                      'ASSOCIATEDPLATFORMSHORTNAME',
                                      'AQUA')
    mm = pti.get_gcmd_instrument(instrumentName)
    ee = pti.get_gcmd_platform(platformName)
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))

    # pick the geolocation subdatasets by their description
    lonSubdataset = [
        subdatasetName[0]
        for subdatasetName in gdalDataset.GetSubDatasets()
        if 'Longitude' in subdatasetName[1]
    ][0]
    latSubdataset = [
        subdatasetName[0]
        for subdatasetName in gdalDataset.GetSubDatasets()
        if 'Latitude' in subdatasetName[1]
    ][0]
    lons = gdal.Open(lonSubdataset).ReadAsArray()
    lats = gdal.Open(latSubdataset).ReadAsArray()
    gcps = []
    # BUGFIX: '/' is true division in Python 3 and range() requires an
    # integer step; use floor division and keep the step >= 1
    rows = range(0, lons.shape[0], max(1, lons.shape[0] // GCP_COUNT))
    cols = range(0, lons.shape[1], max(1, lons.shape[1] // GCP_COUNT))
    # ratio between the raster size and the geolocation-grid size
    factor = self.dataset.RasterYSize / lons.shape[0]
    for r in rows:
        for c in cols:
            gcps.append(
                gdal.GCP(float(lons[r, c]), float(lats[r, c]), 0,
                         factor * c + 0.5, factor * r + 0.5))

    self.dataset.SetGCPs(gcps, self.dataset.GetGCPProjection())
    self.tps = True
def __init__(self, filename, gdalDataset, gdalMetadata, GCP_COUNT=10,
             bandNames=['VNIR_Band1', 'VNIR_Band2', 'VNIR_Band3N'],
             bandWaves=[560, 660, 820], **kwargs):
    '''Create VRT

    Parameters
    -----------
    GCP_COUNT : int
        number of GCPs along each dimention
    bandNames : list of string (band name)
        NOTE: mutable default, but it is only read (zipped), never mutated
    bandWaves : list of integer (waves corresponding to band name)

    Band name and waves
    --------------------
    'VNIR_Band3B' : 820, 'SWIR_Band4' : 1650, 'SWIR_Band5' : 2165,
    'SWIR_Band6' : 2205, 'SWIR_Band7' : 2260, 'SWIR_Band8' : 2330,
    'SWIR_Band9' : 2395, 'TIR_Band10' : 8300, 'TIR_Band11' : 8650,
    'TIR_Band12' : 9100, 'TIR_Band13' : 10600, 'TIR_Band14' : 11300
    '''
    # check if it is ASTER L1A (narrowed from a bare except:
    # AssertionError from the asserts, TypeError when gdalMetadata is
    # None, KeyError when INSTRUMENTSHORTNAME is absent)
    try:
        assert 'AST_L1A_' in filename
        shortName = gdalMetadata['INSTRUMENTSHORTNAME']
        assert shortName == 'ASTER'
    except (AssertionError, TypeError, KeyError):
        raise WrongMapperError

    subDatasets = gdalDataset.GetSubDatasets()

    # find datasets for each band and generate metaDict
    metaDict = []
    bandDatasetMask = 'HDF4_EOS:EOS_SWATH:"%s":%s:ImageData'
    for bandName, bandWave in zip(bandNames, bandWaves):
        metaEntry = {
            'src': {
                'SourceFilename': (bandDatasetMask
                                   % (filename, bandName)),
                'SourceBand': 1,
                'DataType': 6,
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': str(bandWave),
                'suffix': str(bandWave),
            }
        }
        metaDict.append(metaEntry)

    # create empty VRT dataset with geolocation only
    gdalSubDataset = gdal.Open(metaDict[0]['src']['SourceFilename'])
    self._init_from_gdal_dataset(gdalSubDataset,
                                 metadata=gdalSubDataset.GetMetadata())

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    # find largest lon/lat subdatasets (size is parsed from the
    # subdataset description, e.g. '[NNNxMMM] ...')
    latShape0 = 0
    for subDataset in subDatasets:
        if 'Latitude' in subDataset[1]:
            ls = int(subDataset[1].strip().split('[')[1].split('x')[0])
            if ls >= latShape0:
                latShape0 = ls
                latSubDS = subDataset[0]
        if 'Longitude' in subDataset[1]:
            ls = int(subDataset[1].strip().split('[')[1].split('x')[0])
            if ls >= latShape0:
                latShape0 = ls
                lonSubDS = subDataset[0]
    self.logger.debug(latSubDS)
    self.logger.debug(lonSubDS)

    # get lat/lon matrices
    xDataset = gdal.Open(lonSubDS)
    yDataset = gdal.Open(latSubDS)
    longitude = xDataset.ReadAsArray()
    latitude = yDataset.ReadAsArray()

    # BUGFIX: '/' is true division in Python 3 and range() requires an
    # integer step; use floor division and keep the step >= 1
    step0 = max(1, longitude.shape[0] // GCP_COUNT)
    step1 = max(1, longitude.shape[1] // GCP_COUNT)

    # estimate pixel/line step between the raster and geolocation grids
    pixelStep = int(
        ceil(float(gdalSubDataset.RasterXSize) /
             float(xDataset.RasterXSize)))
    lineStep = int(
        ceil(float(gdalSubDataset.RasterYSize) /
             float(xDataset.RasterYSize)))
    self.logger.debug('steps: %d %d %d %d' % (step0, step1,
                                              pixelStep, lineStep))

    # generate list of GCPs, skipping out-of-range lat/lon values
    gcps = []
    k = 0
    for i0 in range(0, latitude.shape[0], step0):
        for i1 in range(0, latitude.shape[1], step1):
            # create GCP with X,Y,pixel,line from lat/lon matrices
            lon = float(longitude[i0, i1])
            lat = float(latitude[i0, i1])
            if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                gcp = gdal.GCP(lon, lat, 0,
                               i1 * pixelStep, i0 * lineStep)
                self.logger.debug(
                    '%d %d %d %f %f' % (k, gcp.GCPPixel, gcp.GCPLine,
                                        gcp.GCPX, gcp.GCPY))
                gcps.append(gcp)
                k += 1

    # append GCPs and lat/lon projection to the vsiDataset
    self.dataset.SetGCPs(gcps, NSR().wkt)

    # Adding valid time to dataset
    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse(gdalMetadata['FIRSTPACKETTIME']).isoformat())
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        parse(gdalMetadata['LASTPACKETTIME']).isoformat())

    mm = pti.get_gcmd_instrument('ASTER')
    ee = pti.get_gcmd_platform('TERRA')
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))