#!/usr/bin/env python
# Conversion of csv / space separated text file
# into a vrt data set
# Author: [email protected]
import sys
import os

try:
    from osgeo import gdal
except ImportError:
    import gdal

gdal.AllRegister()

argv = None
nr = 0       # record counter used while processing the input file
header = 0   # number of header lines to skip (set via -h)

if argv is None:
    argv = sys.argv
# Let GDAL consume any general options (--config, --debug, ...) first.
argv = gdal.GeneralCmdLineProcessor(argv)
if argv is None:
    sys.exit(0)

# BUG FIX: the original tested "argv < 14 or argv > 15", comparing the
# argument *list* itself to an int instead of its length, so the usage
# check never worked (TypeError on Python 3, type-based ordering on
# Python 2).  Compare the argument count instead.
if len(argv) < 14 or len(argv) > 15:
    print("Usage: oft-csv2vrt <-l layer> <-x xcol> <-y ycol> <-v varcol> <-s separator> [-h header] <-i infile> <-o outfile>")
    sys.exit(0)
# process arguments
def main(argv=None):
    """Mosaic a set of input rasters into one output file (gdal_merge-style).

    Parses gdal_merge command-line options from ``argv`` (defaults to
    ``sys.argv``), computes the union bounding box of the inputs, creates
    (or reuses) the output dataset, and copies every source file into it.
    Exits the process via ``sys.exit`` on usage or driver errors.
    """
    global verbose, quiet
    verbose = 0
    quiet = 0
    names = []                      # positional args: input file names
    format = None                   # output driver name (-f/-of)
    out_file = 'out.tif'
    ulx = None                      # output extent; None -> derive from inputs
    psize_x = None                  # output pixel size; None -> from first input
    separate = 0                    # one output band per input band (-separate)
    copy_pct = 0                    # copy the color table of the first input (-pct)
    nodata = None                   # source nodata to ignore while copying (-n)
    a_nodata = None                 # nodata value to assign on the output (-a_nodata)
    create_options = []
    pre_init = []                   # per-band fill values for the mosaic (-init)
    band_type = None                # output data type; None -> from first input
    createonly = 0
    bTargetAlignedPixels = False    # -tap: snap extent to pixel-size grid
    start_time = time.time()
    gdal.AllRegister()
    if argv is None:
        argv = sys.argv
    argv = gdal.GeneralCmdLineProcessor(argv)
    if argv is None:
        sys.exit(0)
    # Parse command line arguments.
    i = 1
    while i < len(argv):
        arg = argv[i]
        if arg == '-o':
            i = i + 1
            out_file = argv[i]
        elif arg == '-v':
            verbose = 1
        elif arg == '-q' or arg == '-quiet':
            quiet = 1
        elif arg == '-createonly':
            createonly = 1
        elif arg == '-separate':
            separate = 1
        elif arg == '-seperate':
            # Deliberate alias: accept the common misspelling as well.
            separate = 1
        elif arg == '-pct':
            copy_pct = 1
        elif arg == '-ot':
            i = i + 1
            band_type = gdal.GetDataTypeByName(argv[i])
            if band_type == gdal.GDT_Unknown:
                print('Unknown GDAL data type: %s' % argv[i])
                sys.exit(1)
        elif arg == '-init':
            i = i + 1
            # -init takes a single quoted, space-separated list of values.
            str_pre_init = argv[i].split()
            for x in str_pre_init:
                pre_init.append(float(x))
        elif arg == '-n':
            i = i + 1
            nodata = float(argv[i])
        elif arg == '-a_nodata':
            i = i + 1
            a_nodata = float(argv[i])
        elif arg == '-f' or arg == '-of':
            i = i + 1
            format = argv[i]
        elif arg == '-co':
            i = i + 1
            create_options.append(argv[i])
        elif arg == '-ps':
            psize_x = float(argv[i + 1])
            # North-up convention: the y pixel size is always negative.
            psize_y = -1 * abs(float(argv[i + 2]))
            i = i + 2
        elif arg == '-tap':
            bTargetAlignedPixels = True
        elif arg == '-ul_lr':
            ulx = float(argv[i + 1])
            uly = float(argv[i + 2])
            lrx = float(argv[i + 3])
            lry = float(argv[i + 4])
            i = i + 4
        elif arg[:1] == '-':
            print('Unrecognized command option: %s' % arg)
            Usage()
            sys.exit(1)
        else:
            names.append(arg)
        i = i + 1
    if len(names) == 0:
        print('No input files selected.')
        Usage()
        sys.exit(1)
    if format is None:
        # Infer the driver from the output file extension.
        format = GetOutputDriverFor(out_file)
    Driver = gdal.GetDriverByName(format)
    if Driver is None:
        print('Format driver %s not found, pick a supported driver.'
              % format)
        sys.exit(1)
    DriverMD = Driver.GetMetadata()
    # Piecewise writing requires Create() support, not just CreateCopy().
    if 'DCAP_CREATE' not in DriverMD:
        print(
            'Format driver %s does not support creation and piecewise writing.\nPlease select a format that does, such as GTiff (the default) or HFA (Erdas Imagine).'
            % format)
        sys.exit(1)
    # Collect information on all the source files.
    file_infos = names_to_fileinfos(names)
    if ulx is None:
        # No -ul_lr given: take the union of all input extents.
        ulx = file_infos[0].ulx
        uly = file_infos[0].uly
        lrx = file_infos[0].lrx
        lry = file_infos[0].lry
        for fi in file_infos:
            ulx = min(ulx, fi.ulx)
            uly = max(uly, fi.uly)
            lrx = max(lrx, fi.lrx)
            lry = min(lry, fi.lry)
    if psize_x is None:
        # No -ps given: inherit pixel size from the first input.
        psize_x = file_infos[0].geotransform[1]
        psize_y = file_infos[0].geotransform[5]
    if band_type is None:
        band_type = file_infos[0].band_type
    # Try opening as an existing file.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    t_fh = gdal.Open(out_file, gdal.GA_Update)
    gdal.PopErrorHandler()
    # Create output file if it does not already exist.
    if t_fh is None:
        if bTargetAlignedPixels:
            # Expand the extent outward so edges fall on pixel boundaries.
            ulx = math.floor(ulx / psize_x) * psize_x
            lrx = math.ceil(lrx / psize_x) * psize_x
            lry = math.floor(lry / -psize_y) * -psize_y
            uly = math.ceil(uly / -psize_y) * -psize_y
        geotransform = [ulx, psize_x, 0, uly, 0, psize_y]
        # +0.5 rounds to the nearest whole pixel count.
        xsize = int((lrx - ulx) / geotransform[1] + 0.5)
        ysize = int((lry - uly) / geotransform[5] + 0.5)
        if separate != 0:
            # One output band per input band across all files.
            bands = 0
            for fi in file_infos:
                bands = bands + fi.bands
        else:
            bands = file_infos[0].bands
        t_fh = Driver.Create(out_file, xsize, ysize, bands, band_type, create_options)
        if t_fh is None:
            print('Creation failed, terminating gdal_merge.')
            sys.exit(1)
        t_fh.SetGeoTransform(geotransform)
        t_fh.SetProjection(file_infos[0].projection)
        if copy_pct:
            t_fh.GetRasterBand(1).SetRasterColorTable(file_infos[0].ct)
    else:
        # Reusing an existing output: verify it has enough bands.
        if separate != 0:
            bands = 0
            for fi in file_infos:
                bands = bands + fi.bands
            if t_fh.RasterCount < bands:
                print(
                    'Existing output file has less bands than the input files. You should delete it before. Terminating gdal_merge.'
                )
                sys.exit(1)
        else:
            bands = min(file_infos[0].bands, t_fh.RasterCount)
    # Do we need to set nodata value ?
    if a_nodata is not None:
        for i in range(t_fh.RasterCount):
            t_fh.GetRasterBand(i + 1).SetNoDataValue(a_nodata)
    # Do we need to pre-initialize the whole mosaic file to some value?
    # NOTE: pre_init is initialized to [] above, so this test is always
    # true; the inner length checks do the real gating.
    if pre_init is not None:
        if t_fh.RasterCount <= len(pre_init):
            # One fill value per band.
            for i in range(t_fh.RasterCount):
                t_fh.GetRasterBand(i + 1).Fill(pre_init[i])
        elif len(pre_init) == 1:
            # A single fill value applied to every band.
            for i in range(t_fh.RasterCount):
                t_fh.GetRasterBand(i + 1).Fill(pre_init[0])
    # Copy data from source files into output file.
    t_band = 1
    if quiet == 0 and verbose == 0:
        progress(0.0)
    fi_processed = 0
    for fi in file_infos:
        if createonly != 0:
            # -createonly: set up the empty mosaic but copy nothing.
            continue
        if verbose != 0:
            print("")
            print(
                "Processing file %5d of %5d, %6.3f%% completed in %d minutes."
                % (fi_processed + 1, len(file_infos),
                   fi_processed * 100.0 / len(file_infos),
                   int(round((time.time() - start_time) / 60.0))))
            fi.report()
        if separate == 0:
            # Stacked mode: band N of every input goes to output band N.
            for band in range(1, bands + 1):
                fi.copy_into(t_fh, band, band, nodata)
        else:
            # Separate mode: each input band gets its own output band.
            for band in range(1, fi.bands + 1):
                fi.copy_into(t_fh, band, t_band, nodata)
                t_band = t_band + 1
        fi_processed = fi_processed + 1
        if quiet == 0 and verbose == 0:
            progress(fi_processed / float(len(file_infos)))
    # Force file to be closed.
    t_fh = None
def generate_data(self):
    '''
    Description:
        Provides the main processing algorithm for building the Land
        Surface Temperature product.  It produces the final LST product.

    Raises:
        Exception: if the input XML metadata cannot be read, if the
            satellite is not Landsat 4/5/7/8, or if the TOA blue band
            is missing from the input metadata.
    '''

    try:
        self.retrieve_metadata_information()
    except Exception:
        self.logger.exception('Failed reading input XML metadata file')
        raise

    # Register all the gdal drivers and choose the ENVI for our output
    gdal.AllRegister()
    envi_driver = gdal.GetDriverByName('ENVI')

    # Read the bands into memory

    # Landsat Radiance at sensor for thermal band
    self.logger.info('Loading intermediate thermal band data [{0}]'.format(
        self.thermal_name))
    dataset = gdal.Open(self.thermal_name)
    x_dim = dataset.RasterXSize  # They are all the same size
    y_dim = dataset.RasterYSize
    thermal_data = dataset.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

    # Atmospheric transmittance
    self.logger.info(
        'Loading intermediate transmittance band data [{0}]'.format(
            self.transmittance_name))
    dataset = gdal.Open(self.transmittance_name)
    trans_data = dataset.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

    # Atmospheric path radiance - upwelled radiance
    self.logger.info(
        'Loading intermediate upwelled band data [{0}]'.format(
            self.upwelled_name))
    dataset = gdal.Open(self.upwelled_name)
    upwelled_data = dataset.GetRasterBand(1).ReadAsArray(0, 0, x_dim, y_dim)

    self.logger.info('Calculating surface radiance')
    # Surface radiance.  Fill (no-data) pixels may divide invalidly;
    # suppress the warning and patch those locations right below.
    with np.errstate(invalid='ignore'):
        surface_radiance = (thermal_data - upwelled_data) / trans_data

    # Fix the no data locations
    no_data_locations = np.where(thermal_data == self.no_data_value)
    surface_radiance[no_data_locations] = self.no_data_value

    no_data_locations = np.where(trans_data == self.no_data_value)
    surface_radiance[no_data_locations] = self.no_data_value

    no_data_locations = np.where(upwelled_data == self.no_data_value)
    surface_radiance[no_data_locations] = self.no_data_value

    # Memory cleanup
    del thermal_data
    del trans_data
    del upwelled_data
    del no_data_locations

    # Downwelling sky irradiance
    self.logger.info(
        'Loading intermediate downwelled band data [{0}]'.format(
            self.downwelled_name))
    dataset = gdal.Open(self.downwelled_name)
    downwelled_data = dataset.GetRasterBand(1).ReadAsArray(
        0, 0, x_dim, y_dim)

    # Landsat emissivity estimated from ASTER GED data
    self.logger.info(
        'Loading intermediate emissivity band data [{0}]'.format(
            self.emissivity_name))
    dataset = gdal.Open(self.emissivity_name)
    emissivity_data = dataset.GetRasterBand(1).ReadAsArray(
        0, 0, x_dim, y_dim)

    # Save the projection / geo-transform for the output product
    ds_srs = osr.SpatialReference()
    ds_srs.ImportFromWkt(dataset.GetProjection())
    ds_transform = dataset.GetGeoTransform()

    # Memory cleanup
    del dataset

    # Estimate Earth-emitted radiance by subtracting off the reflected
    # downwelling component
    radiance = (surface_radiance -
                (1.0 - emissivity_data) * downwelled_data)

    # Account for surface emissivity to get Plank emitted radiance
    self.logger.info('Calculating Plank emitted radiance')
    with np.errstate(invalid='ignore'):
        radiance_emitted = radiance / emissivity_data

    # Fix the no data locations
    no_data_locations = np.where(surface_radiance == self.no_data_value)
    radiance_emitted[no_data_locations] = self.no_data_value

    no_data_locations = np.where(downwelled_data == self.no_data_value)
    radiance_emitted[no_data_locations] = self.no_data_value

    no_data_locations = np.where(emissivity_data == self.no_data_value)
    radiance_emitted[no_data_locations] = self.no_data_value

    # Memory cleanup
    del downwelled_data
    del emissivity_data
    del surface_radiance
    del radiance
    del no_data_locations

    # Use Brightness Temperature LUT to get skin temperature
    # Read the correct one for what we are processing
    if self.satellite == 'LANDSAT_8':
        self.logger.info('Using Landsat 8 Brightness Temperature LUT')
        bt_name = 'L8_Brightness_Temperature_LUT.txt'
    elif self.satellite == 'LANDSAT_7':
        self.logger.info('Using Landsat 7 Brightness Temperature LUT')
        bt_name = 'L7_Brightness_Temperature_LUT.txt'
    elif self.satellite == 'LANDSAT_5':
        self.logger.info('Using Landsat 5 Brightness Temperature LUT')
        bt_name = 'L5_Brightness_Temperature_LUT.txt'
    elif self.satellite == 'LANDSAT_4':
        self.logger.info('Using Landsat 4 Brightness Temperature LUT')
        bt_name = 'L4_Brightness_Temperature_LUT.txt'
    else:
        # BUG FIX: the original fell through with bt_name undefined for
        # any other satellite, causing a confusing NameError at the
        # np.loadtxt call below.  Fail with an explicit message instead.
        raise Exception('Unsupported satellite [{0}] for Brightness'
                        ' Temperature LUT selection'.format(self.satellite))

    # LUT columns: [:, 0] temperature, [:, 1] radiance
    bt_data = np.loadtxt(os.path.join(self.lst_data_dir, bt_name),
                         dtype=float, delimiter=' ')
    bt_radiance_lut = bt_data[:, 1]
    bt_temp_lut = bt_data[:, 0]

    self.logger.info('Generating LST results')
    # Interpolate skin temperature from emitted radiance via the LUT
    lst_data = np.interp(radiance_emitted, bt_radiance_lut, bt_temp_lut)

    # Scale the result
    lst_data = lst_data * MULT_FACTOR

    # Add the fill and scan gaps back into the results, since they may
    # have been lost
    self.logger.info('Adding fill and data gaps back into the Land'
                     ' Surface Temperature results')

    # Fix the no data locations
    no_data_locations = np.where(radiance_emitted == self.no_data_value)
    lst_data[no_data_locations] = self.no_data_value

    # Memory cleanup
    del radiance_emitted
    del no_data_locations

    product_id = self.xml_filename.split('.xml')[0]
    lst_img_filename = ''.join([product_id, '_lst', '.img'])
    lst_hdr_filename = ''.join([product_id, '_lst', '.hdr'])
    lst_aux_filename = ''.join([lst_img_filename, '.aux', '.xml'])

    self.logger.info('Creating {0}'.format(lst_img_filename))
    util.Geo.generate_raster_file(envi_driver, lst_img_filename,
                                  lst_data, x_dim, y_dim, ds_transform,
                                  ds_srs.ExportToWkt(),
                                  self.no_data_value, gdal.GDT_Int16)

    self.logger.info('Updating {0}'.format(lst_hdr_filename))
    util.Geo.update_envi_header(lst_hdr_filename, self.no_data_value)

    # Memory cleanup
    del ds_srs
    del ds_transform

    # Remove the *.aux.xml file generated by GDAL
    if os.path.exists(lst_aux_filename):
        os.unlink(lst_aux_filename)

    self.logger.info('Adding {0} to {1}'.format(lst_img_filename,
                                                self.xml_filename))

    # Add the estimated Land Surface Temperature product to the metadata
    espa_xml = metadata_api.parse(self.xml_filename, silence=True)
    bands = espa_xml.get_bands()
    sensor_code = product_id[0:4]

    # Find the TOA Band 1 to use for the specific band details
    base_band = None
    for band in bands.band:
        if band.product == 'toa_refl' and band.name == 'toa_band1':
            base_band = band

    if base_band is None:
        raise Exception('Failed to find the TOA BLUE band'
                        ' in the input data')

    lst_band = metadata_api.band(product='lst',
                                 source='toa_refl',
                                 name='land_surface_temperature',
                                 category='image',
                                 data_type='INT16',
                                 scale_factor=SCALE_FACTOR,
                                 add_offset=0,
                                 nlines=base_band.get_nlines(),
                                 nsamps=base_band.get_nsamps(),
                                 fill_value=str(self.no_data_value))

    lst_band.set_short_name('{0}LST'.format(sensor_code))
    lst_band.set_long_name('Land Surface Temperature')
    lst_band.set_file_name(lst_img_filename)
    lst_band.set_data_units('temperature (kelvin)')

    # BUG FIX: the original passed pixel_size.x for *both* axes (copy-
    # paste); use the y size for the second argument.  Landsat pixels
    # are square, so emitted metadata is unchanged for valid inputs.
    pixel_size = metadata_api.pixel_size(base_band.pixel_size.x,
                                         base_band.pixel_size.y,
                                         base_band.pixel_size.units)
    lst_band.set_pixel_size(pixel_size)

    lst_band.set_resample_method('none')

    # Valid range in scaled units; presumably 150.0K - 373.0K after the
    # MULT_FACTOR scaling -- TODO confirm against product spec.
    valid_range = metadata_api.valid_range(min=1500, max=3730)
    lst_band.set_valid_range(valid_range)

    # Set the date, but first clean the microseconds off of it
    production_date = (datetime.datetime.strptime(
        datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
        '%Y-%m-%dT%H:%M:%S'))
    lst_band.set_production_date(production_date)

    lst_band.set_app_version(util.Version.app_version())

    bands.add_band(lst_band)

    # Write the XML metadata file out
    with open(self.xml_filename, 'w') as metadata_fd:
        metadata_api.export(metadata_fd, espa_xml)

    # Memory cleanup
    del lst_band
    del bands
    del espa_xml
    del lst_data
def mmse_filter(infile, m, dims=None):
    '''Apply an edge-preserving MMSE speckle filter to a polSAR image.

    Reads a covariance-matrix image (9-band C3, 4-band C2, or single
    band), derives per-pixel adaptive filter weights from the span
    (total power) image using 7x7 windows and directional edge masks,
    then filters every band and writes the result to
    ``<infile_root>_mmse<ext>`` next to the input.

    infile -- path to the input image (opened with GDAL)
    m      -- number of looks of the input data
    dims   -- optional spatial subset [x0, y0, cols, rows]; defaults to
              the full image

    NOTE(review): relies on module-level ``edges``, ``templates`` and
    ``get_windex`` (defined elsewhere in this file) -- assumed to hold
    the 4 edge-detection kernels, the 8 directional window-pixel masks,
    and the flat 7x7 window index generator; confirm against the rest
    of the module.  Python 2 print syntax.
    '''
    gdal.AllRegister()
    inDataset = gdal.Open(infile,GA_ReadOnly)
    cols = inDataset.RasterXSize
    rows = inDataset.RasterYSize
    bands = inDataset.RasterCount
    if dims == None:
        dims = [0,0,cols,rows]
    x0,y0,cols,rows = dims
    path = os.path.dirname(infile)
    basename = os.path.basename(infile)
    root, ext = os.path.splitext(basename)
    outfile = path + '/' + root + '_mmse' + ext
    # get filter weights from span image
    # b[j,i] is the adaptive MMSE gain per pixel; 1.0 where not updated.
    b = np.ones((rows,cols))
    band = inDataset.GetRasterBand(1)
    span = band.ReadAsArray(x0,y0,cols,rows).ravel()
    if bands==9:
        # 9-band C3 matrix: diagonal elements are bands 1, 6 and 9.
        band = inDataset.GetRasterBand(6)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
        band = inDataset.GetRasterBand(9)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
    elif bands==4:
        # 4-band C2 matrix: diagonal elements are bands 1 and 4.
        band = inDataset.GetRasterBand(4)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
    # Chosen directional template (0-7) for every pixel.
    edge_idx = np.zeros((rows,cols),dtype=int)
    print '========================='
    print ' MMSE_FILTER'
    print '========================='
    print time.asctime()
    print 'infile: %s'%infile
    print 'number of looks: %i'%m
    print 'Determining filter weights from span image'
    start = time.time()
    print 'row: ',
    sys.stdout.flush()
    # 3-pixel border is skipped: the 7x7 window must fit entirely.
    for j in range(3,rows-3):
        if j%50 == 0:
            print '%i '%j,
            sys.stdout.flush()
        windex = get_windex(j,cols)
        for i in range(3,cols-3):
            wind = np.reshape(span[windex],(7,7))
            # 3x3 compression
            w = congrid.congrid(wind,(3,3),method='spline',centre=True)
            # get appropriate edge mask
            es = [np.sum(edges[p]*w) for p in range(4)]
            idx = np.argmax(es)
            # Resolve which side of the detected edge the centre pixel
            # lies on, picking one of the 8 directional templates.
            if idx == 0:
                if np.abs(w[1,1]-w[1,0]) < np.abs(w[1,1]-w[1,2]):
                    edge_idx[j,i] = 0
                else:
                    edge_idx[j,i] = 4
            elif idx == 1:
                if np.abs(w[1,1]-w[2,0]) < np.abs(w[1,1]-w[0,2]):
                    edge_idx[j,i] = 1
                else:
                    edge_idx[j,i] = 5
            elif idx == 2:
                if np.abs(w[1,1]-w[0,1]) < np.abs(w[1,1]-w[2,1]):
                    edge_idx[j,i] = 6
                else:
                    edge_idx[j,i] = 2
            elif idx == 3:
                if np.abs(w[1,1]-w[0,0]) < np.abs(w[1,1]-w[2,2]):
                    edge_idx[j,i] = 7
                else:
                    edge_idx[j,i] = 3
            edge = templates[edge_idx[j,i]]
            # Statistics over the one-sided (edge-aligned) window only.
            wind = wind.ravel()[edge]
            gbar = np.mean(wind)
            varg = np.var(wind)
            if varg > 0:
                # MMSE gain, clamped to >= 0 (np.max over a 2-tuple).
                b[j,i] = np.max( ((1.0 - gbar**2/(varg*m))/(1.0+1.0/m), 0.0) )
            windex += 1
    print ' done'
    # filter the image
    outim = np.zeros((rows,cols),dtype=np.float32)
    driver = inDataset.GetDriver()
    outDataset = driver.Create(outfile,cols,rows,bands,GDT_Float32)
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        # Shift the origin to account for the spatial subset offset.
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    print 'Filtering covariance matrix elememnts'
    for k in range(1,bands+1):
        print 'band: %i'%(k)
        band = inDataset.GetRasterBand(k)
        band = band.ReadAsArray(0,0,cols,rows)
        gbar = band*0.0
        # get window means
        for j in range(3,rows-3):
            windex = get_windex(j,cols)
            for i in range(3,cols-3):
                wind = band.ravel()[windex]
                edge = templates[edge_idx[j,i]]
                wind = wind[edge]
                gbar[j,i] = np.mean(wind)
                windex += 1
        # apply adaptive filter and write to disk
        # out = local mean + gain * (pixel - local mean)
        outim = np.reshape(gbar + b*(band-gbar),(rows,cols))
        outBand = outDataset.GetRasterBand(k)
        outBand.WriteArray(outim,0,0)
        outBand.FlushCache()
    outDataset = None
    print 'result written to: '+outfile
    print 'elapsed time: '+str(time.time()-start)
def main():
    """Command-line driver: band-wise ratio of two polSAR images.

    Divides the selected bands of the first image by those of the
    second (zeros in the divisor are replaced by 0.0001) and writes a
    GDT_Float32 result named ``(<root1>-over-<root2>)<ext>`` next to
    the first input file.
    """
    # BUG FIX: the original usage text contained a bare '%s' that was
    # never formatted, so the help printed a literal '%s'.  Format it
    # with the script name, as the sibling scripts do.
    usage = '''
Usage:
------------------------------------------------
Calculate the ratio of two polSAR images

python %s [OPTIONS] filename1 filename2

Options:

-h          this help
-p  <list>  band positions e.g. -p [1,2,3,4,5,7]
-d  <list>  spatial subset
''' % sys.argv[0]
    options, args = getopt.getopt(sys.argv[1:], 'hd:p:')
    dims = None
    pos = None
    for option, value in options:
        if option == '-h':
            print(usage)
            return
        elif option == '-d':
            # NOTE: eval of a command-line value -- only run with
            # trusted input (kept for compatibility with the sibling
            # scripts in this file).
            dims = eval(value)
        elif option == '-p':
            pos = eval(value)
    if len(args) == 2:
        fn1 = args[0]
        fn2 = args[1]
    else:
        print('Incorrect number of arguments')
        print(usage)
        sys.exit(1)
    path = os.path.dirname(fn1)
    basename1 = os.path.basename(fn1)
    basename2 = os.path.basename(fn2)
    root1, ext = os.path.splitext(basename1)
    root2, ext = os.path.splitext(basename2)
    # BUG FIX: the original used "path + '/' + ...", which produces an
    # absolute path at the filesystem root ('/(...)') whenever fn1 has
    # no directory component.  os.path.join handles the empty path.
    outfile = os.path.join(path, '(' + root1 + '-over-' + root2 + ')' + ext)
    gdal.AllRegister()
    # Geometry is taken from the first image; the second is assumed to
    # be co-registered and at least as large -- TODO confirm.
    inDataset = gdal.Open(fn1, GA_ReadOnly)
    cols = inDataset.RasterXSize
    rows = inDataset.RasterYSize
    bands = inDataset.RasterCount
    if dims:
        x0, y0, cols, rows = dims
    else:
        x0 = 0
        y0 = 0
    if pos is not None:
        bands = len(pos)
    else:
        pos = range(1, bands + 1)
    # Numerator bands, one flattened band per column; NaNs -> 0.
    g1 = np.zeros((cols * rows, bands))
    k = 0
    for b in pos:
        g1[:, k] = np.nan_to_num(
            inDataset.GetRasterBand(b).ReadAsArray(x0, y0, cols,
                                                   rows).ravel())
        k += 1
    # Denominator bands from the second image, same subset.
    inDataset = gdal.Open(fn2, GA_ReadOnly)
    g2 = np.zeros((cols * rows, bands))
    k = 0
    for b in pos:
        g2[:, k] = np.nan_to_num(
            inDataset.GetRasterBand(b).ReadAsArray(x0, y0, cols,
                                                   rows).ravel())
        k += 1
    # Guard against division by zero in the denominator image.
    g2 = np.where(g2 == 0, 0.0001, g2)
    ratio = g1 / g2
    driver = inDataset.GetDriver()
    outDataset = driver.Create(outfile, cols, rows, bands, GDT_Float32)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        # Shift the origin to account for the spatial subset offset.
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(bands):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(np.reshape(ratio[:, k], (rows, cols)), 0, 0)
        outBand.FlushCache()
    outDataset = None
    print('Ratio image written to: %s' % outfile)
def main():
    """Command-line driver: gamma MAP filter on a C or T matrix image.

    Filters the diagonal elements of a covariance (C) / coherency (T)
    matrix image (9-band, 4-band, or single band) with ``gamma_filter``
    and writes a GDT_Float32 result to ``<root>_gamma<ext>`` in the
    input file's directory.
    """
    usage = '''
Usage:
------------------------------------------------
python %s [-h] [-d dims] filename enl

Run a gamma Map filter in the diagonal elements
of a C or T matrix
------------------------------------------------''' % sys.argv[0]
    options, args = getopt.getopt(sys.argv[1:], 'hd:')
    dims = None
    for option, value in options:
        if option == '-h':
            print(usage)
            return
        elif option == '-d':
            # NOTE: eval of a command-line value -- only run with
            # trusted input (kept for compatibility with the sibling
            # scripts in this file).
            dims = eval(value)
    if len(args) != 2:
        print('Incorrect number of arguments')
        print(usage)
        sys.exit(1)
    infile = args[0]
    m = int(args[1])  # equivalent number of looks
    gdal.AllRegister()
    inDataset = gdal.Open(infile, GA_ReadOnly)
    cols = inDataset.RasterXSize
    rows = inDataset.RasterYSize
    bands = inDataset.RasterCount
    if dims is None:
        dims = [0, 0, cols, rows]
    x0, y0, cols, rows = dims
    path = os.path.abspath(infile)
    dirn = os.path.dirname(path)
    basename = os.path.basename(infile)
    root, ext = os.path.splitext(basename)
    outfile = os.path.join(dirn, root + '_gamma' + ext)
    # process diagonal bands only
    driver = inDataset.GetDriver()
    if bands == 9:
        # 9-band C3/T3 matrix: diagonal elements are bands 1, 6 and 9.
        outDataset = driver.Create(outfile, cols, rows, 3, GDT_Float32)
        inimage = np.zeros((3, rows, cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(6)
        inimage[1] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(9)
        inimage[2] = band.ReadAsArray(x0, y0, cols, rows)
    elif bands == 4:
        # 4-band C2/T2 matrix: diagonal elements are bands 1 and 4.
        outDataset = driver.Create(outfile, cols, rows, 2, GDT_Float32)
        inimage = np.zeros((2, rows, cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(4)
        inimage[1] = band.ReadAsArray(x0, y0, cols, rows)
    else:
        outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32)
        # BUG FIX: the original assigned the gdal Band *object* itself
        # (inDataset.GetRasterBand(1)), never reading the pixel data
        # and ignoring the spatial subset, so np.copy below could not
        # produce a pixel array.  Read the array like the other cases.
        # NOTE(review): gamma_filter's handling of a 2-D single-band
        # image (k == 0) should be confirmed against its definition
        # elsewhere in this file.
        inimage = inDataset.GetRasterBand(1).ReadAsArray(x0, y0, cols, rows)
    outimage = np.copy(inimage)
    print('=========================')
    print('    GAMMA MAP FILTER')
    print('=========================')
    print(time.asctime())
    print('infile: %s' % infile)
    print('equivalent number of looks: %i' % m)
    start = time.time()
    if bands == 9:
        for k in range(3):
            outimage[k] = gamma_filter(k, inimage, outimage, rows, cols, m)
    elif bands == 4:
        for k in range(2):
            outimage[k] = gamma_filter(k, inimage, outimage, rows, cols, m)
    else:
        outimage = gamma_filter(0, inimage, outimage, rows, cols, m)
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        # Shift the origin to account for the spatial subset offset.
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    if bands == 9:
        for k in range(3):
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(outimage[k], 0, 0)
            outBand.FlushCache()
    elif bands == 4:
        for k in range(2):
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(outimage[k], 0, 0)
            outBand.FlushCache()
    else:
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(outimage, 0, 0)
        outBand.FlushCache()
    outDataset = None
    print('result written to: ' + outfile)
    print('elapsed time: ' + str(time.time() - start))