def cutFiles(arg):
    """Clip a set of GeoTIFF scenes to their common overlap region.

    All scenes are clipped at the coarsest (largest) pixel size found among
    them.  Scenes whose UTM zone differs from the first scene's zone are
    reprojected first (writing "*_reproj.tif" and replacing the entry in
    ``arg`` in place).  One "*_clip.tif" is written per input scene.

    :param arg: list of GeoTIFF file names; may be modified in place
    :return: 0 when there are fewer than two scenes (nothing to do)
    """
    # Robustness fix: the original only tested len(arg) == 1, so an empty
    # list crashed on arg[0] below.
    if len(arg) < 2:
        print("Nothing to do!!! Exiting...")
        return 0

    # Open the first scene; its projection defines the target UTM zone.
    dst1 = gdal.Open(arg[0])
    p1 = dst1.GetProjection()

    # Find the largest pixel size of all scenes.
    pixSize = max(getPixSize(scene) for scene in arg)

    # Make sure that UTM projections match; reproject any scene whose zone
    # differs from the first scene's.
    ptr = p1.find("UTM zone ")
    if ptr != -1:
        # Raw-string regexes (the originals used "\d" which is an invalid
        # escape sequence and raises a DeprecationWarning).
        match = re.search(r"(\d+)(.)", p1[ptr:])
        zone1 = int(match.group(1))
        hemi = match.group(2)
        for x in range(1, len(arg)):
            file2 = arg[x]
            # Open up file2, get projection
            p2 = gdal.Open(file2).GetProjection()
            # Cut the UTM zone out of projection2
            ptr = p2.find("UTM zone ")
            zone2 = int(re.search(r"(\d+)", p2[ptr:]).group(1))
            if zone1 != zone2:
                print("Projections don't match... Reprojecting %s" % file2)
                # EPSG 326xx = WGS84/UTM north; 327xx = south.
                if hemi == "N":
                    proj = 'EPSG:326%02d' % zone1
                else:
                    proj = 'EPSG:327%02d' % zone1
                print(" reprojecting post image")
                print(" proj is %s" % proj)
                name = file2.replace(".tif", "_reproj.tif")
                gdal.Warp(name, file2, dstSRS=proj, xRes=pixSize, yRes=pixSize)
                arg[x] = name

    # Find the overlap between all scenes.
    coords = getCorners(arg[0])
    for scene in arg[1:]:
        coords = getOverlap(coords, scene)

    # Check to make sure there was some overlap.
    print("Clipping coordinates: {}".format(coords))
    diff1 = (coords[2] - coords[0]) / pixSize
    diff2 = (coords[3] - coords[1]) / pixSize * -1.0
    print("Found overlap size of {}x{}".format(int(diff1), int(diff2)))
    if diff1 < 1 or diff2 < 1:
        print("ERROR: There was no overlap between scenes")
        exit(1)

    # Finally, clip all scenes to the overlap region at the largest pixel
    # size.  gdal.Warp's outputBounds wants (minX, minY, maxX, maxY), so the
    # two y coordinates are swapped (tuple swap replaces the manual 3-step
    # temp-variable swap of the original).
    lst = list(coords)
    lst[1], lst[3] = lst[3], lst[1]
    coords = tuple(lst)
    print("Pixsize : x = {} y = {}".format(pixSize, -1 * pixSize))
    for file1 in arg:
        file1_new = file1.replace('.tif', '_clip.tif')
        print(" clipping file {} to create file {}".format(file1, file1_new))
        # dst_d1 = gdal.Translate(file1_new,file1,projWin=coords,xRes=pixSize,yRes=pixSize,creationOptions = ['COMPRESS=LZW'])
        gdal.Warp(file1_new, file1, outputBounds=coords, xRes=pixSize,
                  yRes=-1 * pixSize, creationOptions=['COMPRESS=LZW'])
os.chdir(company.parameters['output_directory'])


def _export_map(prefix, array):
    """Write *array* as a WGS84 GeoTIFF and reproject it to the target EPSG.

    Creates '<prefix>_<company.name>_map.tif' (EPSG:4326 lat/long) and a
    reprojected '<prefix>_<company.name>_map_prj.tif'.  Relies on the
    surrounding scope for company, ncols, nrows, geotransform and epsg.
    """
    output_name = prefix + '_' + company.name + '_map' + '.tif'
    # BUG FIX: the original MCB branch had a stray quote
    # (Create('output_name, ...)) which broke the second export.
    output_raster = gdal.GetDriverByName('GTiff').Create(
        output_name, ncols, nrows, 1, gdal.GDT_Float32)
    output_raster.SetGeoTransform(geotransform)      # Specify file coordinates
    srs = osr.SpatialReference()                     # Establish coordinate encoding
    srs.ImportFromEPSG(4326)                         # Specify WGS84 lat/long
    output_raster.SetProjection(srs.ExportToWkt())   # Export the coordinate system to the file
    output_raster.GetRasterBand(1).WriteArray(array) # Write the array to the raster
    output_raster = None                             # Close/flush the dataset

    # Project the raster to the requested EPSG code.
    iras = gdal.Open(output_name)
    prj_output_name = prefix + '_' + company.name + '_map' + '_prj' + '.tif'
    gdal.Warp(prj_output_name, iras, dstSRS="EPSG:" + str(epsg))


# Export 2D proportions matrix as map
_export_map('DD', DD_output)
# Export 2D MCB matrix as map.
# BUG FIX: the original computed the MCB prj_output_name but never called
# gdal.Warp for it; the helper now performs the warp for both maps.
_export_map('MCB', MCB_output)
# Persist the downloaded Sentinel-2 product before processing.
product_request.save_data()

# Merge B02 (blue), B03 (green), B04 (red) and the band-8 (NIR) file into a
# single multi-band GeoTIFF per resolution (10/20/60 m).
for i in [10, 20, 60]:
    # NOTE(review): hard-coded Windows path to one specific SAFE granule —
    # presumably a one-off lab script; verify before reuse.
    path = 'A:\lab4\S2A_MSIL2A_20190821T085601_N0213_R007_T36UUA_20190821T115206.SAFE\GRANULE\L2A_T36UUA_A021740_20190821T085815\IMG_DATA\R{}m\\'.format(
        i)
    in1 = glob.glob(str(path) + ('*B02_{}m.jp2').format(i))
    in2 = glob.glob(str(path) + '*B03_{}m.jp2'.format(i))
    in3 = glob.glob(str(path) + '*B04_{}m.jp2'.format(i))
    # Loose pattern intended to catch the NIR band file (B08 / B8A).
    in4 = glob.glob(str(path) + '*_*_*8*_{}m.jp2'.format(i))
    # gdal_merge-style invocation: stack the four bands into ARxx.tif.
    gm.main([
        '', '-separate', '-o', 'AR{}.tif'.format(i), in1[0], in2[0], in3[0],
        in4[0]
    ])

# Reproject each merged raster to WGS84.
# NOTE(review): input names '1AR{}0.tif' / output '12AR{}0.tif' do not match
# the 'AR{}.tif' files produced above — looks like leftover renaming from an
# earlier revision; confirm the intended file names.
for i in [1, 2, 6]:
    gdal.Warp('12AR{}0.tif'.format(i),
              '1AR{}0.tif'.format(i),
              dstSRS="EPSG:4326")

# Mosaic the reprojected rasters into one file.
# NOTE(review): the 'proek_*' inputs are not created anywhere in this chunk —
# presumably produced by another script/step; verify.
gdal.Warp('final.tif', [
    'proek_AR10.tif', 'proek_AR20.tif', 'proek_AR60.tif', 'proek_BR10.tif',
    'proek_BR20.tif', 'proek_BR60.tif'
])

# Clip the mosaic to the Kyiv regions shapefile, writing 0 as nodata.
# NOTE(review): input 'AllInOne.tif' is also not produced in this chunk.
gdal.Warp('WrapedImg.tif',
          'AllInOne.tif',
          format='GTiff',
          cutlineDSName='Kyiv_regions.shp',
          cutlineLayer='extent',
          cropToCutline=True,
          dstNodata=0)
def convert_files(s1aFlag, proj=None, res=30):
    """Produce KMZ, GeoTIFF and browse-image products from ISCE outputs.

    Builds phase (band 2), amplitude (band 1) and coherence GeoTIFFs from
    "filt_topophase.unw.geo" / "phsig.cor.geo".  When *proj* is given, each
    product is reprojected to it at *res* meters with cubic resampling;
    otherwise the geographic (lat/long) rasters are written directly.

    :param s1aFlag: unused here; kept for interface compatibility
    :param proj: target SRS for reprojection, or None to stay in lat/long
    :param res: output pixel size in meters when reprojecting
    """
    # KMZ overlays for the unwrapped and flattened phase.
    makeKMZ("filt_topophase.unw.geo", "unw")
    shutil.move("unw.kmz", "colorized_unw.kmz")
    makeKMZ("filt_topophase.flat.geo", "col")
    shutil.move("col.kmz", "color.kmz")

    gcsname = "tmp_gcs.tif"

    # Options shared by every Translate / Warp call below.
    packbits = ['COMPRESS=PACKBITS']
    warp_kw = dict(dstSRS=proj, xRes=res, yRes=res, resampleAlg="cubic",
                   dstNodata=0, creationOptions=['COMPRESS=LZW'])

    # --- Phase image (band 2) ---
    if proj is None:
        gdal.Translate("phase.tif", "filt_topophase.unw.geo", bandList=[2],
                       creationOptions=packbits)
        shutil.copy("phase.tif", gcsname)
    else:
        print("Creating tmp.tif")
        gdal.Translate("tmp.tif", "filt_topophase.unw.geo.vrt", bandList=[2],
                       creationOptions=packbits)
        print("phase.tif")
        gdal.Warp("phase.tif", "tmp.tif", **warp_kw)
        print("mv tmp.tif {}".format(gcsname))
        # Keep the lat/long copy around for the browse images below.
        shutil.copy("tmp.tif", gcsname)
        # os.remove("tmp.tif")

    # --- Browse images (rendered from the lat/long copy) ---
    print("Creating browse image colorized_unw.png")
    create_browse("unw.png", "colorized_unw.png",
                  "colorized_unw.png.aux.xml", gcsname, proj, 1024)
    create_browse("unw.png", "colorized_unw_large.png",
                  "colorized_unw_large.png.aux.xml", gcsname, proj, 2048)
    print("Creating browse image color.png")
    create_browse("col.png", "color.png", "color.png.aux.xml",
                  gcsname, proj, 1024)
    print("Creating browse image color_large.png")
    create_browse("col.png", "color_large.png", "color_large.png.aux.xml",
                  gcsname, proj, 2048)

    # --- Amplitude image (band 1) ---
    if proj is None:
        gdal.Translate("amp.tif", "filt_topophase.unw.geo", bandList=[1],
                       creationOptions=packbits)
    else:
        gdal.Translate("tmp.tif", "filt_topophase.unw.geo.vrt", bandList=[1],
                       creationOptions=packbits)
        gdal.Warp("amp.tif", "tmp.tif", **warp_kw)
        os.remove("tmp.tif")

    # --- Coherence image ---
    if proj is None:
        gdal.Translate("coherence.tif", "phsig.cor.geo",
                       creationOptions=packbits)
    else:
        gdal.Translate("tmp.tif", "phsig.cor.geo.vrt",
                       creationOptions=packbits)
        gdal.Warp("coherence.tif", "tmp.tif", **warp_kw)
        os.remove("tmp.tif")
temp2.SetGeoTransform(temp1.GetGeoTransform()) temp2.SetProjection(temp1.GetProjection()) temp2_band = temp2.GetRasterBand(1) gdal.ComputeProximity(temp1_band, temp2_band, ["VALUES=1"]) temp2_band.FlushCache() # Clip to region area warp_options = gdal.WarpOptions( outputBounds=match_ds.rio.bounds(), creationOptions=["COMPRESS=LZW"], outputType=gdal.GDT_Int16, dstNodata=NODATA_VALUE, cutlineDSName=region.get("path"), ) dst_fn = os.path.join(output_folder, f"{feature.get('name')}_proximity.tif") temp3 = gdal.Warp(dst_fn, temp2, options=warp_options) stack.append(temp3.ReadAsArray()) stack = np.stack(stack) stack = np.ma.array(stack, mask=(stack == NODATA_VALUE)) arr = stack.min(axis=0) arr = arr.filled(NODATA_VALUE) combined_fn = os.path.join(output_folder, "comb_proximity.tif") with rasterio.open( combined_fn, "w", driver="GTiff", width=match_ds.rio.width, height=match_ds.rio.height, count=1,
def match_netcdf_to_data(src_filename, match_filename, dst_filename, year,
                         country_code=None, shpfile=None, force=False,
                         nodata=-300, frmat='GTiff', verbose=False):
    '''
    Resample the 't2m' layer of a netCDF file onto the grid of another raster.

    see : https://stackoverflow.com/questions/10454316/
    how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python

    :param src_filename: source netCDF file containing a 't2m' variable
    :param match_filename: raster whose projection/geotransform/size to match
    :param dst_filename: output file name
    :param year: unused; kept for interface compatibility
    :param country_code: FIPS country code used to crop via the cutline
    :param shpfile: shapefile used as cutline (together with country_code)
    :param force: re-create dst_filename even if it already exists
    :param nodata: output nodata value
    :param frmat: output GDAL format
    :param verbose: print progress information
    :return: (timer, dst_filename, extent) on success, None on failure
    '''
    # Get the projection, geotransform and dataset size we want to match to.
    if verbose:
        print(f'getting info from match file {match_filename}')
    match_ds = gdal.Open(match_filename, gdalconst.GA_ReadOnly)
    match_proj = match_ds.GetProjection()
    match_geotrans = match_ds.GetGeoTransform()
    wide = match_ds.RasterXSize
    high = match_ds.RasterYSize
    # close the file -- we dont need it any more
    del match_ds

    # Access the 't2m' sub-dataset of the source netCDF file.
    if verbose:
        print(f'getting info from source netcdf file {src_filename}')
    src_dataname = 'NETCDF:"' + src_filename + '":t2m'
    try:
        # Narrowed from the original bare "except:" which also swallowed
        # KeyboardInterrupt/SystemExit.
        src = gdal.Open(src_dataname, gdalconst.GA_ReadOnly)
    except Exception:
        if verbose:
            print('failed')
        return None
    # gdal.Open normally returns None instead of raising; guard that too.
    if src is None:
        if verbose:
            print('failed')
        return None

    # get meta data
    meta = gdal.Open(src_filename, gdalconst.GA_ReadOnly).GetMetadata()
    extent = [match_geotrans[0],
              match_geotrans[0] + match_geotrans[1] * wide,
              match_geotrans[3] + match_geotrans[5] * high,
              match_geotrans[3]]

    # Time axis: NETCDF_DIM_time_VALUES holds hours since 1900-01-01.
    timer = np.array([(datetime(1900, 1, 1) + timedelta(days=float(i) / 24.))
                      for i in
                      meta['NETCDF_DIM_time_VALUES'][1:-1].split(',')])

    if (not Path(dst_filename).exists()) or force:
        # Projection, data type and band count of the source dataset
        # (the original also read the source geotransform but never used it).
        band1 = src.GetRasterBand(1)
        src_proj = src.GetProjection()
        nbands = src.RasterCount
        src_format = band1.DataType

        dst = gdal.GetDriverByName('MEM').Create('', wide, high,
                                                 nbands, src_format)
        dst.SetGeoTransform(match_geotrans)
        dst.SetProjection(match_proj)

        if verbose:
            print('reprojecting ...')
        # Bilinear reprojection onto the match grid.
        _ = gdal.ReprojectImage(src, dst, src_proj, match_proj,
                                gdalconst.GRA_Bilinear)

        # BUG FIX: the original always formatted country_code into the
        # cutline clause, so calling with the default country_code=None
        # raised a TypeError; only crop when both cutline inputs are given.
        warp_kwargs = dict(format=frmat, dstNodata=nodata)
        if shpfile is not None and country_code is not None:
            if verbose:
                print(f'cropping to {country_code:s} ...')
            warp_kwargs.update(cutlineDSName=shpfile,
                               cutlineWhere=f"FIPS='{country_code:s}'",
                               cropToCutline=True)
        done = gdal.Warp(dst_filename, dst, **warp_kwargs)
        del dst

    return (timer, dst_filename, extent)
def warp(src, dst, dtype="Float32", template=None, overwrite=False,
         compress=None, **kwargs):
    """
    Warp a raster to a new geometry.

    Parameters
    ----------
    src : str
        Path to source raster file.
    dst : str
        Path to target raster file.
    dtype : str | gdal object
        GDAL data type. Can be a string or a gdal type object (e.g.
        gdal.GDT_Float32, "GDT_Float32", "float32"). Available GDAL data
        types and descriptions can be found in the GDAL_TYPES dictionary.
    template : str
        Path to a raster file with desired target raster geometry, crs,
        resolution, and extent values. This will overwrite other arguments
        provided for these parameters. Template-derived arguments will
        overwrite **kwargs.
    overwrite : boolean
    compress : str
        A compression technique. Available options are "DEFLATE", "JPEG",
        "LZW"
    **kwargs
        Any available key word arguments for gdalwarp. Available options
        and descriptions can be found using gdal_options("warp").

    Returns
    -------
    None.

    Example:
        warp(src="/Users/twillia2/Box/WETO 1.2/data/rasters/agcounty_product.tif",
             dst="/Users/twillia2/Box/WETO 1.2/data/rasters/test.tif",
             template="/Users/twillia2/Box/WETO 1.2/data/rasters/albers/acre/cost_codes_ac.tif",
             dstSRS="epsg:102008")
    """
    # Create progress callback - these behave differently by module
    def warp_progress(percent, message, unknown):
        """A progress callback that recreates the gdal printouts."""
        # We don't need the message or unknown objects
        del message, unknown

        # Between numeric printouts we need three dots
        dots = [[str(i) + d for d in ["2", "5", "8"]] for i in range(10)]
        dots = [int(l) for sl in dots for l in sl]

        # If divisible by ten, print the number
        if percent % 10 == 0 and percent != 0:
            print("{}".format(percent), end="")
        # If one of three numbers between multiples of 10, print a dot
        elif percent in dots:
            print(".", end="")
        return 1

    # Overwrite existing file
    if os.path.exists(dst):
        if overwrite:
            if os.path.isfile(dst):
                os.remove(dst)
            else:
                shutil.rmtree(dst)
        else:
            print(dst + " exists, use overwrite=True to replace this file.")
            return

    # Resolve string data types through the lookup table
    if isinstance(dtype, str):
        dtype = dtype.lower().replace("gdt_", "")
        try:
            dtype = GDAL_TYPEMAP[dtype]["type"]
        except KeyError:
            print("\n'" + dtype + "' is not an available data type. "
                  "Choose a value from this list:")
            print(str(list(GDAL_TYPEMAP.keys())))
            # BUG FIX: the original fell through here and passed the raw
            # string on to gdal, failing later with a confusing error.
            return

    # Create a spatial reference object
    spatial_ref = osr.SpatialReference()

    # If a template is provided, use its geometry for target figures
    if template:
        temp = gdal.Open(template)
        spatial_ref.ImportFromWkt(temp.GetProjection())
        srs = spatial_ref.ExportToProj4()
        width = temp.RasterXSize
        height = temp.RasterYSize
        transform = temp.GetGeoTransform()
        xmin, xres, xrot, ymax, yrot, yres = transform
        xs = [xmin + xres * i for i in range(width)]
        ys = [ymax + yres * i for i in range(height)]
        xmax = max(xs) + 0.5 * xres
        # NOTE(review): this adjusts ymax by half an *x* pixel; it works when
        # yres == -xres but looks like it should use yres — confirm against
        # the template grid before changing.
        ymax = ymax + 0.5 * xres
        ymin = min(ys)
        extent = [xmin, ymin, xmax, ymax]
        kwargs["dstSRS"] = srs
        kwargs["outputBounds"] = extent
        kwargs["xRes"] = transform[1]
        kwargs["yRes"] = transform[-1]  # careful here: usually negative
        kwargs["outputType"] = dtype
    elif not kwargs:
        print("No warp options provided.")
        gdal_options("warp")
        return

    # Get source srs
    source = gdal.Open(src)
    spatial_ref.ImportFromWkt(source.GetProjection())
    kwargs["srcSRS"] = spatial_ref.ExportToProj4()

    # Use the progress callback defined above (the original defined
    # warp_progress but then referenced an unrelated gdal_progress name,
    # leaving the local callback dead).
    kwargs["callback"] = warp_progress

    # Compress
    if compress:
        kwargs["creationOptions"] = ["COMPRESS=" + compress]

    # Check Options: https://gdal.org/python/osgeo.gdal-module.html#WarpOptions
    ops = gdal_options("warp", **kwargs)

    # Call
    print("Processing " + dst + " :")
    ds = gdal.Warp(dst, src, options=ops)
    del ds
# Command-line interface: an input folder of per-zone rasters and one
# merged output raster.
parser = argparse.ArgumentParser(description=(
    'This script merge separate field zones into large raster file'))
parser.add_argument('-i', required=True, metavar='input folder',
                    help='Input folder with separate zones')
parser.add_argument('-o', required=True, metavar='output raster',
                    help='Output raster')

# With no arguments at all, show usage instead of letting argparse error out.
if len(sys.argv) == 1:
    parser.print_usage()
    exit(0)

args = parser.parse_args()
input_folder = args.i
output_merged_raster = args.o

if not os.path.exists(input_folder):
    print("ERROR: path doesn't exist: " + input_folder)
    exit(1)

# Collect every GeoTIFF in the folder and mosaic them in a single Warp
# call, treating 0 as nodata on both input and output.
os.chdir(input_folder)
zone_list = [os.path.join(input_folder, name) for name in glob.glob('*.tif')]
gdal.Warp(output_merged_raster, zone_list, format='GTiff',
          srcNodata=0, dstNodata=0)
# Add an alpha band to the scene, treating 0 as the transparent nodata value.
# Converted from Python 2 (print statements) to Python 3 print() calls for
# consistency with the rest of the codebase; also renamed "input", which
# shadowed the builtin.
src_path = 'NPP_21616_151230010535.tif'
#output =
ds = gdal.Open(src_path)
if ds is None:
    print('Unable to open INPUT.tif')
    sys.exit(1)
#num = ds.RasterCount
#print('Number of bands:', num)
#if num > 1:
#    print('Leave one band')
#    ds = gdal.Translate('output.tif', ds, bandList=[1])
#    src_path = 'output.tif'
#print(src_path)
#ds = gdal.Open('output.tif')
#srcband = ds.GetRasterBand(1)
#print(srcband)
#(min, max) = srcband.ComputeRasterMinMax()
#print('MIN', min, 'MAX', max)
# The nodata value (0 vs 255) was meant to depend on the raster minimum:
#if min == 0:
print('0')
dstDS = gdal.Warp('outputWARP.tif', ds, dstAlpha=True, srcNodata=0)
#else:
#    print('255')
#    dstDS = gdal.Warp('outputWARP.tif', ds, dstAlpha=True, srcNodata=255)
def test_gdalwarp_lib_128():
    """Warp an RPC-georeferenced raster against a cutline, exercising the
    three GDALWARP_DENSIFY_CUTLINE modes (default YES, ONLY_IF_INVALID, NO).
    """
    mem_ds = gdal.GetDriverByName('MEM').Create('', 1177, 4719)
    rpc = [
        "HEIGHT_OFF=109",
        "LINE_NUM_COEFF=-0.001245683 -0.09427649 -1.006342 -1.954469e-05 0.001033926 2.020534e-08 -3.845472e-07 -0.002075817 0.0005520694 0 -4.642442e-06 -3.271793e-06 2.705977e-05 -7.634384e-07 -2.132832e-05 -3.248862e-05 -8.17894e-06 -3.678094e-07 2.002032e-06 3.693162e-08",
        "LONG_OFF=7.1477",
        "SAMP_DEN_COEFF=1 0.01415176 -0.003715018 -0.001205632 -0.0007738299 4.057763e-05 -1.649126e-05 0.0001453584 0.0001628194 -7.354731e-05 4.821444e-07 -4.927701e-06 -1.942371e-05 -2.817499e-06 1.946396e-06 3.04243e-06 2.362282e-07 -2.5371e-07 -1.36993e-07 1.132432e-07",
        "LINE_SCALE=2360",
        "SAMP_NUM_COEFF=0.04337163 1.775948 -0.87108 0.007425391 0.01783631 0.0004057179 -0.000184695 -0.04257537 -0.01127869 -1.531228e-06 1.017961e-05 0.000572344 -0.0002941 -0.0001301705 -0.0003289546 5.394918e-05 6.388447e-05 -4.038289e-06 -7.525785e-06 -5.431241e-07",
        "LONG_SCALE=0.8383",
        "SAMP_SCALE=593",
        "SAMP_OFF=589",
        "LAT_SCALE=1.4127",
        "LAT_OFF=33.8992",
        "LINE_OFF=2359",
        "LINE_DEN_COEFF=1 0.0007273139 -0.0006006867 -4.272095e-07 2.578717e-05 4.718479e-06 -2.116976e-06 -1.347805e-05 -2.209958e-05 8.131258e-06 -7.290143e-08 5.105109e-08 -7.353388e-07 0 2.131142e-06 9.697701e-08 1.237039e-08 7.153246e-08 6.758015e-08 5.811124e-08",
        "HEIGHT_SCALE=96.3"
    ]
    mem_ds.SetMetadata(rpc, "RPC")
    mem_ds.GetRasterBand(1).Fill(255)

    # Cutline polygon written to an in-memory GeoJSON datasource.
    cutlineDSName = '/vsimem/test_gdalwarp_lib_128.json'
    cutline_ds = ogr.GetDriverByName('GeoJSON').CreateDataSource(cutlineDSName)
    cutline_lyr = cutline_ds.CreateLayer('cutline')
    f = ogr.Feature(cutline_lyr.GetLayerDefn())
    f.SetGeometry(ogr.CreateGeometryFromWkt(
        'POLYGON ((7.2151 32.51930,7.214316 32.58116,7.216043 32.59476,7.21666 32.5193,7.2151 32.51930))'
    ))
    cutline_lyr.CreateFeature(f)
    f = None
    cutline_lyr = None
    cutline_ds = None

    # The three warp invocations below share exactly the same options.
    warp_kwargs = dict(
        format='MEM',
        cutlineDSName=cutlineDSName,
        dstSRS='EPSG:4326',
        outputBounds=[7.2, 32.52, 7.217, 32.59],
        xRes=0.000226555,
        yRes=0.000226555,
        transformerOptions=['RPC_DEM=data/test_gdalwarp_lib_128_dem.tif'])

    # Default is GDALWARP_DENSIFY_CUTLINE=YES
    ds = gdal.Warp('', mem_ds, **warp_kwargs)
    cs = ds.GetRasterBand(1).Checksum()
    if cs != 4248:
        gdaltest.post_reason('bad checksum')
        print(cs)
        return 'fail'

    # Below steps depend on GEOS
    if not ogrtest.have_geos():
        gdal.Unlink(cutlineDSName)
        return 'success'

    gdal.SetConfigOption('GDALWARP_DENSIFY_CUTLINE', 'ONLY_IF_INVALID')
    ds = gdal.Warp('', mem_ds, **warp_kwargs)
    gdal.SetConfigOption('GDALWARP_DENSIFY_CUTLINE', None)
    cs = ds.GetRasterBand(1).Checksum()
    if cs != 4248:
        gdaltest.post_reason('bad checksum')
        print(cs)
        return 'fail'

    # With densification disabled the warp is expected to fail.
    gdal.SetConfigOption('GDALWARP_DENSIFY_CUTLINE', 'NO')
    with gdaltest.error_handler():
        ds = gdal.Warp('', mem_ds, **warp_kwargs)
    gdal.SetConfigOption('GDALWARP_DENSIFY_CUTLINE', None)
    if ds is not None:
        gdaltest.post_reason('expected none return')
        return 'fail'

    gdal.Unlink(cutlineDSName)
    return 'success'
# Continue inside the per-run work folder.
work_folder += "40" + "/"

# Default input locations (several are overridden with absolute paths below).
source_folder = "../uploads"
sentinel_file = source_folder + "/citra/"
shp_file = source_folder + "/area/"
clipped_file = ""
grid_file = ""
raster_tahun_tanam = ""
kode_area = "18"

# Clipping parameters.
null_value = -9999
format_file = "GTiff"
pixel_size = 10.0

vector_path = "D:/Indra/Tesis/Ngoprek6/Basemap/Best Agro/SPLIT"
sentinel_file = ("C:/xampp/htdocs/pkt/uploads/citra/18/"
                 "S2B_MSIL2A_20190822_bestagro B1_super_resolved.tif")

# Clip the Sentinel mosaic to each of the 176 zone polygons, one GeoTIFF
# per zone, using the polygon as cutline and cropping to its extent.
for id in range(1, 177):
    shp_file = vector_path + "/id_" + str(id) + ".gpkg"
    clipped_file = vector_path + "/id_" + str(id) + ".tif"
    warp_opts = gdal.WarpOptions(
        format=format_file,
        cutlineDSName=shp_file,
        cropToCutline=True,
        dstNodata=null_value,
        xRes=pixel_size,
        yRes=pixel_size,
    )
    gdal.Warp(clipped_file, sentinel_file, options=warp_opts)

print("DONE!")
def calculate(self, process_path):
    """Derive a reclassified depth-to-water-surface raster from a DTM.

    Pipeline (GRASS via QGIS Processing): extract the stream network with
    r.watershed, compute distance-to-stream with r.grow.distance, convert
    that distance into a depth surface (constant max_depth beyond
    self.distance, scaled with distance below it), reclassify it with
    self.rattings, and warp the result to EPSG:3857 at self.output_file.

    :param process_path: folder where all intermediate rasters are written
    """
    # Stand-alone QGIS initialisation (no GUI) plus GDAL driver registration.
    qgs = QgsApplication([], False)
    qgs.initQgis()
    Processing.initialize()
    QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
    gdal.AllRegister()
    #for alg in QgsApplication.processingRegistry().algorithms():
    #    print(alg.id(), "->", alg.displayName())

    # Load the DTM and build the extent string / cell size that the GRASS
    # algorithms need for their region parameters.
    layer_raster = QgsRasterLayer(self.input_mdt,
                                  os.path.basename(self.input_mdt), "gdal")
    data_mdt = layer_raster.dataProvider()
    extent_raster = data_mdt.extent()
    xmin_raster = extent_raster.xMinimum()
    xmax_raster = extent_raster.xMaximum()
    ymin_raster = extent_raster.yMinimum()
    ymax_raster = extent_raster.yMaximum()
    extent_raster_str = str(xmin_raster) + "," + str(xmax_raster) + "," + str(ymin_raster) + "," + str(ymax_raster)
    cellSize = layer_raster.rasterUnitsPerPixelX()

    # Stream network extraction; self.size is the watershed threshold.
    #stream = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/stream.tif"
    stream = process_path + "/stream.tif"
    Processing.runAlgorithm("grass7:r.watershed", {
        'elevation': self.input_mdt,
        'depression': None,
        'flow': None,
        'disturbed_land': None,
        'blocking': None,
        'threshold': self.size,
        'max_slope_length': None,
        'convergence': 5,
        'memory': 300,
        '-s': False,
        '-m': False,
        '-4': False,
        '-a': False,
        '-b': False,
        'accumulation': None,
        'drainage': None,
        'basin': None,
        'stream': stream,
        'half_basin': None,
        'length_slope': None,
        'slope_steepness': None,
        'tci': None,
        'spi': None,
        'GRASS_REGION_PARAMETER': extent_raster_str + '[EPSG:3763]',
        'GRASS_REGION_CELLSIZE_PARAMETER': cellSize,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # condition stream > 1 to have the lines with value 1
    #stream_ones = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/stream_ones.tif"
    stream_ones = process_path + "/stream_ones.tif"
    Processing.runAlgorithm("grass7:r.mapcalc.simple", {
        'a': str(stream),
        'b': None,
        'c': None,
        'd': None,
        'e': None,
        'f': None,
        'expression': 'A>1',
        'output': stream_ones,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # Distance from every cell to the nearest stream cell.
    #raster_distance = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/raster_distance.tif"
    raster_distance = process_path + "/raster_distance.tif"
    #Processing.runAlgorithm("saga:proximitygrid", None, str(stream_ones_str), 3, str(raster_distance), None, None)
    #Processing.runAlgorithm("saga:proximityraster", {
    #    'FEATURES': str(stream_ones),
    #    'DISTANCE': str(raster_distance), 'DIRECTION': 'TEMPORARY_OUTPUT', 'ALLOCATION': 'TEMPORARY_OUTPUT'})
    Processing.runAlgorithm("grass7:r.grow.distance", {
        'input': str(stream_ones),
        'metric': 0,
        '-m': False,
        '-': False,
        'distance': str(raster_distance),
        'value': 'TEMPORARY_OUTPUT',
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # condition distance >= self.distance: always maximum depth (binary mask).
    #dist_major_200 = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/dist_major_200.tif"
    dist_major_200 = process_path + "/dist_major_200.tif"
    Processing.runAlgorithm("grass7:r.mapcalc.simple", {
        'a': str(raster_distance),
        'b': None,
        'c': None,
        'd': None,
        'e': None,
        'f': None,
        'expression': "A>=" + str(self.distance),
        'output': dist_major_200,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # Mask scaled to max_depth where distance >= self.distance.
    #dist_multiplication = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/dist_multiplication.tif"
    dist_multiplication = process_path + "/dist_multiplication.tif"
    Processing.runAlgorithm("grass7:r.mapcalc.simple", {
        'a': str(dist_major_200),
        'b': None,
        'c': None,
        'd': None,
        'e': None,
        'f': None,
        'expression': "A*" + str(self.max_depth),
        'output': dist_multiplication,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # condition distance < self.distance: interpolate between 0 and max depth.
    #dist_minor_200 = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/dist_minor_200.tif"
    dist_minor_200 = process_path + "/dist_minor_200.tif"
    Processing.runAlgorithm("grass7:r.mapcalc.simple", {
        'a': str(raster_distance),
        'b': None,
        'c': None,
        'd': None,
        'e': None,
        'f': None,
        'expression': "A<" + str(self.distance),
        'output': dist_minor_200,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # Multiplication of the near-stream mask by the raster distance.
    #dist_multiplication_dist = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/dist_multiplication_dist.tif"
    dist_multiplication_dist = process_path + "/dist_multiplication_dist.tif"
    #Processing.runAlgorithm("grass7:r.mapcalc.simple",
    #    {'a': str(dist_minor_200),
    #     'b': str(dist_major_200),
    #     'c': None, 'd': None, 'e': None, 'f': None,
    #     'expression': 'A*B',
    #     'output': dist_multiplication_dist, 'GRASS_REGION_PARAMETER': None,
    #     'GRASS_REGION_CELLSIZE_PARAMETER': 0, 'GRASS_RASTER_FORMAT_OPT': '',
    #     'GRASS_RASTER_FORMAT_META': ''})
    Processing.runAlgorithm("grass7:r.mapcalc.simple", {
        'a': str(dist_minor_200),
        'b': str(raster_distance),
        'c': None,
        'd': None,
        'e': None,
        'f': None,
        'expression': 'A*B',
        'output': dist_multiplication_dist,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # interpolation between 0 and distance
    # NOTE(review): interpolation_dist is computed here but never used below
    # (depth_surface sums dist_multiplication + dist_multiplication_dist
    # instead) — looks like leftover/incomplete logic; confirm intent.
    #interpolation_dist = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/interpolation_dist.tif"
    interpolation_dist = process_path + "/interpolation_dist.tif"
    Processing.runAlgorithm("grass7:r.mapcalc.simple", {
        'a': str(dist_multiplication_dist),
        'b': None,
        'c': None,
        'd': None,
        'e': None,
        'f': None,
        'expression': "A*" + str(self.max_depth) + "/" + str(self.distance),
        'output': interpolation_dist,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # depth surface = sum of the two conditional rasters
    #depth_surface = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/depth_surface.tif"
    depth_surface = process_path + "/depth_surface.tif"
    Processing.runAlgorithm("grass7:r.mapcalc.simple", {
        'a': str(dist_multiplication),
        'b': str(dist_multiplication_dist),
        'c': None,
        'd': None,
        'e': None,
        'f': None,
        'expression': 'A+B',
        'output': depth_surface,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })

    # indexes for topography
    """rattings_lista = []
    for linha in ratings:
        for coluna in linha:
            rattings_lista = rattings_lista + [str(coluna)]
    string = ","
    intervalos = string.join(rattings_lista)
    results = list(map(float, rattings_lista))
    print(results)"""
    #Processing.runAlgorithm("saga:reclassifyvalues",{'INPUT': depth_surface, 'METHOD':2, 'OLD':0, 'NEW':1, 'SOPERATOR':0, 'MIN':0, 'MAX':1,
    #    'RNEW':2, 'ROPERATOR':0, 'RETAB':results, 'TOPERATOR':0, 'NODATAOPT':True, 'NODATA':0,
    #    'OTHEROPT':True, 'OTHERS':0, 'RESULT':outPath2})

    # Reclassify the depth surface using the self.rattings lookup table.
    result = process_path + "/result.tif"
    Processing.runAlgorithm("native:reclassifybytable", {
        'INPUT_RASTER': str(depth_surface),
        'RASTER_BAND': 1,
        'TABLE': self.rattings,
        'NO_DATA': -9999,
        'RANGE_BOUNDARIES': 0,
        'NODATA_FOR_MISSING': False,
        'DATA_TYPE': 5,
        'OUTPUT': result
    })

    # Warp the reclassified raster to Web Mercator for the final output.
    out_raster = gdal.Open(result)
    gdal.Warp(self.output_file, out_raster, dstSRS="EPSG:3857")
def rectify(project_path, img_fname, img_rectified_fname, eo, ground_height,
            sensor_width, focal_length, gsd='auto'):
    """
    In order to generate individual ortho-image, this function rectifies a
    given drone image on a reference plane.

    :param project_path: project folder containing the input image
    :param img_fname: file name of the input drone image
    :param img_rectified_fname: file name for the rectified GeoTiff output
    :param eo: exterior orientation (position + attitude) of the exposure
    :param ground_height: Ground height in m
    :param sensor_width: Width of the sensor in mm
    :param focal_length: focal length of the camera
                         (assumed m, consistent with pixel_size — TODO confirm)
    :param gsd: GSD in m. If not specified, it will automatically determine
                gsd.
    :return: boundary polygon of the rectified image in WKT string
             (the rectified image itself is written to
             project_path/img_rectified_fname as EPSG:4326)
    """
    img_path = os.path.join(project_path, img_fname)

    start_time = time.time()
    print('Read the image - ' + img_fname)
    image = cv2.imread(img_path)

    # 0. Extract EXIF data from a image
    # focal_length, orientation = getExif(img_path)  # unit: m
    orientation = 0  # orientation is hard-coded; the EXIF read is disabled

    # 1. Restore the image based on orientation information
    restored_image = restoreOrientation(image, orientation)

    image_rows = restored_image.shape[0]
    image_cols = restored_image.shape[1]

    # Physical pixel size derived from the sensor width.
    pixel_size = sensor_width / image_cols  # unit: mm/px
    pixel_size = pixel_size / 1000  # unit: m/px

    end_time = time.time()
    print("--- %s seconds ---" % (time.time() - start_time))
    read_time = end_time - start_time  # accounted separately in the total below

    print('Read EOP - ' + img_fname)
    print('Northing | Easting | Height | Omega | Phi | Kappa')
    converted_eo = convertCoordinateSystem(eo)
    print(converted_eo)
    R = Rot3D(converted_eo)  # rotation matrix from the converted orientation

    # 2. Extract a projected boundary of the image
    bbox = boundary(restored_image, converted_eo, R, ground_height,
                    pixel_size, focal_length)
    print("--- %s seconds ---" % (time.time() - start_time))

    # Ground sample distance from flying height above ground, if not given.
    if gsd == 'auto':
        gsd = (pixel_size * (converted_eo[2] - ground_height)) / focal_length  # unit: m/px

    # Boundary size in output pixels.
    boundary_cols = int((bbox[1, 0] - bbox[0, 0]) / gsd)
    boundary_rows = int((bbox[3, 0] - bbox[2, 0]) / gsd)

    # 3. Ground coordinates of every output pixel on the reference plane.
    print('projectedCoord')
    start_time = time.time()
    proj_coords = projectedCoord(bbox, boundary_rows, boundary_cols, gsd,
                                 converted_eo, ground_height)
    print("--- %s seconds ---" % (time.time() - start_time))

    # Image size
    image_size = np.reshape(restored_image.shape[0:2], (2, 1))

    # 4. Back-project each ground coordinate into the original image frame.
    print('backProjection')
    start_time = time.time()
    backProj_coords = backProjection(proj_coords, R, focal_length,
                                     pixel_size, image_size)
    print("--- %s seconds ---" % (time.time() - start_time))

    # 5. Resample the source image onto the output grid (RGBA planes).
    print('resample')
    start_time = time.time()
    b, g, r, a = resample(backProj_coords, boundary_rows, boundary_cols,
                          image)
    print("--- %s seconds ---" % (time.time() - start_time))

    # 6. Write the result, first georeferenced in the Korean TM CRS.
    print('Save the image in GeoTiff')
    start_time = time.time()
    img_rectified_fname_kctm = img_rectified_fname.split('.')[0] + '_kctm.tif'
    dst = os.path.join(project_path, img_rectified_fname_kctm)
    createGeoTiff(b, g, r, a, bbox, gsd, boundary_rows, boundary_cols, dst)

    # GDAL warp to reproject from EPSG:5186 to EPSG:4326
    gdal.Warp(os.path.join(project_path, img_rectified_fname),
              gdal.Open(os.path.join(project_path, img_rectified_fname_kctm)),
              format='GTiff', srcSRS='EPSG:5186', dstSRS='EPSG:4326')
    # Remove orthoimage georeferenced as EPSG:5186
    os.remove(os.path.join(project_path, img_rectified_fname_kctm))
    print("--- %s seconds ---" % (time.time() - start_time))

    print('*** Processing time per each image')
    # Total = time since the last stage started plus the initial read time.
    print("--- %s seconds ---" % (time.time() - start_time + read_time))

    bbox_wkt = export_bbox_to_wkt(bbox)
    return bbox_wkt
def main_clip_raster(path_img: str,
                     path_geom: str,
                     pad: int,
                     res: int,
                     path_out: str = None,
                     attr: str = None,
                     no_split=False,
                     add_mask=False):
    """Clip a raster to the (optionally per-feature) bboxes of a vector file.

    For every geometry bbox (or a single merged bbox when *no_split*),
    writes an LZW-compressed clip '<path_out>[_idx].tif' and, when
    *add_mask*, a 1-bit rasterized geometry mask '<path_out>[_idx]_msk.tif'.
    Existing outputs are skipped.

    :param path_img: input raster path
    :param path_geom: vector file with clip geometries (read via geopandas)
    :param pad: padding (map units) added around each geometry bbox
    :param res: output resolution in m/pix
    :param path_out: output path prefix; defaults to path_img sans extension
    :param attr: attribute used to name per-feature outputs
    :param no_split: clip to one combined region instead of per feature
    :param add_mask: also write the rasterized geometry mask
    """
    if path_out is None:
        path_out = os.path.splitext(path_img)[0]
    idx_pref = 'clip' if no_split else None
    #
    geom = gp.read_file(path_geom)
    geom = __reproject_to_img(geom, path_img)
    geom_bboxes = get_geom_bboxes(geom,
                                  pad,
                                  idx_def=idx_pref,
                                  attr=attr,
                                  no_split=no_split)
    #
    num = len(geom_bboxes)
    for xi, x in enumerate(geom_bboxes):
        g = x['geom']
        bbox_xy = x['bbox']
        fidx = x['idx']
        if fidx is not None:
            pout_img = path_out + '_' + fidx + '.tif'
            pout_msk = path_out + '_' + fidx + '_msk.tif'
        else:
            pout_img = path_out + '.tif'
            pout_msk = path_out + '_msk.tif'
        if os.path.isfile(pout_img):
            logging.warning(f'\t!!! output file exist, skip ... [{pout_img}]')
            # BUG FIX: the original logged "skip" but fell through and
            # regenerated/overwrote the outputs anyway.
            continue
        if add_mask:
            logging.info('\t\t({}/{}) (1) (rasterize-mask) #plgn={}'.format(
                xi, num, len(g)))
            ds_msk = rasterize_geom_mask(geom_xy=g, bbox_xy=bbox_xy, res=res)
        logging.info(
            '\t\t({}/{}) (2) (crop-image) size-m = {:0.2f}x{:0.2f}, res={:0.2f} (m/pix)'
            .format(xi, num, bbox_xy[1][0] - bbox_xy[0][0],
                    bbox_xy[1][1] - bbox_xy[0][1], res))
        # Crop to the bbox in memory, then compress to disk via Translate.
        ds_img = gdal.Warp('',
                           path_img,
                           format='MEM',
                           outputBounds=np.array(bbox_xy).reshape(-1).tolist(),
                           xRes=res,
                           yRes=res,
                           resampleAlg='bilinear')
        # Write through temp files + atomic move so interrupted runs don't
        # leave half-written outputs that the skip-check above would honor.
        pout_img_tmp = generate_tmp_path(pout_img)
        if add_mask:
            pout_msk_tmp = generate_tmp_path(pout_msk)
            logging.info('\t({}/{}) mask -> ({})'.format(xi, num, pout_msk))
            gdal.Translate(pout_msk_tmp,
                           ds_msk,
                           creationOptions=['COMPRESS=LZW', 'NBITS=1'])
            shutil.move(pout_msk_tmp, pout_msk)
        logging.info('\t({}/{}) crop-img -> ({})'.format(xi, num, pout_img))
        gdal.Translate(pout_img_tmp, ds_img, creationOptions=['COMPRESS=LZW'])
        shutil.move(pout_img_tmp, pout_img)
def calculation(output_directory, inputs_raster_selection, inputs_parameter_selection):
    """ Main function

    Computes the wind-energy potential indicator set for the selected area.
    Dumps the raw request to /tmp for debugging, warps the wind-speed raster
    to the hub resolution, counts suitable pixels (one plant per pixel) and,
    if any exist, queries hourly profiles and runs the economic model.

    Returns a result dict with "name", "graphics" and "indicator" keys.
    """
    now = datetime.datetime.now()
    # Persist the incoming request for offline debugging/replay.
    data = dict(output_directory=output_directory,
                inputs_raster_selection=inputs_raster_selection,
                inputs_parameter_selection=inputs_parameter_selection)
    with open(f"/tmp/req_wind_{now:%y-%m-%d_%H%M%S}.json", "w") as jsn:
        json.dump(data, jsn)
    # list of error messages
    # TODO: to be fixed according to CREM format
    messages = []
    res = {}
    res["name"] = CM_NAME
    res["graphics"] = []
    res["indicator"] = []
    # retrieve the inputs all input defined in the signature
    w_in = {
        "res_hub": float(inputs_parameter_selection["res_hub"]),
        "height": float(inputs_parameter_selection["height"]),
        "setup_costs": int(inputs_parameter_selection["setup_costs"]),
        # yearly O&M cost as a percentage of the setup cost
        "tot_cost_year":
        (float(inputs_parameter_selection["maintenance_percentage"]) / 100 *
         int(inputs_parameter_selection["setup_costs"])),
        "financing_years": int(inputs_parameter_selection["financing_years"]),
        "peak_power": float(inputs_parameter_selection["peak_power"]),
    }
    discount_rate = float(inputs_parameter_selection["discount_rate"])
    print(f"w_in={w_in}")
    # retrieve the inputs layes
    # ds = gdal.Open(inputs_raster_selection["output_wind_speed"])
    print(f"inputs_raster_selection={inputs_raster_selection}")
    print(f"inputs_raster_selection.keys()={inputs_raster_selection.keys()}")
    # Resample the wind-speed layer to the hub resolution; nodata -> 0 so
    # unsuitable pixels drop out of the plant mask below.
    ds = gdal.Warp(
        "warp_test.tif",
        inputs_raster_selection["output_wind_speed"],
        outputType=gdal.GDT_Float32,
        xRes=w_in["res_hub"],
        yRes=w_in["res_hub"],
        dstNodata=0,
    )
    # NOTE(review): the same band is read twice; one ReadAsArray plus a
    # copy would avoid the duplicate I/O — confirm before changing.
    plant_raster = ds.ReadAsArray()
    potential = ds.ReadAsArray()
    potential = np.nan_to_num(potential)
    plant_raster = np.nan_to_num(plant_raster)
    # Binary mask: one candidate plant per pixel with positive wind speed.
    plant_raster[plant_raster > 0] = 1
    # TODO: set peak power and swept area from a list of turbines
    wind_plant = wind.WindPlant(
        id_plant="Wind",
        peak_power=w_in["peak_power"],
        height=w_in["height"],
        model="Enercon E48 800",
    )
    wind_plant.area = w_in["res_hub"] * w_in["res_hub"]
    wind_plant.n_plants = plant_raster.sum()
    wind_plant.prof = None
    print(f"wind_plant.n_plants: {wind_plant.n_plants}")
    if wind_plant.n_plants > 0:
        wind_plant.raw = False
        wind_plant.mean = None
        # Representative coordinates for the hourly-profile query.
        wind_plant.lat, wind_plant.lon = rr.get_lat_long(ds, potential)
        try:
            # Remote call (RenewableNinja) — may fail on network errors.
            wind_plant.prof = wind_plant.profile()
            wind_plant.energy_production = wind_plant.prof.sum()["electricity"]
            wind_plant.resolution = ["Hours", "hourly"]
            """
            run_source(kind, pl, data_in, most_suitable, n_plant_raster, discount_rate, )
            """
            try:
                res = run_source(
                    "Wind",
                    wind_plant,
                    w_in,
                    potential,
                    plant_raster,
                    discount_rate,
                    res,
                )
            except Exception as exc:
                print(f"FAILED to execute run_source function due to {exc}")
        except Exception:
            # Best-effort: report the failure as a warning indicator rather
            # than aborting the whole computation.
            messages.append((
                "Not able to reach the RenewableNinja website to retrieve the hourly values",
                "-",
                "-",
            ))
    else:
        res["indicator"].append(
            dict(
                unit="",
                name=
                "Not suitable pixels have been identified in the area selected, please select another area",
                value=0,
            ),
        )
        print("Not suitable pixels have been identified.")
    # Surface collected warnings as indicators.
    for msgtxt, msgval, msgunt in messages:
        res["indicator"].append({
            "unit": msgunt,
            "name": "WARNING: " + msgtxt,
            "value": msgval
        })
    print("Wind computation completed")
    return res
def smoothTaData(self, ALEXIgeodict):
    # Smooth the DisALEXI air-temperature (Ta) output: aggregate the fine
    # Ta raster to a coarse grid, resample back to the Landsat grid, then
    # box-filter the result and write it as <sceneID>_Ta.tif.
    # (Python 2 module — print statements.)
    ALEXILatRes = ALEXIgeodict['ALEXI_LatRes']
    ALEXILonRes = ALEXIgeodict['ALEXI_LonRes']  # NOTE(review): unused here
    sceneID = self.sceneID
    scene = self.scene
    outFN = os.path.join(self.resultsBase, scene, '%s_Ta.tif' % sceneID[:-5])
    inProj4 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'  # NOTE(review): unused here
    # =======================convert fine TA to coarse resolution=========
    outfile = os.path.join(self.resultsBase, scene, 'Ta_DisALEXI.tif')
    coarseFile = os.path.join(self.resultsBase, scene, 'TaCoarse.tif')
    coarse2fineFile = os.path.join(self.resultsBase, scene, 'TaCoarse2Fine.tif')  # NOTE(review): unused here
    # Skip entirely if the smoothed output already exists.
    if not os.path.exists(outFN):
        print 'get->Ta'
        # get mask from Landsat LAI
        ls = GeoTIFF(outfile)
        sceneDir = os.path.join(self.satscene_path, 'CF_MASK')
        maskFN = os.path.join(sceneDir, '%s_Mask.tif' % sceneID)
        g = gdal.Open(maskFN, GA_ReadOnly)
        cfmask = g.ReadAsArray()  # NOTE(review): read but never used below — confirm
        g = None
        # =============find Average Ta====================================== COMMENTED FOR TESTING
        # Downsample by averaging to a 400-unit grid, then warp back up to
        # the Landsat grid size (ls.nrow x ls.ncol).
        in_ds = gdal.Open(outfile)
        coarseds = gdal.Translate(coarseFile, in_ds,
                                  options=gdal.TranslateOptions(
                                      resampleAlg='average', xRes=400, yRes=400))
        fineds = gdal.Warp(outFN, coarseds,
                           options=gdal.WarpOptions(resampleAlg='average',
                                                    height=ls.nrow, width=ls.ncol))
        coarseds = None
        # ========smooth Ta data========================================
        ta = fineds.ReadAsArray()
        fineRes = ls.Lat[1, 0] - ls.Lat[0, 0]
        coarseRes = ALEXILatRes
        # Kernel width ~ linear ratio of coarse to fine resolution.
        course2fineRatio = coarseRes**2 / fineRes**2
        rid2 = int(np.sqrt(course2fineRatio))
        # gauss_kernal = Gaussian2DKernel(rid2)
        box_kernal = Box2DKernel(rid2)
        ta = convolve_fft(ta, box_kernal, allow_huge=True)
        # Overwrite the warped raster in place with the smoothed values.
        fineds.GetRasterBand(1).WriteArray(ta)
        fineds = None
        ulx = ls.ulx
        uly = ls.uly
        delx = ls.delx
        dely = -ls.dely
        fineRes = ls.Lat[1, 0] - ls.Lat[0, 0]
        coarseRes = ALEXILatRes
        inUL = [ulx, uly]
        inRes = [delx, dely]
        # Ta = interp_ta(ta,coarseRes,fineRes)-273.16
        # Ta = ta - 273.16
        # FOR TESTING!!
        Ta = ta  # Kelvin->Celsius conversion intentionally disabled for testing
        outFormat = gdal.GDT_Float32
        writeArray2Tiff(Ta, inRes, inUL, ls.proj4, outFN, outFormat)
        # Remove the intermediate coarse raster.
        os.remove(coarseFile)
def compare_modis(self):
    """
    Compare the annual map obtained with gap filling approach to the
    Modis annual map.

    Builds a MODIS annual snow map for [date_start, date_stop], crops the
    S2 map, the MODIS map and the DEM to their common footprint, reprojects
    the DEM onto the MODIS grid and computes comparison statistics.
    Outputs are written to self.path_tmp and copied to self.path_out.
    """
    modis_snowserie = str(self.params.get("modis_snow_map"))
    modis_datefile = self.params.get("modis_snow_map_dates")
    self.modis_annual_snow_map = op.join(self.path_tmp, "modis_annual_snowmap.tif")
    modis_dates = read_list_from_file(modis_datefile)
    # Locate the bands of the MODIS time series matching the study period.
    modis_start_index = None
    modis_stop_index = None
    for i in range(0, len(modis_dates)):
        tmp_date = str_to_datetime(modis_dates[i], "%Y,%m,%d")
        if tmp_date == self.date_start:
            modis_start_index = i
        if tmp_date == self.date_stop:
            modis_stop_index = i
    # NOTE(review): if either date is absent from the file, the indices stay
    # None and the range() below raises TypeError — confirm inputs always match.
    # generate the summary map
    # Bands are 1-based in the OTB expression, hence the +1 / +2 offsets.
    band_index = range(modis_start_index + 1, modis_stop_index + 2)
    # A pixel counts as snow when its value is 200; the sum over bands gives
    # the number of snow days per pixel.
    expression = "+".join(
        ["(im1b" + str(i) + "==200?1:0)" for i in band_index])
    if not op.exists(self.modis_annual_snow_map):
        bandMathApp = band_math([modis_snowserie],
                                self.modis_annual_snow_map,
                                expression,
                                self.ram,
                                otb.ImagePixelType_uint16)
        bandMathApp.ExecuteAndWriteOutput()
        bandMathApp = None
        shutil.copy2(self.modis_annual_snow_map, self.path_out)
    # Compute intersection of the raster footprint
    intersection, srs = get_raster_intersection(self.annual_snow_map,
                                                self.modis_annual_snow_map)
    # Export intersection as shapefile
    intersection_shapefile = op.join(self.path_tmp, "intersection.shp")
    write_poly_to_shapefile(intersection, intersection_shapefile, srs)
    # Crop to intersection S2 map
    s2_cropped = self.annual_snow_map.replace(".tif", "_cropped.tif")
    gdal.Warp(s2_cropped,
              self.annual_snow_map,
              format='GTiff',
              cutlineDSName=intersection_shapefile,
              cropToCutline=True,
              dstNodata=-1,
              outputType=gdal.GDT_Int16)
    shutil.copy2(s2_cropped, self.path_out)
    # Crop to intersection MODIS map
    modis_cropped = self.modis_annual_snow_map.replace(
        ".tif", "_cropped.tif")
    gdal.Warp(modis_cropped,
              self.modis_annual_snow_map,
              format='GTiff',
              cutlineDSName=intersection_shapefile,
              cropToCutline=True,
              dstNodata=-1,
              outputType=gdal.GDT_Int16)
    shutil.copy2(modis_cropped, self.path_out)
    # Crop to intersection DEM
    dem_cropped = op.join(self.path_tmp, "dem_cropped.tif")
    gdal.Warp(dem_cropped,
              self.dem,
              format='GTiff',
              cutlineDSName=intersection_shapefile,
              cropToCutline=True,
              dstNodata=-1,
              outputType=gdal.GDT_Int16)
    shutil.copy2(dem_cropped, self.path_out)
    # Reproject the DEM onto MODIS footprint
    dem_cropped_reprojected = op.join(self.path_tmp,
                                      "dem_cropped_reprojected.tif")
    super_impose_app = super_impose(modis_cropped, dem_cropped,
                                    dem_cropped_reprojected, "bco", -1,
                                    self.ram, otb.ImagePixelType_int16)
    super_impose_app.ExecuteAndWriteOutput()
    super_impose_app = None
    shutil.copy2(dem_cropped_reprojected, self.path_out)
    compute_annual_stats(s2_cropped, dem_cropped, modis_cropped,
                         dem_cropped_reprojected, self.path_out,
                         "intersection")
# Convert each GOES-16 L2 NetCDF file to a reprojected, cropped GeoTIFF and
# load it as a Celsius array for further processing.
for i in archivos:
    print('Archivo:' + i)
    data, xmin, ymin, xmax, ymax, nx, ny = G16.extraeNetCDFL2(i)
    #print data, xmin, ymin, xmax, ymax, nx, ny
    G16.creaTiff(data, xmin, ymin, xmax, ymax, nx, ny)
    # Crop in native projection, reproject to EPSG:4326 (nodata -9999),
    # then crop again to the lat/lon window of interest.
    ds = gdal.Open('tmp.tif')
    gdal.Translate('tmp_rec.tif', ds,
                   options=gdal.TranslateOptions(projWin=coor))
    ds = gdal.Open('tmp_rec.tif')
    gdal.Warp('tmp_rec_4326.tif', ds,
              options=gdal.WarpOptions(dstSRS='EPSG:4326',
                                       dstNodata=-9999.000))
    ds = gdal.Open('tmp_rec_4326.tif')
    gdal.Translate('tmp_rec_4326_rec.tif', ds,
                   options=gdal.TranslateOptions(projWin=coor2))
    ds = gdal.Open('tmp_rec_4326_rec.tif')
    # BUGFIX: mask the -9999 nodata sentinel BEFORE the Kelvin->Celsius
    # conversion. Previously 273.15 was subtracted first, turning the
    # sentinel into -9272.15 so the equality test never matched and nodata
    # pixels leaked through as bogus temperatures. Cast to float so NaN
    # assignment works for integer-typed rasters too.
    data = ds.ReadAsArray().astype(float)
    data[data == -9999.0] = np.nan
    data = data - 273.15
    #data[data >= 0.0] = np.nan
    #np.flipud(data)
def calculate(self, process_path):
    # Estimate net aquifer recharge from the input DTM and reclassify it.
    # Pipeline: recharge = ((altitude*0.99)+542.22)*0.15 via GRASS
    # r.mapcalc, reproject to EPSG:3763, reclassify by self.rattings,
    # finally reproject the result to EPSG:3857 at self.output_file.
    qgs = QgsApplication([], False)
    qgs.initQgis()
    Processing.initialize()
    QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
    gdal.AllRegister()
    #for alg in QgsApplication.processingRegistry().algorithms():
    #    print(alg.id(), "->", alg.displayName())
    # read mdt data
    inputRaster = self.input_mdt
    process_path = process_path
    outPath2 = self.output_file
    gdalRaster = gdal.Open(str(inputRaster))
    x = gdalRaster.RasterXSize
    y = gdalRaster.RasterYSize
    geo = gdalRaster.GetGeoTransform()
    # Raster extent from the geotransform (geo[5] is negative for
    # north-up rasters, hence miny = maxy + geo[5]*y).
    minx = geo[0]
    maxy = geo[3]
    maxx = minx + geo[1]*x
    miny = maxy + geo[5]*y
    #extent_raster = str(minx) + "," + str(maxx) + "," + str(miny) + "," + str(maxy)
    #pixelSize = geo[1]
    band_mdt = gdalRaster.GetRasterBand(1)
    #data_mdt = band_mdt.ReadAsArray(0, 0, x, y)
    Processing.initialize()
    # mdt_interp = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/mdt_interp"
    # Processing.runAlgorithm("grass7:r.surf.idw", None, inputRaster, 12, False, extent_raster, pixelSize, mdt_interp)
    # mdt = mdt_interp + "." + "tif"
    #
    # gdalMDT = gdal.Open(str(mdt_interp) + "." + "tif")
    # x_mdt = gdalMDT.RasterXSize
    # y_mdt = gdalMDT.RasterYSize
    # geo_mdt = gdalMDT.GetGeoTransform()
    # band_mdt = gdalMDT.GetRasterBand(1)
    # data_mdt = band_mdt.ReadAsArray(0,0,x_mdt,y_mdt)
    # coeficients a and b of the regression lines, y = ax + b, used for mean monthly precipitation, y(mm), as a function of altitude, x(m)
    # a = 0.99
    # b = 542.22
    # precip_mul = numpy.multiply(data_mdt,a)
    # precipitat = precip_mul + b
    # precipitation = numpy.array(precipitat)
    # recharge = numpy.multiply(precipitation, 0.15)
    # NOTE(review): no path separator before the file name — relies on
    # process_path ending with '/'; the result path below uses an explicit
    # '/'. Confirm callers pass a trailing slash.
    recharge_without_rec = process_path + "recharge_without_rec"
    #Processing.runAlgorithm("gdal:rastercalculator",{
    #    'INPUT_A': inputRaster, 'BAND_A': 1,
    #    'INPUT_B': None, 'BAND_B': -1,
    #    'INPUT_C': None, 'BAND_C': -1,
    #    'INPUT_D': None, 'BAND_D': -1,
    #    'INPUT_E': None, 'BAND_E': -1,
    #    'INPUT_F': None, 'BAND_F': -1,
    #    'FORMULA': '(A*0.99+542.22)*0.15',
    #    'NO_DATA': None, 'RTYPE': 6, 'EXTRA': '', 'OPTIONS': '',
    #    'OUTPUT': recharge_without_rec
    #})
    # Recharge model: precipitation regressed on altitude
    # (y = 0.99*alt + 542.22 mm), of which 15% infiltrates.
    Processing.runAlgorithm("grass7:r.mapcalc.simple", {
        'a': str(inputRaster),
        'b': None,
        'c': None,
        'd': None,
        'e': None,
        'f': None,
        'expression': "((A*0.99)+542.22)*0.15",
        'output': recharge_without_rec,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': ''
    })
    # Create an output imagedriver with the multiplication result
    # driver2 = gdal.GetDriverByName( "GTiff" )
    # outData2 = driver2.Create(str(recharge_without_rec+'.'+'tif'), x,y,1, gdal.GDT_Float32)
    # outData2.GetRasterBand(1).WriteArray(recharge)
    # outData2.SetGeoTransform(geo)
    #outData2 = None
    # Reproject the recharge raster to EPSG:3763 (ETRS89 / Portugal TM06).
    recharge_without_rec_file = gdal.Open(recharge_without_rec)
    recharge_without_rec_rep = process_path + "recharge_without_rec_rep"
    gdal.Warp(recharge_without_rec_rep, recharge_without_rec_file,
              dstSRS="EPSG:3763")
    #Processing.runAlgorithm("gdal:assignprojection",
    #                        {'INPUT': recharge_without_rec,
    #                         'CRS': QgsCoordinateReferenceSystem('EPSG:3763')})
    # indexes for topography for the two methods
    #numberRows = int(self.tableWidget.rowCount())
    #numberColumns = int(self.tableWidget.columnCount())
    #classes = ''
    #lista = []
    #for i in range(0,numberRows):
    #    for j in range(0,numberColumns):
    #        self.line = self.tableWidget.item(i,j)
    #        lista = lista + [str(self.line.text())]
    #        string = ","
    #        intervalos = string.join(lista)
    #results = list(map(int, lista))
    #QMessageBox.about(self, 'teste', str(results))
    Processing.initialize()
    result = process_path + "/result.tif"
    # Reclassify the recharge values into discrete classes using the
    # user-provided rating table.
    Processing.runAlgorithm("native:reclassifybytable", {
        'INPUT_RASTER': recharge_without_rec_rep,
        'RASTER_BAND': 1,
        'TABLE': self.rattings,
        'NO_DATA': -9999,
        'RANGE_BOUNDARIES': 0,
        'NODATA_FOR_MISSING': False,
        'DATA_TYPE': 5,
        'OUTPUT': result})
    # (Large commented-out block elided here in spirit: legacy QGIS-canvas
    # display code that added the result layer to the map canvas. Left
    # disabled by the original author.)
    # add result into canvas
    #file_info_norm = QFileInfo(str(outPath2))
    #rlayer_new_norm = QgsRasterLayer(outPath2, file_info_norm.fileName(), 'gdal')
    #QgsProject.instance().addMapLayer(rlayer_new_norm)
    #self.iface.canvas.setExtent(rlayer_new_norm.extent())
    #self.iface.canvas.setLayers([rlayer_new_norm])
    #QMessageBox.information(self, self.tr( "Finished" ), self.tr( "Net Recharge completed." ) )
    # Final output in Web-Mercator for display.
    out_raster = gdal.Open(result)
    gdal.Warp(outPath2, out_raster, dstSRS="EPSG:3857")
intersectionWkt = footprint else: request = ogr.CreateGeometryFromWkt(requestWkt) intersection = footprint.Intersection(request) intersectionWkt = intersection.ExportToWkt() print 'Intersection WKT of ' + request.ExportToWkt( ) + ' with ' + footprint.ExportToWkt() + ' is ' + intersectionWkt #quit() directory = os.path.dirname(filename) print "Path is " + directory csvFile = open(directory + '/cutline.csv', 'w') csvFile.write('ID, WKT\n') csvFile.write('1, "' + intersectionWkt + '"\n') csvFile.close() prjFile = open(directory + '/cutline.prj', 'w') prjFile.write('EPSG:4326') prjFile.close() tmp_ds = gdal.Warp('temp', filename, format='MEM', cutlineDSName=directory + '/cutline.csv', cropToCutline=True, dstSRS='EPSG:4326') gdal.Translate(directory + '/out.tiff', tmp_ds, format='GTiff') # , photometric = 'RGB')
def clip(ds, outLayer, no_data=None, rect_file=None, enlarge=10,
         save_cache=False, ext=None, new=True):
    # Clip raster `ds` by the polygons of `outLayer`.
    # Strategy: (1) warp `ds` to the layer's bounding rectangle; (2) burn the
    # polygons into an `enlarge`-times finer byte raster and average it back
    # down, yielding a per-pixel polygon-coverage fraction ("burn" raster);
    # (3) apply that coverage to every band of the rectangle via block_write.
    # Returns (rect_dataset, burn_coverage_array).
    #
    # no_data: fallback nodata value when `ds` declares none (required then).
    # rect_file/save_cache/ext/new: control where intermediates live
    # (/vsimem by default) and whether a cached burn raster is reused.
    #
    # Open the data source and read in the extent
    t = ds.GetGeoTransform()
    x_min, x_max, y_min, y_max = outLayer.GetExtent()
    ulX, ulY = geo2imagexy(ds, x_min, y_min)
    lrX, lrY = geo2imagexy(ds, x_max, y_max)
    # clip_range = [x offset, y offset, x size, y size] in pixels.
    clip_range = [min(ulX, lrX), min(ulY, lrY),
                  abs(ulX - lrX) + 1, abs(ulY - lrY) + 1]
    # Pixel -> georeferenced corners of the clip window (full affine,
    # including rotation terms t[2]/t[4]).
    ul_lon = t[0] + t[1] * clip_range[0] + t[2] * clip_range[1]
    ul_lat = t[3] + t[4] * clip_range[0] + t[5] * clip_range[1]
    lr_lon = t[0] + t[1] * (clip_range[0] + clip_range[2]) + \
        t[2] * (clip_range[1] + clip_range[3])
    lr_lat = t[3] + t[4] * (clip_range[0] + clip_range[2]) + \
        t[5] * (clip_range[1] + clip_range[3])
    bound = [min(ul_lon, lr_lon), min(ul_lat, lr_lat),
             max(ul_lon, lr_lon), max(ul_lat, lr_lat)]
    if save_cache:
        # On-disk intermediates, optionally reusing existing cache files.
        cache_dir = 'cache'
        if not os.path.exists(cache_dir):
            os.mkdir(cache_dir)
        if ext is None:
            poly_file = rep_file(cache_dir, 'poly.tif')
            burn_file = rep_file(cache_dir, 'burn.tif')
        elif new:
            poly_file = rep_file(cache_dir, str(ext) + '_poly.tif')
            burn_file = rep_file(cache_dir, str(ext) + '_burn.tif')
        else:
            poly_file = os.path.join(cache_dir, str(ext) + '_poly.tif')
            burn_file = os.path.join(cache_dir, str(ext) + '_burn.tif')
    else:
        # In-memory intermediates.
        poly_file = '/vsimem/_poly.tif'
        burn_file = '/vsimem/_burn.tif'
    # set no data: the dataset's own nodata wins over the argument.
    if ds.GetRasterBand(1).GetNoDataValue() is not None:
        no_data = ds.GetRasterBand(1).GetNoDataValue()
    if no_data is None:
        raise(ValueError("no_data must be initialed"))
    # create temp bool in_poly tif
    has_old = os.path.exists(burn_file) and not new
    # clip with rectangle
    if rect_file is None:
        rect_file = '/vsimem/_rect.tif'
    option = gdal.WarpOptions(multithread=True, options=CONFIG,
                              creationOptions=CREATION,
                              outputBounds=bound,
                              dstSRS=outLayer.GetSpatialRef(),
                              dstNodata=no_data, xRes=t[1], yRes=t[5],
                              srcNodata=no_data,
                              resampleAlg=gdal.GRA_NearestNeighbour)
    rect = gdal.Warp(rect_file, ds, options=option)
    if not has_old:
        # Build the coverage ("burn") raster from scratch.
        # set geotransform: same origin, `enlarge`-times finer pixels.
        trans = list(rect.GetGeoTransform())
        trans[1] = trans[1] / enlarge
        trans[5] = trans[5] / enlarge
        # set SpatialReference
        srs = outLayer.GetSpatialRef()
        # Byte raster initialised to 0, nodata=2 (so burned 1s and
        # untouched 0s both stay valid).
        zeros_tif(poly_file, int(clip_range[2] * enlarge),
                  int(clip_range[3] * enlarge), 1,
                  gdal.GDT_Byte, trans, srs, no_data=2)
        poly_ds = gdal.Open(poly_file, gdal.GA_Update)
        # Rasterize
        gdal.RasterizeLayer(poly_ds, [1], outLayer, burn_values=[
                            1], options=['ALL_TOUCHED=TRUE'])
        poly_ds = None
        # Average the fine 0/1 raster down to the rect resolution: each
        # coarse pixel becomes the fraction of its area inside the polygons.
        option = gdal.WarpOptions(multithread=True, options=CONFIG,
                                  creationOptions=CREATION,
                                  xRes=rect.GetGeoTransform()[1],
                                  yRes=rect.GetGeoTransform()[5],
                                  resampleAlg=gdal.GRA_Average,
                                  outputType=gdal.GDT_Float32)
        burn_ds = gdal.Warp(burn_file, poly_file, options=option)
        burn_ds.GetRasterBand(1).SetNoDataValue(0)
        # return bool matrix in polygon
        burn_band = burn_ds.GetRasterBand(1)
        burn_data = burn_band.ReadAsArray()
    else:
        # Reuse the cached burn raster.
        burn_ds = gdal.Open(burn_file)
        burn_band = burn_ds.GetRasterBand(1)
        burn_data = burn_band.ReadAsArray()
    # change rect: apply polygon coverage to each band in-place.
    for c in range(1, rect.RasterCount + 1):
        rect_band = rect.GetRasterBand(c)
        if rect_band.GetNoDataValue() is None:
            rect_band.SetNoDataValue(no_data)
        block_write(rect, [rect_band, burn_band], rect_band, map_burn)
    burn_ds = None
    poly_ds = None
    return rect, burn_data
def run(
    url=None,
    obstmp=None,
    clipshpfn=None,
    inputs=None,
    tmpdir=None,
    models=None,
    nocleanup=False,
    **args,
):
    """
    Load and prepare the data required for the change detection algorithms
    and then pass this data to the algorithm. Use `args` to parametrise.

    Pipeline: load the configured models, fetch the NRT Sentinel-2
    observation, write mask/observation GeoTiffs, warp+clip every ancillary
    input onto the observation footprint, then run each model's
    predict_and_save. Intermediates are removed unless `nocleanup` is set.

    NOTE(review): the `inputs` parameter is immediately shadowed by
    `config.pop("inputs")` in the model loop below — confirm it is dead.
    """
    log("# Loading models")
    loaded_models = []
    for m in models:
        config = deepcopy(m)
        name = config.pop("name")
        outfn = config.pop("output")
        driver = config.pop("driver")
        inputs = config.pop("inputs")
        log(f"Model: {name} -> {outfn}")
        try:
            model = get_model(name, **config)
            if model.verbose:
                model.log = log
        except IncorrectChecksumError as e:
            warning(f"Model has an incorrect SHA256 checksum, exiting...")
            sys.exit(1)
        loaded_models.append(model)
    log("# Retrieving NRT observation details")
    url = normalise_url(url)
    log(f"Obs. URL: {url}")
    obswkt = get_bounds(url)
    obsdate = parse_obsdate(url)
    log(f"Obs. Date: {obsdate}")
    log(f"Obs. WKT: {wktfmt(obswkt)}")
    source = DEASentinel2()

    # Determine the minimal set of bands required across all models
    def name(x):
        # Map an integer band index to its name; pass names through.
        if isinstance(x, int):
            return source.bands[x]
        else:
            return x

    bands = {band: False for band in source.bands}
    for model in loaded_models:
        try:
            required = [name(b) for b in model.required_bands]
        except AttributeError:
            # Model does not declare its bands: assume it needs all of them.
            required = source.bands
        for band in required:
            bands[band] = True
    bands = [k for k, v in bands.items() if v is True]
    log(f"Req.Bands: {','.join(bands)}")
    if len(bands) == 0:
        # get at least one band
        bands = ["B02"]
    obsgeo, obsprj, obsdata, mask = source.get_observations(
        url, bands_required=bands)
    ysize, xsize = mask.shape
    obspoly = polygon_from_geobox(obsgeo, xsize, ysize)
    log(f"# Preparing ancillary data")
    log(f"Writing mask to mask.tif")
    driver = gdal.GetDriverByName("GTiff")
    fd = driver.Create("mask.tif", xsize, ysize, 1, gdal.GDT_Byte)
    fd.SetGeoTransform(obsgeo)
    fd.SetProjection(obsprj)
    ob = fd.GetRasterBand(1)
    ob.WriteArray(mask)
    ob.SetNoDataValue(0)
    del fd  # close/flush the mask dataset
    ysize, xsize, psize = obsdata.shape
    log(f"Writing observation data to {obstmp}. Data has {psize} bands.")
    driver = gdal.GetDriverByName("GTiff")
    fd = driver.Create(obstmp, xsize, ysize, psize, gdal.GDT_Float32)
    fd.SetGeoTransform(obsgeo)
    fd.SetProjection(obsprj)
    for i in range(fd.RasterCount):
        ob = fd.GetRasterBand(i + 1)
        ob.WriteArray(obsdata[:, :, i])
        ob.SetNoDataValue(np.nan)
        ob.SetDescription(bands[i])
    del fd  # close/flush the observation dataset
    log(f"Determining ancillary files required")
    outputs = []
    inputfns = []
    for model in models:
        name = model["name"]
        log(f"Checking '{name}' model")
        output = model["output"]
        ips = model["inputs"]
        for ip in ips:
            fn = ip["filename"]
            # An input produced by an earlier model's output needs no warp.
            if fn not in outputs:
                inputfns.append(fn)
        outputs.append(output)
    log(f"# Warping and clipping ancillary data")
    # Get the unique inputs
    inputfns = [*{*inputfns}]
    if len(inputfns) > 0:
        log("Determining clip area from NRT observation")
        clipshpfn = generate_clip_shape_from(obstmp, clipshpfn)
        if not clipshpfn.startswith("/vsimem"):
            log(f"Proj: {obsprj}")
            log(f"Saving clipping area to disk as '{clipshpfn}'")
    else:
        log(f"No ancillary datas are required!")
    datamap = {}
    obssr = osr.SpatialReference()
    obssr.ImportFromProj4(obsprj)
    for afn in inputfns:
        ofn = f"{tmpdir}/{uuid.uuid4()}"
        fd = gdal.Open(afn)
        geo = fd.GetGeoTransform()
        prj = fd.GetProjection()
        insr = osr.SpatialReference()
        insr.ImportFromProj4(prj)
        insr_to_obssr = osr.CoordinateTransformation(insr, obssr)
        poly = polygon_from_geobox(geo, fd.RasterXSize, fd.RasterYSize)
        poly.Transform(insr_to_obssr)
        # Fail fast if an ancillary raster doesn't cover the observation.
        if not poly.Intersects(obspoly):
            raise InputDataError(
                f"Input data '{afn}' does not intersect observation.")
        log(f"Clipping and warping input '{afn}' to '{ofn}'")
        fd = gdal.Warp(ofn, fd, cutlineDSName=clipshpfn, cropToCutline=True,
                       dstSRS=obsprj)
        datamap[afn] = ofn
    # Get processing configuration parameters
    tilewidth = args.pop("tilewidth", None)
    obsscale = args.pop("obsscale", None)
    # Scale observation data
    if obsscale is not None:
        log(f"Scaling observation data by {obsscale}")
        obsdata *= float(obsscale)
    log("# Applying loaded models to data")
    for model, m in zip(loaded_models, models):
        config = deepcopy(m)
        name = config.pop("name")
        log(f"@Running '{name}' model")
        # Update model config based on new information from observation
        config["obsurl"] = url
        config["obswkt"] = obswkt
        config["obsdate"] = obsdate
        config["geo"] = obsgeo
        config["prj"] = obsprj
        config["bands"] = bands  # possibly reduced set of bands
        log("Observation data:")
        log(f" data min: {np.nanmin(obsdata)} max: {np.nanmax(obsdata)}")
        log(f" pixel resolution: {obsgeo[1]:.4f} x {obsgeo[5]:.4f}")
        model.update(**config)
        # Prepare all the appropriate ancillary data sets and pass the
        # observation data as the last one in the list.
        datas = [mask.copy()]
        outfn = m["output"]
        inputfns = m["inputs"]
        log("Loading model inputs:")
        for ip in inputfns:
            fn = datamap[ip["filename"]]
            fd = gdal.Open(fn)
            geo = fd.GetGeoTransform()
            # First assume bands are the same as source
            ipbands = source.bands
            try:
                # Then see if they are overwritten in config
                ipbands = ip["bands"]
            except KeyError:
                # If that fails, try to get bandnames from file
                fbands = []
                for i in range(fd.RasterCount):
                    rb = fd.GetRasterBand(i + 1)
                    desc = rb.GetDescription()
                    if len(desc) > 0:
                        fbands.append(desc)
                if len(fbands) == fd.RasterCount:
                    ipbands = fbands
            log(f" - path: {ip['filename']}")
            log(f" bands: {','.join(ipbands)}")
            # Skip bands no model asked for.
            notreq = set(source.bands) - set(bands)
            toload = [b for b in ipbands if b not in notreq]
            bandidx = [
                i + 1 for i, b in zip(range(fd.RasterCount), ipbands)
                if b not in notreq
            ]
            log(f" loading: {','.join(toload)}")
            nbands = len(bandidx)
            data = np.empty((ysize, xsize, nbands), dtype=np.float32)
            for i, bi in enumerate(bandidx):
                band = fd.GetRasterBand(bi)
                # Read resampled straight into the preallocated buffer.
                band.ReadAsArray(
                    buf_type=gdal.GDT_Float32,
                    buf_xsize=xsize,
                    buf_ysize=ysize,
                    buf_obj=data[:, :, i],
                )
            nodata = fd.GetRasterBand(1).GetNoDataValue()
            data[data == nodata] = np.nan
            nnan = np.count_nonzero(np.isnan(data))
            nval = xsize * ysize * nbands
            pnan = nnan / nval
            if pnan > 0.9:
                # NOTE(review): `afn` is stale here — it is the last file of
                # the earlier warp loop, not this input; `ip['filename']`
                # was probably intended.
                warning(f"clipped input '{afn}' has more than 90% no data")
            scale = ip.pop("scale", None)
            if scale is not None:
                log(f" scaling: {scale}")
                data *= scale
            log(f" data min: {np.nanmin(data)} max: {np.nanmax(data)}")
            log(f" pixel resolution: {geo[1]:.4f} x {geo[5]:.4f}")
            datas.append(data)
        datas.append(obsdata)
        log(f"Output: {outfn}")
        log("Running model predictions")
        # The model is responsible for saving its prediction to disk (or memory
        # using /vsimem) as it is best placed to make a decision on the format, etc.
        # A simple model only needs to implement the `predict` method but can also
        # implement `predict_and_save` if more control of writing output is needed.
        if tilewidth:
            model = TiledPrediction(model, int(tilewidth))
        model.predict_and_save(outfn, *datas)
        datamap[outfn] = outfn
        log("Finished running predictions")
    if nocleanup:
        return
    log("# Cleaning up")
    for k, fn in datamap.items():
        log(f"Removing {fn}")
        gdal.Unlink(fn)
def reproject_raster(img, resAlg, out, ref=None, dstSRS=None, srcNoData=None,
                     dstNodata=None):
    """Reproject (and optionally grid-match) a raster dataset.

    :param img: open ``gdal.Dataset`` to reproject.
    :param resAlg: resampling algorithm name, one of
        'near', 'bilinear', 'average', 'cubic', 'mode'.
    :param out: path of the GTiff file to create.
    :param ref: optional reference ``gdal.Dataset``; when given, the output
        is forced onto the reference grid (projection, geotransform, size).
    :param dstSRS: target SRS for the ref-less branch; defaults to the
        projection of ``img``.
    :param srcNoData: nodata value declared on the source band (ref branch).
    :param dstNodata: nodata value declared/pre-filled on the destination.
    :return: the created dataset, reopened from ``out``.
    """
    GDAL_resAlg = {
        "near": gdalconst.GRA_NearestNeighbour,
        "bilinear": gdalconst.GRA_Bilinear,
        "average": gdalconst.GRA_Average,
        "cubic": gdalconst.GRA_Cubic,
        "mode": gdalconst.GRA_Mode
    }
    # check for new CRS: default to keeping the source projection.
    if dstSRS is None:
        dstSRS = img.GetProjection()
    if ref is not None:
        src_proj = img.GetProjection()
        ref_proj = ref.GetProjection()
        ref_trans = ref.GetGeoTransform()
        xdim = ref.RasterXSize
        ydim = ref.RasterYSize
        bands = img.RasterCount
        # Nearest-neighbour preserves the source data type; interpolating
        # algorithms produce fractional values, so force Float32.
        if resAlg == 'near':
            dtype = img.GetRasterBand(1).DataType
        else:
            dtype = 6  # Float32 (gdal.GDT_Float32)
        resAlg = GDAL_resAlg[resAlg]
        dst = gdal.GetDriverByName('GTiff').Create(
            out, xdim, ydim, bands, dtype, options=['COMPRESS=DEFLATE'])
        dst.SetGeoTransform(ref_trans)
        dst.SetProjection(ref_proj)
        # check NoData value:
        if srcNoData is not None:
            img.GetRasterBand(1).SetNoDataValue(srcNoData)
        if dstNodata is not None:
            dst.GetRasterBand(1).SetNoDataValue(dstNodata)
            # Pre-fill so pixels outside the source stay at nodata.
            dst.GetRasterBand(1).Fill(dstNodata)
        # Single call — this was previously duplicated verbatim in both
        # branches of the dstNodata check.
        gdal.ReprojectImage(img, dst, src_proj, ref_proj, resAlg)
        del dst  # flush to disk before reopening
        return gdal.Open(out)
    else:
        # No reference grid: delegate grid construction to gdal.Warp.
        # Passing dstNodata=None is identical to omitting the argument
        # (WarpOptions defaults it to None), so one call covers both cases.
        fil = gdal.Warp(out, img, dstSRS=dstSRS, resampleAlg=resAlg,
                        dstNodata=dstNodata)
        del fil  # flush to disk before reopening
        return gdal.Open(out)
# p is of the form # VNL_v2_npp_2020_global_vcmslcfg_c202102150000.median_masked.tif.gz, year = p.name[11:15] template = f"data/azml/conus_hls_median_{year}.vrt" cutline_layer = "data/azml/conus.geojson" output_file = f"/vsiaz/hls/viirs_{year}.tif" input_ds = gdal.Open("/vsigzip/" + str(p)) with rio.open(template) as t: bounds = list(t.bounds) crs = t.crs # Crop the dataset to CONUS so calculations can be done in memory cropped_ds = gdal.Warp( "", input_ds, format="VRT", cutlineDSName=cutline_layer, cropToCutline=True, ) # Normalize values and convert to integer for smaller disk size values = cropped_ds.ReadAsArray() # Some very small negatives values[values < 0] = 0 # Dave: sqrt of max normalized values. Here we use 5k as a "theoretical" # max among all years (given max among 2013, 2016, and 2020 as ~3k). # Then multiply by 10000 to save as integer values = np.sqrt(values / 5000.0) * 10000.0 output_ds = gdal_array.OpenArray(np.int16(values)) output_band = output_ds.GetRasterBand(1) output_band.SetScale(0.0001)
import sys

from osgeo import gdal

# Clip a raster to the 'extent' layer of a cutline vector file.
# Usage: <script> <cutline_vector> <input_raster> <output_raster>
# Guard against missing CLI arguments: exit with a usage line instead of
# an IndexError traceback.
if len(sys.argv) < 4:
    sys.exit("usage: <script> <cutline_vector> <input_raster> <output_raster>")

input_cutline = sys.argv[1]
input_raster = sys.argv[2]
output_raster = sys.argv[3]

# Pixels outside the cutline are set to the nodata value 0.
ds = gdal.Warp(output_raster, input_raster, format='GTiff',
               cutlineDSName=input_cutline, cutlineLayer='extent',
               dstNodata=0)
ds = None  # close/flush the output dataset
def makeNDVIandEVI(state_1, rsb01, rsb02, rsb03, names, NdviPath, EviPath): areas = { 'HuangHuaiHai': [482000, 3189000, 1574000, 4662000], 'ChangJiangZhongXiaYou': [-704000, 2596000, 1574000, 3841000], 'DongBei': [1033000, 4267000, 2207000, 5922000] } outputBounds = [-704000, 2596000, 2207000, 5922000] if (state_1 is None): print("state_1 is None") return if (rsb01 is None): return if (rsb02 is None): return if (rsb03 is None): return #state1是对遥感的像元质量检测,去掉没有云的 state1 = gdal_array.BandReadAsArray(state_1.GetRasterBand(1)) b3 = gdal_array.BandReadAsArray(rsb03.GetRasterBand(1)) b2 = gdal_array.BandReadAsArray(rsb02.GetRasterBand(1)) b1 = gdal_array.BandReadAsArray(rsb01.GetRasterBand(1)) ## NDVI ndvi = ((b2 - b1) * 1.0) / ((b2 + b1) * 1.0) ndvi[numpy.isnan(ndvi)] = -9999 ndvi[numpy.isinf(ndvi)] = -9999 ndvi[ndvi > 1] = -9999 ndvi[ndvi < -1] = -9999 nodata = rsb02.GetRasterBand(1).GetNoDataValue() ndvi[b2 == nodata] = -9999 nodata = rsb01.GetRasterBand(1).GetNoDataValue() ndvi[b1 == nodata] = -9999 try: state1_10 = state1 << 10 for i in range(0, 3600): for j in range(0, 3600): if state1_10[i][j] == 0: ndvi[2 * i][2 * j] = -9999 ndvi[2 * i][2 * j + 1] = -9999 ndvi[2 * i + 1][2 * j] = -9999 ndvi[2 * i + 1][2 * j + 1] = -9999 except Exception as e: print(e) outNdvi = gdal_array.SaveArray(ndvi, "fgf", format="MEM", prototype=rsb01) NdviFileName = 'MOD09GA.%s.%s.%s.tif' % (names[1], names[3], 'ndvi') NdviFile = os.path.join(NdviPath, NdviFileName) gdal.Warp( NdviFile, outNdvi, outputBounds=outputBounds, xRes=1000, yRes=1000, srcNodata=-9999, dstNodata=-9999, outputType=gdal.GDT_Float32, dstSRS= "+proj=aea +ellps=WGS84 +datum=WGS84 +lon_0=105 +lat_1=25 +lat_2=47 +units=m +" ) #only huanghuaihai areaNdviFileName = 'MOD09GA.%s.%s.%s.tif' % (names[1], names[3], 'huanghuaihai.ndvi') areaNdviFile = os.path.join(NdviPath, areaNdviFileName) gdal.Warp(areaNdviFile, NdviFile, outputBounds=[482000, 3189000, 1574000, 4662000]) #only dongbei '''
def load_file(self, filename, env):
    # Validate an uploaded RGB/RGBA raster and ingest it into the mosaic:
    # record its WGS84 footprint, then write a tiled/deflated GTiff into the
    # component workdir (reprojected to the resource SRS when needed) and
    # build overviews.
    ds = gdal.Open(filename, gdal.GA_ReadOnly)
    if not ds:
        raise ValidationError(_("GDAL library was unable to open the file."))
    # Only 3-band (RGB) or 4-band (RGBA) rasters are accepted.
    if ds.RasterCount not in (3, 4):
        raise ValidationError(_("Only RGB and RGBA rasters are supported."))
    dsdriver = ds.GetDriver()
    dsproj = ds.GetProjection()
    dsgtran = ds.GetGeoTransform()
    if dsdriver.ShortName not in SUPPORTED_DRIVERS:
        raise ValidationError(
            _("Raster has format '%(format)s', however only following formats are supported: %(all_formats)s.")  # NOQA: E501
            % dict(format=dsdriver.ShortName, all_formats=", ".join(SUPPORTED_DRIVERS))
        )
    # A raster with no projection or geotransform cannot be georeferenced.
    if not dsproj or not dsgtran:
        raise ValidationError(_("Raster files without projection info are not supported."))
    data_type = None
    alpha_band = None
    has_nodata = None
    for bidx in range(1, ds.RasterCount + 1):
        band = ds.GetRasterBand(bidx)
        # All bands must share one data type.
        if data_type is None:
            data_type = band.DataType
        elif data_type != band.DataType:
            raise ValidationError(_("Complex data types are not supported."))
        if band.GetRasterColorInterpretation() == gdal.GCI_AlphaBand:
            assert alpha_band is None, "Multiple alpha bands found!"
            alpha_band = bidx
        else:
            # True only if every non-alpha band declares a nodata value.
            has_nodata = (has_nodata is None or has_nodata) and (
                band.GetNoDataValue() is not None)
    src_osr = osr.SpatialReference()
    src_osr.ImportFromWkt(dsproj)
    dst_osr = self.resource.srs.to_osr()
    # Reproject only when the source SRS differs from the resource SRS.
    reproject = not src_osr.IsSame(dst_osr)
    # Footprint in WGS84, taken from gdal.Info's wgs84Extent polygon.
    info = gdal.Info(filename, format='json')
    geom = Geometry.from_geojson(info['wgs84Extent'])
    self.footprint = ga.elements.WKBElement(bytearray(geom.wkb), srid=4326)
    self.fileobj = env.file_storage.fileobj(component='raster_mosaic')
    dst_file = env.raster_mosaic.workdir_filename(self.fileobj, makedirs=True)
    co = ['COMPRESS=DEFLATE', 'TILED=YES', 'BIGTIFF=YES']
    if reproject:
        # dstAlpha: synthesize an alpha band only when transparency cannot
        # come from existing nodata or an existing alpha band.
        gdal.Warp(
            dst_file, filename,
            options=gdal.WarpOptions(
                format='GTiff',
                dstSRS='EPSG:%d' % self.resource.srs.id,
                dstAlpha=not has_nodata and alpha_band is None,
                creationOptions=co,
            ),
        )
    else:
        # Same SRS: a plain translate re-encodes with the creation options.
        gdal.Translate(
            dst_file, filename,
            options=gdal.TranslateOptions(
                format='GTiff',
                creationOptions=co
            )
        )
    self.build_overview()
def btn_calculate(self):
    """Compute the SDG 15.3 degradation indicator for the selected layers.

    Validates the user-selected trajectory/state/performance/land-cover
    layers (matching CRS and resolution, AOI coverage), combines them
    block-by-block into a single degradation raster, clips it to the
    area of interest, and writes a GeoTIFF plus a CSV of per-class
    areas (km2) to the chosen output folder.
    """
    if not self.folder_output.text():
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Choose an output folder where the output will be saved."), None)
        return

    # Note that the super class has several tests in it - if they fail it
    # returns False, which would mean this function should stop execution
    # as well.
    ret = super(DlgReportingSDG, self).btn_calculate()
    if not ret:
        return

    # Resolve each combo-box selection back to its QGIS layer object.
    # NOTE(review): [0] raises IndexError if the named layer vanished
    # from the list since the dialog was populated — verify upstream.
    layer_traj = [l for l in self.layer_traj_list if l.name() == self.layer_traj.currentText()][0]
    layer_state = [l for l in self.layer_state_list if l.name() == self.layer_state.currentText()][0]
    layer_perf = [l for l in self.layer_perf_list if l.name() == self.layer_perf.currentText()][0]
    layer_lc = [l for l in self.layer_lc_list if l.name() == self.layer_lc.currentText()][0]

    # Check that all of the layers have the same coordinate system and TODO
    # are in 4326.
    if layer_traj.crs() != layer_state.crs():
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Coordinate systems of trajectory layer and state layer do not match."), None)
        return
    if layer_traj.crs() != layer_perf.crs():
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Coordinate systems of trajectory layer and performance layer do not match."), None)
        return
    if layer_traj.crs() != layer_lc.crs():
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Coordinate systems of trajectory layer and land cover layer do not match."), None)
        return

    # Resample the land cover data to match the resolutions of the other
    # layers:
    log('Reprojecting land cover...')
    ds_lc = reproject_dataset(layer_lc.dataProvider().dataSourceUri(),
                              layer_traj.dataProvider().dataSourceUri(),
                              layer_traj.rasterUnitsPerPixelX(),
                              layer_lc.crs().toWkt())
    log('crs: {}'.format(layer_lc.crs().toWkt()))
    # NOTE(review): temp_lc_file is created but never used after the
    # gdal.Translate call below was commented out — candidate for removal.
    temp_lc_file = tempfile.NamedTemporaryFile(suffix='.tif').name
    log('Reprojection of land cover finished.')

    # Check that all of the layers have the same resolution
    def res(layer):
        # Round to 10 decimal places so float noise doesn't cause a
        # spurious resolution mismatch.
        return (round(layer.rasterUnitsPerPixelX(), 10), round(layer.rasterUnitsPerPixelY(), 10))
    if res(layer_traj) != res(layer_state):
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Resolutions of trajectory layer and state layer do not match."), None)
        return
    if res(layer_traj) != res(layer_perf):
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Resolutions of trajectory layer and performance layer do not match."), None)
        return

    # Check that all of the layers cover the area of interest
    if not self.aoi.within(QgsGeometry.fromRect(layer_traj.extent())):
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Area of interest is not entirely within the trajectory layer."), None)
        return
    if not self.aoi.within(QgsGeometry.fromRect(layer_state.extent())):
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Area of interest is not entirely within the state layer."), None)
        return
    if not self.aoi.within(QgsGeometry.fromRect(layer_perf.extent())):
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Area of interest is not entirely within the performance layer."), None)
        return
    if not self.aoi.within(QgsGeometry.fromRect(layer_lc.extent())):
        QtGui.QMessageBox.critical(None, self.tr("Error"), self.tr("Area of interest is not entirely within the land cover layer."), None)
        return

    log('Combining degradation layers...')
    ds_traj = gdal.Open(layer_traj.dataProvider().dataSourceUri())
    ds_state = gdal.Open(layer_state.dataProvider().dataSourceUri())
    ds_perf = gdal.Open(layer_perf.dataProvider().dataSourceUri())

    # Note trajectory significance is band 2
    traj_band = ds_traj.GetRasterBand(2)
    block_sizes = traj_band.GetBlockSize()
    x_block_size = block_sizes[0]
    y_block_size = block_sizes[1]
    xsize = traj_band.XSize
    ysize = traj_band.YSize

    # Output raster: Int16, LZW-compressed, same extent as the trajectory band.
    driver = gdal.GetDriverByName("GTiff")
    temp_deg_file = tempfile.NamedTemporaryFile(suffix='.tif').name
    dst_ds = driver.Create(temp_deg_file, xsize, ysize, 1, gdal.GDT_Int16, ['COMPRESS=LZW'])
    # NOTE(review): the geotransform is taken from the reprojected land
    # cover dataset while the projection comes from the trajectory
    # dataset — presumably they agree after reproject_dataset(); confirm.
    lc_traj = ds_lc.GetGeoTransform()
    dst_ds.SetGeoTransform(lc_traj)
    dst_srs = osr.SpatialReference()
    dst_srs.ImportFromWkt(ds_traj.GetProjectionRef())
    dst_ds.SetProjection(dst_srs.ExportToWkt())

    state_band = ds_state.GetRasterBand(1)
    perf_band = ds_perf.GetRasterBand(1)
    lc_band = ds_lc.GetRasterBand(1)
    log('Traj size: {}, {}'.format(traj_band.XSize, traj_band.YSize))
    log('State size: {}, {}'.format(state_band.XSize, state_band.YSize))
    log('Perf size: {}, {}'.format(perf_band.XSize, perf_band.YSize))
    log('LC size: {}, {}'.format(lc_band.XSize, lc_band.YSize))
    xsize = traj_band.XSize
    ysize = traj_band.YSize

    # Walk the raster block-by-block (using the trajectory band's native
    # block size) so the whole image never has to be held in memory.
    blocks = 0
    for y in xrange(0, ysize, y_block_size):
        if y + y_block_size < ysize:
            rows = y_block_size
        else:
            rows = ysize - y
        for x in xrange(0, xsize, x_block_size):
            if x + x_block_size < xsize:
                cols = x_block_size
            else:
                cols = xsize - x
            deg = traj_band.ReadAsArray(x, y, cols, rows)
            state_array = state_band.ReadAsArray(x, y, cols, rows)
            perf_array = perf_band.ReadAsArray(x, y, cols, rows)
            lc_array = lc_band.ReadAsArray(x, y, cols, rows)
            # Degraded (-1) where land cover says degraded, or where both
            # state AND performance indicate degradation.
            deg[lc_array == -1] = -1
            deg[(state_array == -1) & (perf_array == -1)] = -1
            dst_ds.GetRasterBand(1).WriteArray(deg, x, y)
            del deg
            blocks += 1

    # Dropping the references flushes and closes the GDAL datasets.
    dst_ds = None
    ds_traj = None
    ds_state = None
    ds_perf = None
    ds_lc = None
    log('Degradation layers combined.')

    # Use 'processing' to clip and crop: build an in-memory polygon layer
    # from the AOI, write it to a temporary shapefile, and use it as a
    # cutline for gdal.Warp.
    mask_layer = QgsVectorLayer("Polygon?crs=epsg:4326", "mask", "memory")
    mask_pr = mask_layer.dataProvider()
    fet = QgsFeature()
    fet.setGeometry(self.aoi)
    mask_pr.addFeatures([fet])
    mask_layer_file = tempfile.NamedTemporaryFile(suffix='.shp').name
    QgsVectorFileWriter.writeAsVectorFormat(mask_layer, mask_layer_file, "CP1250", None, "ESRI Shapefile")
    out_file = os.path.join(self.folder_output.text(), 'sdg_15_3_degradation.tif')
    gdal.Warp(out_file, temp_deg_file, format='GTiff', cutlineDSName=mask_layer_file, cropToCutline=True, dstNodata=9999)

    # Load the file add it to the map, and style it
    style_sdg_ld(out_file)

    # Calculate area degraded, improved, etc.
    # Warp to Mollweide (EPSG:54009, equal-area) so pixel counts can be
    # converted to areas; /1e6 converts m2 to km2 — assumes the CRS units
    # are metres (TODO confirm).
    deg_equal_area_tempfile = tempfile.NamedTemporaryFile(suffix='.shp').name
    ds_equal_area = gdal.Warp(deg_equal_area_tempfile, out_file, dstSRS='EPSG:54009')
    deg_gt = ds_equal_area.GetGeoTransform()
    res_x = deg_gt[1]
    res_y = -deg_gt[5]
    deg_array = ds_equal_area.GetRasterBand(1).ReadAsArray()
    area_deg = np.sum(deg_array == -1) * res_x * res_y / 1e6
    area_stable = np.sum(deg_array == 0) * res_x * res_y / 1e6
    area_imp = np.sum(deg_array == 1) * res_x * res_y / 1e6
    area_water = np.sum(deg_array == 2) * res_x * res_y / 1e6
    area_urban = np.sum(deg_array == 3) * res_x * res_y / 1e6

    # Write the per-class area summary next to the GeoTIFF.
    # 'wb' mode for csv is the Python 2 convention (file also uses xrange).
    header = ("Area Degraded", "Area Stable", "Area Improved", "Water Area", "Urban Area")
    values = (area_deg, area_stable, area_imp, area_water, area_urban)
    out_file_csv = os.path.join(self.folder_output.text(), 'sdg_15_3_degradation.csv')
    with open(out_file_csv, 'wb') as fh:
        writer = csv.writer(fh, delimiter=',')
        for row in zip(header, values):
            writer.writerow(row)
    log('Area deg: {}, stable: {}, imp: {}, water: {}, urban: {}'.format(area_deg, area_stable, area_imp, area_water, area_urban))
    self.close()
utm_dir = 'data_UTM/input/' make_directory(utm_dir) make_directory(utm_dir + 'temperature_degC/') make_directory(utm_dir + 'precipitation_mm/') make_directory(utm_dir + 'solar_radiation/') make_directory(utm_dir + 'month_prcp_day/') make_directory(utm_dir + 'snow_gm2/') make_directory(utm_dir + 'vegetation_percent_cover/') make_directory(utm_dir + 'wind_speed_monthly_clipped/') make_directory(utm_dir + 'soil/') dem_file_name = 'dem_aoi.tif' dem_file_path = os.path.join(data_dir, dem_file_name) dem_utm_path = os.path.join(utm_dir, dem_file_name) gdal.Warp(dem_utm_path, dem_file_path, srcSRS='EPSG:4326', dstSRS='EPSG:32648') dem_arr = read_raster_as_array(dem_utm_path) dem_arr[dem_arr == 0] = np.nan RasterSave(dem_arr, dem_utm_path, dem_utm_path) raindays = [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 3.0, 3.0, 1.0, 0.0, 0.0, 0.0] for k in range(0, 12): num = str(k + 1) if k + 1 < 10: num = '0' + str(k + 1) temp_file_path = 'temperature_degC/' + 'wc2.0_30s_tave_' + num + '.tif' temp_in_path = os.path.join(data_dir, temp_file_path) scaled_temp = os.path.join(scaled_dir, temp_file_path)
def project(self): print('Generating projected output file: ' + self.output_file_name) gdal.Warp(self.output_file_name, self.output_file, dstSRS='EPSG:4326')