def convert_hdf5_to_geotiff(filename: str) -> bool:
    from pathlib import Path
    ogr.UseExceptions()
    inp_dir = os.environ.get('NTL_HDF5_DIR')
    out_dir = os.environ.get('NTL_TIF_DIR')
    if filename.endswith('.h5'):
        targetfile = Path(filename).stem
        targetfile += '.tif'
        hdflayer = gdal.Open(inp_dir + filename, gdal.GA_ReadOnly)
        subhdflayer = hdflayer.GetSubDatasets()[0][0]
        rlayer = gdal.Open(subhdflayer, gdal.GA_ReadOnly)
        # collect bounding boxes
        horizontal_tile_number = int(rlayer.GetMetadata_Dict()["HorizontalTileNumber"])
        vertical_tile_number = int(rlayer.GetMetadata_Dict()["VerticalTileNumber"])
        west_bound_coord = (10 * horizontal_tile_number) - 180
        north_bound_coord = 90 - (10 * vertical_tile_number)
        east_bound_coord = west_bound_coord + 10
        south_bound_coord = north_bound_coord - 10
        epsg = "-a_srs EPSG:4326"  # WGS84 coordinate system
        translate_option_text = (
            epsg + " -a_ullr " + str(west_bound_coord) + " " + str(north_bound_coord) +
            " " + str(east_bound_coord) + " " + str(south_bound_coord))
        translate_options = gdal.TranslateOptions(
            gdal.ParseCommandLine(translate_option_text))
        gdal.Translate(out_dir + targetfile, rlayer, options=translate_options)
        return True
    return False
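# A minimal usage sketch for convert_hdf5_to_geotiff() above. The directory
# paths and the granule name are hypothetical; the function reads the
# NTL_HDF5_DIR / NTL_TIF_DIR environment variables at call time, and they
# should include trailing separators because the paths are built by simple
# string concatenation.
import os

os.environ['NTL_HDF5_DIR'] = '/data/ntl/hdf5/'   # hypothetical input folder
os.environ['NTL_TIF_DIR'] = '/data/ntl/tif/'     # hypothetical output folder

# returns True when a .h5 file was converted, False otherwise
converted = convert_hdf5_to_geotiff('VNP46A1.A2020001.h20v05.001.h5')  # hypothetical granule
print('converted:', converted)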
def mosaic(rstr_lst, out_path):
    '''Takes a list of raster files and merges them together.'''
    print("--- starting mosaic ---")
    vrt_options = gdal.BuildVRTOptions(resampleAlg='nearest', addAlpha=True,
                                       xRes=30, yRes=30)
    # create the VRT from the raster list
    temp_vrt = gdal.BuildVRT('temp.vrt', rstr_lst, options=vrt_options)
    # specify the translate options up front: GTiff output with LZW compression
    # to cope with big rasters (the final output can run to several GB);
    # other command-line switches can be added here as well
    translateoptions = gdal.TranslateOptions(
        gdal.ParseCommandLine("-of Gtiff -co COMPRESS=LZW"))
    # time it
    start_time = time.time()
    # apply gdal.Translate and save the raster
    gdal.Translate(out_path, temp_vrt, options=translateoptions)
    # report as soon as the merge is done
    print("--- {0} merged in {1} minutes ---".format(
        os.path.basename(out_path),
        round((time.time() - start_time) / 60, 2)))
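# A small usage sketch for mosaic() above, assuming a folder of GeoTIFF tiles.
# The folder path and output path are hypothetical. Note that mosaic() writes
# a temporary 'temp.vrt' in the current working directory.
import glob

tile_list = sorted(glob.glob('/data/tiles/*.tif'))   # hypothetical tile folder
if tile_list:
    mosaic(tile_list, '/data/mosaics/merged.tif')    # hypothetical output path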
def __init__(self, **kwargs):
    kwargs['defaults'] = {
        'store_msg': [],
        'database': None,
        'product': 'MCD15A3H',
        'tile': 'h08v06',
        'log': None,
        'day': '01',
        'doy': None,
        'month': '*',
        'sds': None,
        'year': "2019",
        'site': 'https://e4ftl01.cr.usgs.gov',
        'size_check': False,
        'noclobber': True,
        'local_dir': 'work',
        'local_file': None,
        'db_file': None,
        'db_dir': 'work',
        'verbose': False,
        'stderr': sys.stderr
    }
    self.__dict__.update(ginit(self, **kwargs))
    if 'database' in self.__dict__ and type(self.database) == Database:
        # already have a database stored
        pass
    else:
        self.database = Database(
            self.db_file,
            **(fdict(self.__dict__.copy(), ignore=['db_dir', 'db_file'])))

    self.translateoptions = gdal.TranslateOptions(
        gdal.ParseCommandLine("-of Gtiff -co COMPRESS=LZW"))

    # list of tiles
    if type(self.tile) is str:
        self.tile = [self.tile]
    if type(self.sds) is str:
        self.sds = [self.sds]
    if self.sds is not None:
        self.msg(f'initial SDS {self.sds}')
        self.required_sds = self.sds

    # for most transactions, we want all SDS
    # so self.sds should reflect that
    self.sds = None
    response = self.database.get_from_db('SDS', self.product)
    if response:
        self.msg("found SDS names in database")
        self.sds = response
        self.msg(self.sds)
        # require them all
        if 'required_sds' not in self.__dict__:
            self.required_sds = self.sds
def VNP_tif(inpath, outpath, layer1, layer2, layer3, layer4):
    # make sure the output subfolders exist
    for sub in ('light', 'cloud', 'angle', 'time'):
        if not os.path.exists(os.path.join(outpath, sub)):
            os.makedirs(os.path.join(outpath, sub))

    for name in os.listdir(inpath):
        if not name.endswith('.h5'):
            continue
        infile = os.path.join(inpath, name)
        root_ds = gdal.Open(infile)
        ds_list = root_ds.GetSubDatasets()
        rlayer = gdal.Open(ds_list[layer1][0], gdal.GA_ReadOnly)
        alayer = gdal.Open(ds_list[layer2][0], gdal.GA_ReadOnly)
        clayer = gdal.Open(ds_list[layer3][0], gdal.GA_ReadOnly)
        tlayer = gdal.Open(ds_list[layer4][0], gdal.GA_ReadOnly)

        # collect bounding box coordinates: -a_ullr <ulx> <uly> <lrx> <lry>
        meta = rlayer.GetMetadata_Dict()
        WestBoundCoord = meta["HDFEOS_GRIDS_VNP_Grid_DNB_WestBoundingCoord"]
        EastBoundCoord = meta["HDFEOS_GRIDS_VNP_Grid_DNB_EastBoundingCoord"]
        NorthBoundCoord = meta["HDFEOS_GRIDS_VNP_Grid_DNB_NorthBoundingCoord"]
        SouthBoundCoord = meta["HDFEOS_GRIDS_VNP_Grid_DNB_SouthBoundingCoord"]
        EPSG = "-a_srs EPSG:4326"  # WGS84
        translateOptionText = (EPSG + " -a_ullr " + WestBoundCoord + " " +
                               NorthBoundCoord + " " + EastBoundCoord + " " +
                               SouthBoundCoord)
        translateoptions = gdal.TranslateOptions(
            gdal.ParseCommandLine(translateOptionText))

        # strip('.h5') would also eat trailing 'h'/'5' characters from the stem,
        # so use the extension-less basename instead
        stem = os.path.splitext(name)[0]
        outfile = os.path.join(outpath, 'light', stem + '.tif')
        gdal.Translate(outfile, clayer, options=translateoptions)

        im_datas = alayer.GetRasterBand(1).ReadAsArray()
        min_angle = np.min(im_datas[im_datas > 0])  # avoid shadowing the built-in min()
        if min_angle < 9000:
            outfilea = os.path.join(outpath, 'angle', stem + '.tif')
            gdal.Translate(outfilea, alayer, options=translateoptions)
            outfilet = os.path.join(outpath, 'time', stem + '.tif')
            gdal.Translate(outfilet, tlayer, options=translateoptions)
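# A hedged usage sketch for VNP_tif() above. The paths are hypothetical, and
# the four indices select entries from GetSubDatasets(); the correct positions
# depend on the granule/collection, so verify them first by printing
# gdal.Open(<granule>).GetSubDatasets().
inpath = r'D:\VNP46A1\h5'     # hypothetical input folder of .h5 granules
outpath = r'D:\VNP46A1\tif'   # hypothetical output root
VNP_tif(inpath, outpath, layer1=0, layer2=1, layer3=2, layer4=3)  # indices are assumptions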
def translate(vrtpath_in: str, raster_out_path: str, band: int):
    """GDAL translate operation from a VRT.

    Args:
        vrtpath_in (str): path to the input VRT
        raster_out_path (str): path of the output raster
        band (int): raster band to extract
    """
    log = Logger('translate')
    tmr = Timer()
    progbar = ProgressBar(100, 50, "Translating ")

    def translate_progress(progress, _msg, _data):
        progbar.update(int(progress * 100))

    translateoptions = gdal.TranslateOptions(
        gdal.ParseCommandLine(
            "-of Gtiff -b {} -co COMPRESS=DEFLATE".format(band)))
    gdal.Translate(raster_out_path, vrtpath_in, options=translateoptions,
                   callback=translate_progress)
    log.info('completed in {}'.format(tmr.toString()))
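# A minimal usage sketch for translate() above: extract band 1 from a VRT into
# a DEFLATE-compressed GeoTIFF. Both paths are hypothetical.
translate('/data/mosaics/elevation.vrt', '/data/mosaics/elevation_b1.tif', band=1)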
def raster_warp(inraster: str, outraster: str, epsg, clip=None,
                warp_options: dict = {}):
    """Reproject a raster to a different coordinate system.

    :param inraster: Input dataset
    :param outraster: Output dataset
    :param epsg: Output spatial reference EPSG identifier
    :param clip: Optional polygon dataset to clip the output
    :param warp_options: Extra GDAL WarpOptions
    :return: None

    https://gdal.org/python/osgeo.gdal-module.html#WarpOptions
    """
    log = Logger('Raster Warp')
    if os.path.isfile(outraster):
        log.info('Skipping raster warp because output exists {}'.format(outraster))
        return None

    log.info('Raster Warp input raster {}'.format(inraster))
    log.info('Raster Warp output raster {}'.format(outraster))
    log.info('Output spatial reference EPSG: {}'.format(epsg))

    output_folder = os.path.dirname(outraster)
    if not os.path.isdir(output_folder):
        os.mkdir(output_folder)

    warpvrt = os.path.join(os.path.dirname(outraster), 'temp_gdal_warp_output.vrt')
    log.info('Performing GDAL warp to temporary VRT file.')

    if clip:
        log.info('Clipping to polygons using {}'.format(clip))
        clip_ds, clip_layer = VectorBase.path_sorter(clip)
        warp_options_obj = gdal.WarpOptions(dstSRS='EPSG:{}'.format(epsg),
                                            format='vrt',
                                            cutlineDSName=clip_ds,
                                            cutlineLayer=clip_layer,
                                            cropToCutline=True,
                                            **warp_options)
    else:
        warp_options_obj = gdal.WarpOptions(dstSRS='EPSG:{}'.format(epsg),
                                            format='vrt',
                                            **warp_options)

    ds = gdal.Warp(warpvrt, inraster, options=warp_options_obj)

    log.info('Using GDAL translate to convert VRT to compressed raster format.')
    translateoptions = gdal.TranslateOptions(
        gdal.ParseCommandLine("-of Gtiff -co COMPRESS=DEFLATE"))
    gdal.Translate(outraster, ds, options=translateoptions)

    # Clean up the temporary VRT file
    os.remove(warpvrt)

    if ds:
        log.info('Process completed successfully.')
    else:
        log.error('Error running GDAL Warp')
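# A hedged usage sketch for raster_warp() above: reproject to EPSG:4326 and
# clip to a polygon layer. The paths are hypothetical; extra keyword arguments
# accepted by gdal.WarpOptions (e.g. xRes/yRes) can be passed via warp_options.
raster_warp('/data/dem/dem_utm.tif',
            '/data/dem/dem_wgs84.tif',
            epsg=4326,
            clip='/data/boundaries/watershed.shp',
            warp_options={'xRes': 0.0003, 'yRes': 0.0003})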
def vs_wm(full_raster, dest_folder, lrc, ulc, subset_size=0.5,
          name='water_mask_'):
    """
    Parameters
    ----------
    full_raster : string - file path
        Water mask or DEM covering the full catchment.
    dest_folder : string - file path
        Folder to store subsets of the full raster.
    lrc : tuple (x, y)
        Lat/lon coordinates of the lower right corner.
    ulc : tuple (x, y)
        Lat/lon coordinates of the upper left corner.

    Returns
    -------
    The destination folder contains the raster subsets covering the entire
    area of interest.
    """
    (x1, y0) = lrc
    (x0, y1) = ulc
    if x1 < x0 or y1 < y0:
        raise ValueError('Study area extent incorrect - please check coordinates.')
    xext = np.arange(x0, x1, subset_size * 2)
    yext = np.arange(y0, y1, subset_size * 2)
    ds = gdal.Open(full_raster)
    ulx, xres, xskew, uly, yskew, yres = ds.GetGeoTransform()
    lrx = ulx + (ds.RasterXSize * xres)
    lry = uly + (ds.RasterYSize * yres)
    if x1 < ulx or x0 > lrx or y1 < lry or y0 > uly:
        raise ValueError('Study area extent does not overlap with full raster '
                         '- please check coordinates.')
    for x in xext:
        for y in yext:
            # -projwin expects: ulx uly lrx lry
            extent = " ".join([str(x - subset_size), str(y + subset_size),
                               str(x + subset_size), str(y - subset_size)])
            translateoptions = gdal.TranslateOptions(
                gdal.ParseCommandLine("-of Gtiff -co COMPRESS=LZW -projwin " + extent))
            # The filename built here should stay consistent with what is done
            # in s3_utils. This is a quick and dirty solution for
            # subset_size = 0.5 and should probably be generalized.
            dst_filename = (dest_folder + 'subset_' + str(int(np.round(x, 0))) +
                            '_' + str(int(np.round(y, 0))) + '.tif')
            ds_out = gdal.Translate(dst_filename, ds, options=translateoptions)
            ds_out = None
    ds = None
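# A minimal usage sketch for vs_wm() above, cutting a large water mask into
# 1-degree tiles (subset_size=0.5 on each side of the tile centre). The raster
# path, destination folder, and corner coordinates are hypothetical.
vs_wm('/data/masks/water_mask_full.tif',
      '/data/masks/subsets/',
      lrc=(12.0, 55.0),   # lower right corner (x, y)
      ulc=(10.0, 57.0),   # upper left corner (x, y)
      subset_size=0.5)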
# compute, for each band, the min and max values to clip to
# compute the stats taking the mask into account!
data = srcband.ReadAsArray()
mask = srcband.GetMaskBand().ReadAsArray()
masked_data = data[mask > 0]
min = masked_data.min()
max = masked_data.max()
media = masked_data.mean()
stddev = masked_data.std()
print("[ STATS MASK ] = Minimum=%.3f, Maximum=%.3f, Mean=%.3f, StdDev=%.3f" % (
    min, max, media, stddev))
# we use 2.8 * stddev because it fits Sentinel imagery well and is simple to compute
min_scale = media - 2.8 * stddev
max_scale = media + 2.8 * stddev
print("Band %s scales: %s" % (band_num, [min_scale, max_scale, 0, 255]))
scales.append([min_scale, max_scale, 0, 255])
scales_str = scales_str + " -scale_{} {} {} 0 255".format(band_num, min_scale, max_scale)
srd_ds = None

# apply a std-dev stretch around the mean using gdal_translate's -scale
print("Creating VRT with stretch and alpha band: %s" % output_file)
# trans_options = gdal.TranslateOptions(format="VRT", scaleParams=scales)
# create a VRT with an alpha band because MapServer does not handle masks well in a tileindex!
trans_options = gdal.TranslateOptions(gdal.ParseCommandLine(
    "-of VRT -b 1 -b 2 -b 3 -b mask -colorinterp red,green,blue,alpha " + scales_str))
gdal.Translate(str(output_file), str(input_file), options=trans_options)
viz_ds = gdal.Open(str(output_file), GA_Update)
alfaband = viz_ds.GetRasterBand(4)
alfaband.SetRasterColorInterpretation(GCI_AlphaBand)
viz_ds = None
def bm_hd5_to_geotiff(hd5Folder, geotiffFolder):
    """Based on NASA's Black Marble OpenHDF5.py.

    Convert a batch of HD5 Black Marble images to GeoTIFF.

    Args:
        hd5Folder: str, path of directory containing hd5 images to be converted
        geotiffFolder: str, path of target directory to place geotiffs

    Returns:
        N/A
    """
    # Check whether a suitable temporary directory is available; create one if not
    temp_check = os.path.join(os.getcwd(), 'temp_dir_for_hd5')
    if os.path.exists(temp_check):
        if os.path.isdir(temp_check):
            tempFolder = temp_check
        else:
            tempFolder = os.path.join(temp_check, str(round(time.time())))
            os.mkdir(tempFolder)
    else:
        os.mkdir(temp_check)
        tempFolder = temp_check

    # Format relevant directory paths
    geotiffFolder = format_dir_nospace(geotiffFolder)
    tempFolder = format_dir_nospace(tempFolder)
    tempFolder_space = format_dir_space(tempFolder)

    # List input raster files
    os.chdir(hd5Folder)
    rasterFiles = os.listdir(os.getcwd())

    index = 0
    totalLength = len(rasterFiles)
    for file in rasterFiles:
        # Get the file name prefix
        rasterFilePre = file[:-3]
        print(rasterFilePre)
        fileExtension = "_BBOX.tif"

        # Open the HDF file
        hdflayer = gdal.Open(file, gdal.GA_ReadOnly)
        # print(hdflayer.GetSubDatasets())

        # Open each raster layer
        # hdflayer.GetSubDatasets()[0][0] - first layer
        # hdflayer.GetSubDatasets()[1][0] - second layer ...etc
        for layer in hdflayer.GetSubDatasets():
            subhdflayer = layer[0]
            rlayer = gdal.Open(subhdflayer, gdal.GA_ReadOnly)

            # Subset the long name and generate the name of the temporary file
            outputName = subhdflayer[92:]
            outputNameNoSpace = outputName.strip().replace(" ", "_").replace("/", "_")
            outputNameFinal = rasterFilePre + outputNameNoSpace + fileExtension
            outputFolder = tempFolder_space
            outputRaster = outputFolder + outputNameFinal

            # Collect bounding box coordinates
            HorizontalTileNumber = int(rlayer.GetMetadata_Dict()["HorizontalTileNumber"])
            VerticalTileNumber = int(rlayer.GetMetadata_Dict()["VerticalTileNumber"])
            WestBoundCoord = (10 * HorizontalTileNumber) - 180
            NorthBoundCoord = 90 - (10 * VerticalTileNumber)
            EastBoundCoord = WestBoundCoord + 10
            SouthBoundCoord = NorthBoundCoord - 10

            # Set projection
            EPSG = "-a_srs EPSG:4326"  # WGS84
            translateOptionText = (EPSG + " -a_ullr " + str(WestBoundCoord) + " " +
                                   str(NorthBoundCoord) + " " + str(EastBoundCoord) +
                                   " " + str(SouthBoundCoord))
            translateoptions = gdal.TranslateOptions(
                gdal.ParseCommandLine(translateOptionText))

            # Generate each layer as a temporary raster file
            gdal.Translate(outputRaster, rlayer, options=translateoptions)

        # Combine the temporary rasters into a single geotiff
        filepre = rasterFilePre
        commandtext = ('gdal_merge.py -separate -o ' + geotiffFolder + filepre +
                       '.tif ' + tempFolder + filepre + '*tif')
        subprocess.call(commandtext, shell=True)

        # Remove the temporary raster files
        subprocess.call('rm ' + tempFolder + filepre + '*tif', shell=True)

        # Report progress
        index += 1
        percentageComplete = index / totalLength * 100
        print(str(percentageComplete) + "% Complete")

    subprocess.call('rmdir ' + tempFolder, shell=True)
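# A minimal usage sketch for bm_hd5_to_geotiff() above. The folder paths are
# hypothetical; note that the function shells out to gdal_merge.py, rm, and
# rmdir, so it assumes a Unix-like environment with GDAL's Python scripts on
# the PATH, and it changes the working directory to hd5Folder.
bm_hd5_to_geotiff('/data/blackmarble/h5', '/data/blackmarble/geotiff/')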
# import the shapefile and get its geometry
countries = geopandas.read_file(
    r"C:\Users\akif.ortak\Desktop\World_Countries\gdam_world_borders_grid\gdam_world_borders_grid.shp")
geom_country = countries.geometry

# get the list of image file paths and their names
ImageList = ListofExtensionAndName(r"D:\elevation_N60ton80", ".bil")

for index, img in enumerate(ImageList[0], 0):
    # get the image and calculate its geometry
    ImageBoundry_result = ImageBoundry(ImageList[0][index])
    gdf = geopandas.read_file(ImageBoundry_result)
    geom_gdf = gdf.geometry
    for i in range(len(geom_country)):
        intersect4 = geom_gdf.intersects(geom_country[i])
        for hit in intersect4:
            if hit:
                targetimg = gdal.Open(img)
                # define your output path and extension
                outputname = (r"D:\elevation_N60ton80_intersectResult\\" +
                              ImageList[1][index] + ".tif")
                outputimg = gdal.Translate(outputname, targetimg)

# gdal.Translate used from Python code or a Jupyter notebook
gdalinput = tile + '.tif'
gdaloutput = oFolder + '\\' + Path(tile).name + '.tif'
translateoptions = gdal.TranslateOptions(
    gdal.ParseCommandLine("-ot Int16 -of Gtiff"))
gdal.Translate(gdaloutput, gdalinput, options=translateoptions)
import shutil

# This is useful if you read NetCDF
gdal.UseExceptions()

# our files
src_nc = r"testdata/netcdfs/wrf_20200214_00_1.nc"
dst_nc = r"testdata/netcdfs/out/viagdal.tiff"
var_name = "t2_0"

# our georeferencing points
GCPList_gdal = [
    GCP_g(6.830896, 50.69396, 0.0, 0.0, 0.0),
    GCP_g(6.830896, 28.65548, 0.0, 0.0, 816.0),
    GCP_g(52.5051, 28.65548, 0.0, 1690.0, 816.0),
    GCP_g(52.5051, 50.69396, 0.0, 1690.0, 0.0)
]

# Watch it! You still need to select the NetCDF variable (subdataset) to read
src_raster = gdal.Open(f"NETCDF:{src_nc}:{var_name}")

# gdal translate and warp parameters
opts = ("-of GTiff -gcp 0.0 0.0 6.8309 50.694 -gcp 0.0 816.0 6.8309 28.6555 "
        "-gcp 1690 816.0 52.5051 28.6555 -gcp 1690.0 0.0 52.5051 50.694")
warp_opts = "-r near -order 1 -co COMPRESS=NONE"
translateOptions = gdal.TranslateOptions(gdal.ParseCommandLine(opts))
warpOptions = gdal.WarpOptions(gdal.ParseCommandLine(warp_opts))

gdal.Translate(dst_nc, src_raster, options=translateOptions)

# TODO: the warp step was not solved in the original snippet; gdal.Warp() still
# needs to be called with warpOptions (see the sketch below).
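# A hedged sketch of the missing warp step: apply the GCP-based first-order
# transform attached by gdal.Translate above and write a warped GeoTIFF.
# The output path is hypothetical.
dst_warped = r"testdata/netcdfs/out/viagdal_warped.tiff"
gdal.Warp(dst_warped, dst_nc, options=warpOptions)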