def test_gdal_translate_lib_104():
    """Translate byte.tif to a 1x1 VRT and verify the resampled checksum."""
    # NOTE(review): the original created and filled an in-memory dataset
    # (src_ds) that was never used; removed as dead code.
    ds = gdal.Translate('', '../gcore/data/byte.tif', format='VRT',
                        width=1, height=1)
    assert ds.GetRasterBand(1).Checksum() == 3, 'Bad checksum'
def stats_byte_partial_tiles():
    """Test statistics computation on byte rasters whose edge blocks are only
    partially filled (tiled, untiled, and MEM datasets).

    Returns 'success' or 'fail' (old-style gdaltest convention).
    """

    def _report(stats, expected):
        # Shared failure path: flag the reason, dump both stat vectors,
        # and hand back the 'fail' sentinel for the caller to return.
        # (The original repeated this block verbatim eight times.)
        gdaltest.post_reason('did not get expected stats')
        print(stats)
        print(expected)
        return 'fail'

    # Tiled GTiff whose right/bottom tiles are only partially covered.
    ds = gdal.Translate(
        '/vsimem/stats_byte_tiled.tif', '../gdrivers/data/small_world.tif',
        creationOptions=['TILED=YES', 'BLOCKXSIZE=64', 'BLOCKYSIZE=64'])
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    gdal.GetDriverByName('GTiff').Delete('/vsimem/stats_byte_tiled.tif')
    expected_stats = [0.0, 255.0, 50.22115, 67.119029288849973]
    if stats != expected_stats:
        return _report(stats, expected_stats)

    # Same but with nodata set
    ds = gdal.Translate(
        '/vsimem/stats_byte_tiled.tif', '../gdrivers/data/small_world.tif',
        creationOptions=['TILED=YES', 'BLOCKXSIZE=64', 'BLOCKYSIZE=64'])
    ds.GetRasterBand(1).SetNoDataValue(0)
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    gdal.GetDriverByName('GTiff').Delete('/vsimem/stats_byte_tiled.tif')
    expected_stats = [1.0, 255.0, 50.311081057390084, 67.14541389488096]
    # Slightly different rounding is accepted on 32-bit builds.
    expected_stats_32bit = [1.0, 255.0, 50.311081057390084, 67.145413894880946]
    if stats != expected_stats and stats != expected_stats_32bit:
        return _report(stats, expected_stats)

    # Same but with nodata set but untiled and with non power of 16 block size
    ds = gdal.Translate('/vsimem/stats_byte_untiled.tif',
                        '../gdrivers/data/small_world.tif',
                        options='-srcwin 0 0 399 200')
    ds.GetRasterBand(1).SetNoDataValue(0)
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    gdal.GetDriverByName('GTiff').Delete('/vsimem/stats_byte_untiled.tif')
    expected_stats = [1.0, 255.0, 50.378183963744554, 67.184793517649453]
    if stats != expected_stats:
        return _report(stats, expected_stats)

    # Constant-fill tiled raster where the tile is wider than the image.
    ds = gdal.GetDriverByName('GTiff').Create(
        '/vsimem/stats_byte_tiled.tif', 1000, 512,
        options=['TILED=YES', 'BLOCKXSIZE=512', 'BLOCKYSIZE=512'])
    ds.GetRasterBand(1).Fill(255)
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    gdal.Unlink('/vsimem/stats_byte_tiled.tif')
    expected_stats = [255.0, 255.0, 255.0, 0.0]
    if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
        return _report(stats, expected_stats)

    # Non optimized code path
    ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
    ds.GetRasterBand(1).WriteRaster(0, 0, 1, 1, struct.pack('B' * 1, 1))
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    expected_stats = [1.0, 1.0, 1.0, 0.0]
    if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
        return _report(stats, expected_stats)

    ds = gdal.GetDriverByName('MEM').Create('', 3, 5)
    ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, struct.pack('B' * 3, 20, 30, 50))
    ds.GetRasterBand(1).WriteRaster(0, 1, 3, 1, struct.pack('B' * 3, 60, 10, 5))
    ds.GetRasterBand(1).WriteRaster(0, 2, 3, 1, struct.pack('B' * 3, 10, 20, 0))
    ds.GetRasterBand(1).WriteRaster(0, 3, 3, 1, struct.pack('B' * 3, 10, 20, 255))
    ds.GetRasterBand(1).WriteRaster(0, 4, 3, 1, struct.pack('B' * 3, 10, 20, 10))
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    expected_stats = [0.0, 255.0, 35.333333333333336, 60.785597709398971]
    if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
        return _report(stats, expected_stats)

    # Width just past the 32-pixel vectorized chunk boundary.
    ds = gdal.GetDriverByName('MEM').Create('', 32 + 2, 2)
    ds.GetRasterBand(1).Fill(1)
    ds.GetRasterBand(1).WriteRaster(32, 1, 2, 1, struct.pack('B' * 2, 0, 255))
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    expected_stats = [0.0, 255.0, 4.7205882352941178, 30.576733555893391]
    if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
        return _report(stats, expected_stats)

    # Same layout, with a nodata value that no pixel actually takes.
    ds = gdal.GetDriverByName('MEM').Create('', 32 + 2, 2)
    ds.GetRasterBand(1).Fill(1)
    ds.GetRasterBand(1).SetNoDataValue(2)
    ds.GetRasterBand(1).WriteRaster(32, 1, 2, 1, struct.pack('B' * 2, 0, 255))
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    expected_stats = [0.0, 255.0, 4.7205882352941178, 30.576733555893391]
    if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
        return _report(stats, expected_stats)

    return 'success'
def Save(self, out_path, **gdaltranslate_opts):
    """Write self.ds to *out_path* via gdal.Translate.

    The output driver is GTiff unless *out_path* ends in '.vrt', in which
    case a VRT is written.  Remaining keyword arguments are forwarded to
    gdal.Translate; a 'fname' key, if present, is discarded since it is
    not a Translate option.
    """
    # Drop 'fname' rather than forwarding it.  Using a default avoids the
    # KeyError the original raised when the caller omitted it, and the
    # binding to an unused local is gone.
    gdaltranslate_opts.pop('fname', None)
    driver = 'VRT' if out_path.endswith('.vrt') else 'GTiff'
    gdal.Translate(out_path, self.ds, format=driver, **gdaltranslate_opts)
def mrf_cached_source():
    """Test MRF datasets that cache or clone data from a source raster.

    Exercises three modes: a caching MRF created via gdal.Translate
    (including one pointing at an invalid source), a hand-written
    mp_safe caching MRF, and a cloning MRF.  Returns 'success'/'fail'
    (old-style gdaltest convention).
    """
    # Caching MRF: first with a source path that cannot be resolved.
    gdal.Translate('/vsimem/out.mrf', 'data/byte.tif', format = 'MRF',
                   creationOptions = ['CACHEDSOURCE=invalid_source', 'NOCOPY=TRUE'])
    ds = gdal.Open('/vsimem/out.mrf')
    # Reading must fail gracefully (checksum of all-missing data is 0).
    with gdaltest.error_handler():
        cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 0
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    gdal.Unlink('/vsimem/out.mrf')
    gdal.Unlink('/vsimem/out.mrf.aux.xml')
    gdal.Unlink('/vsimem/out.idx')
    gdal.Unlink('/vsimem/out.ppg')
    gdal.Unlink('/vsimem/out.til')
    gdal.Unlink('tmp/byte.idx')
    gdal.Unlink('tmp/byte.ppg')
    # Now with a valid relative source next to the MRF.
    open('tmp/byte.tif', 'wb').write(open('data/byte.tif', 'rb').read())
    gdal.Translate('tmp/out.mrf', 'tmp/byte.tif', format = 'MRF',
                   creationOptions = ['CACHEDSOURCE=byte.tif', 'NOCOPY=TRUE'])
    ds = gdal.Open('tmp/out.mrf')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 4672
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    # Remove the source: the cache must now satisfy reads on its own.
    gdal.Unlink('tmp/byte.tif')
    ds = gdal.Open('tmp/out.mrf')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 4672
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    # Caching MRF in mp_safe mode
    gdal.Unlink('tmp/out.mrf')
    gdal.Unlink('tmp/out.mrf.aux.xml')
    gdal.Unlink('tmp/out.idx')
    gdal.Unlink('tmp/out.ppg')
    gdal.Unlink('tmp/out.til')
    open('tmp/byte.tif', 'wb').write(open('data/byte.tif', 'rb').read())
    # Hand-written MRF metadata with mp_safe="on" and a cached source.
    open('tmp/out.mrf', 'wt').write(
        """<MRF_META>
<CachedSource>
<Source>byte.tif</Source>
</CachedSource>
<Raster mp_safe="on">
<Size x="20" y="20" c="1" />
<PageSize x="512" y="512" c="1" />
</Raster>
<GeoTags>
<BoundingBox minx="440720.00000000" miny="3750120.00000000" maxx="441920.00000000" maxy="3751320.00000000" />
<Projection>PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26711"]]</Projection>
</GeoTags>
</MRF_META>""")
    ds = gdal.Open('tmp/out.mrf')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 4672
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    # Again, the cache must survive removal of the source file.
    gdal.Unlink('tmp/byte.tif')
    ds = gdal.Open('tmp/out.mrf')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 4672
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    # Cloning MRF
    open('tmp/cloning.mrf', 'wt').write(
        """<MRF_META>
<CachedSource>
<Source clone="true">out.mrf</Source>
</CachedSource>
<Raster>
<Size x="20" y="20" c="1" />
<PageSize x="512" y="512" c="1" />
</Raster>
<GeoTags>
<BoundingBox minx="440720.00000000" miny="3750120.00000000" maxx="441920.00000000" maxy="3751320.00000000" />
<Projection>PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26711"]]</Projection>
</GeoTags>
</MRF_META>""")
    ds = gdal.Open('tmp/cloning.mrf')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 4672
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    # Clone must keep working after the cloned MRF is deleted.
    gdal.Unlink('tmp/out.mrf')
    gdal.Unlink('tmp/out.mrf.aux.xml')
    gdal.Unlink('tmp/out.idx')
    gdal.Unlink('tmp/out.ppg')
    gdal.Unlink('tmp/out.til')
    ds = gdal.Open('tmp/cloning.mrf')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 4672
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    gdal.Unlink('tmp/cloning.mrf')
    gdal.Unlink('tmp/cloning.mrf.aux.xml')
    gdal.Unlink('tmp/cloning.idx')
    gdal.Unlink('tmp/cloning.ppg')
    gdal.Unlink('tmp/cloning.til')
    return 'success'
# Build the gdal_translate option list from flags embedded in output_format.
if 'scale' in output_format:
    options_list.append('-scale')
if 'remove_mask' in output_format:
    options_list.append('-b mask')
if 'remove_band_1' in output_format:
    options_list.append('-b 1')
if 'remove_band_2' in output_format:
    options_list.append('-b 2')
if 'remove_band_3' in output_format:
    options_list.append('-b 3')
if 'remove_band_4' in output_format:
    options_list.append('-b 4')

# reference: https://svn.osgeo.org/gdal/tags/gdal_1_1_8/html/formats_list.html
output_extension = {
    'JPG': '.jpg',
    # Fixed: the missing comma after '.jp2' made this dict literal a
    # SyntaxError ('.jp2' 'PNG' string concatenation followed by ':').
    'JPEG200': '.jp2',
    'PNG': '.png',
    'GTiff': '.tif',
    'ECW': '.ecw',
    'GIF': '.gif',
}

options_string = " ".join(options_list)
# NOTE(review): the destination is `path` and the source is the filename
# with its extension swapped — presumably intentional here, but verify
# against the caller (it reads as reversed at first glance).
gdal.Translate(os.path.join(BASE_DIR, path),
               os.path.join(BASE_DIR,
                            filename.replace(extension,
                                             output_extension[output_format])),
               options=options_string)
def geo_reference_raster_tile(x, y, z, path):
    """Georeference one tile raster to EPSG:4326 using its z/y/x tile bounds."""
    bounds = tile_edges(x, y, z)
    # Output name pattern: <output_dir>/<z>_<y>_<x>.tif
    target = "{}/{}_{}_{}.tif".format(output_dir, int(z), int(y), int(x))
    gdal.Translate(target, path, outputSRS='EPSG:4326', outputBounds=bounds)
def test_cog_overviews_co():
    """Test the COG driver's OVERVIEWS creation option in all four modes
    (NONE, FORCE_USE_EXISTING, AUTO, IGNORE_EXISTING), both before and
    after the source dataset gains its own overviews."""
    def my_cbk(pct, _, arg):
        # Progress callback: progress must never go backwards.
        assert pct >= tab[0]
        tab[0] = pct
        return 1

    directory = '/vsimem/test_cog_overviews_co'
    filename = directory + '/cog.tif'
    # 2048x300 source: large enough that AUTO generates 2 overview levels.
    src_ds = gdal.Translate('', 'data/byte.tif',
                            options='-of MEM -outsize 2048 300')

    # Source has no overviews yet: NONE and FORCE_USE_EXISTING both
    # produce a COG without overviews.
    for val in ['NONE', 'FORCE_USE_EXISTING']:
        tab = [0]
        ds = gdal.GetDriverByName('COG').CreateCopy(
            filename, src_ds, options=['OVERVIEWS=' + val],
            callback=my_cbk, callback_data=tab)
        assert tab[0] == 1.0
        assert ds
        ds = None
        ds = gdal.Open(filename)
        assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
        assert ds.GetRasterBand(1).GetOverviewCount() == 0
        ds = None
        _check_cog(filename)

    # AUTO and IGNORE_EXISTING generate overviews themselves.
    for val in ['AUTO', 'IGNORE_EXISTING']:
        tab = [0]
        ds = gdal.GetDriverByName('COG').CreateCopy(
            filename, src_ds, options=['OVERVIEWS=' + val],
            callback=my_cbk, callback_data=tab)
        assert tab[0] == 1.0
        assert ds
        ds = None
        ds = gdal.Open(filename)
        assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
        assert ds.GetRasterBand(1).GetOverviewCount() == 2
        assert ds.GetRasterBand(1).GetOverview(0).Checksum() != 0
        ds = None
        _check_cog(filename)

    # Add overviews to source
    # 'NONE' resampling leaves the overview pixels zeroed, which lets the
    # checks below distinguish copied-from-source vs regenerated overviews.
    src_ds.BuildOverviews('NONE', [2])

    # OVERVIEWS=NONE ignores the source's overviews entirely.
    tab = [0]
    ds = gdal.GetDriverByName('COG').CreateCopy(
        filename, src_ds, options=['OVERVIEWS=NONE'],
        callback=my_cbk, callback_data=tab)
    assert tab[0] == 1.0
    assert ds
    ds = None
    ds = gdal.Open(filename)
    assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
    assert ds.GetRasterBand(1).GetOverviewCount() == 0
    ds = None
    _check_cog(filename)

    # FORCE_USE_EXISTING copies the single (zeroed) source overview.
    tab = [0]
    ds = gdal.GetDriverByName('COG').CreateCopy(
        filename, src_ds, options=['OVERVIEWS=FORCE_USE_EXISTING'],
        callback=my_cbk, callback_data=tab)
    assert tab[0] == 1.0
    assert ds
    ds = None
    ds = gdal.Open(filename)
    assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
    assert ds.GetRasterBand(1).GetOverviewCount() == 1
    assert ds.GetRasterBand(1).GetOverview(0).Checksum() == 0
    ds = None
    _check_cog(filename)

    # IGNORE_EXISTING regenerates overviews from the full-resolution data.
    tab = [0]
    ds = gdal.GetDriverByName('COG').CreateCopy(
        filename, src_ds, options=['OVERVIEWS=IGNORE_EXISTING'],
        callback=my_cbk, callback_data=tab)
    assert tab[0] == 1.0
    assert ds
    ds = None
    ds = gdal.Open(filename)
    assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
    assert ds.GetRasterBand(1).GetOverviewCount() == 2
    assert ds.GetRasterBand(1).GetOverview(0).Checksum() != 0
    ds = None
    _check_cog(filename)

    src_ds = None
    gdal.GetDriverByName('GTiff').Delete(filename)
    gdal.Unlink(directory)
def test_gdal_translate_lib_rcp_vrt_path():
    """RPC metadata must survive a translate even when -mo options are set."""
    source = gdal.Open('../gcore/data/rpc.vrt')
    copy = gdal.Translate('', source, format='MEM', metadataOptions=['FOO=BAR'])
    assert copy.GetMetadata('RPC') == source.GetMetadata('RPC')
def process_pol(in_file, rtc_name, out_name, pol, res, look_fact, match_flag, dead_flag,
                gamma_flag, filter_flag, pwr_flag, browse_res, dem, terms, par=None,
                area=False, orbit_file=None):
    """Radiometrically terrain-correct one polarization of a granule.

    Runs the GAMMA mk_geo_radcal pipeline (init / coarse match / fine
    match / finalize), converts the results to GeoTIFF, and moves the
    products into ../PRODUCT.  Changes the working directory into the
    geo_<pol> subdirectory and back out one level at the end.

    NOTE(review): the exact semantics of the external GAMMA commands
    (mk_geo_radcal, float_math, data2geotiff, asf_import, stats) are not
    visible here — comments describe the visible call sequence only.
    """
    logging.info("Processing the {} polarization".format(pol))

    mgrd = "{out}.{pol}.mgrd".format(out=out_name, pol=pol)
    tif = "image_cal_map.mli.tif"

    # Ingest the granule into gamma format
    ingest_S1_granule(in_file, pol, look_fact, mgrd, orbit_file=orbit_file)
    width = getParameter("{}.par".format(mgrd), "range_samples")

    # Apply filter if requested
    if filter_flag:
        el_looks = look_fact * 30
        execute(f"enh_lee {mgrd} temp.mgrd {width} {el_looks} 1 7 7", uselogging=True)
        shutil.move("temp.mgrd", mgrd)

    options = "-p -n {} -q -c ".format(terms)
    if gamma_flag:
        options += "-g "

    # Mode 0: initialization pass of mk_geo_radcal.
    logging.info("Running RTC process... initializing")
    geo_dir = "geo_{}".format(pol)
    execute(f"mk_geo_radcal {mgrd} {mgrd}.par {dem} {dem}.par {geo_dir}/area.dem"
            f" {geo_dir}/area.dem_par {geo_dir} image {res} 0 {options}",
            uselogging=True)

    # Coregistration (modes 1 and 2), skipped when a diff_par is supplied.
    if match_flag and not par:
        fail = False
        logging.info("Running RTC process... coarse matching")
        try:
            execute(f"mk_geo_radcal {mgrd} {mgrd}.par {dem} {dem}.par {geo_dir}/area.dem"
                    f" {geo_dir}/area.dem_par {geo_dir} image {res} 1 {options}",
                    uselogging=True)
        except ExecuteError:
            # Coarse matching is best-effort; fine matching may still work.
            logging.warning("WARNING: Determination of the initial offset failed, skipping initial offset")
        logging.info("Running RTC process... fine matching")
        try:
            execute(f"mk_geo_radcal {mgrd} {mgrd}.par {dem} {dem}.par {geo_dir}/area.dem"
                    f" {geo_dir}/area.dem_par {geo_dir} image {res} 2 {options}",
                    uselogging=True)
        except ExecuteError:
            if not dead_flag:
                logging.error("ERROR: Failed to match images")
                sys.exit(1)
            else:
                # Dead reckoning: discard the (failed) offsets and proceed.
                logging.warning("WARNING: Coregistration has failed; defaulting to dead reckoning")
                os.remove("{}/{}".format(geo_dir, "image.diff_par"))
                fail = True
        if not fail:
            try:
                check_coreg(out_name, res, max_offset=75, max_error=2.0)
            except CoregistrationError:
                if not dead_flag:
                    logging.error("ERROR: Failed the coregistration check")
                    sys.exit(1)
                else:
                    logging.warning("WARNING: Coregistration check has failed; defaulting to dead reckoning")
                    os.remove("{}/{}".format(geo_dir, "image.diff_par"))

    # Mode 3: final geocoding pass.
    logging.info("Running RTC process... finalizing")
    if par:
        shutil.copy(par, "{}/image.diff_par".format(geo_dir))
    execute(f"mk_geo_radcal {mgrd} {mgrd}.par {dem} {dem}.par {geo_dir}/area.dem"
            f" {geo_dir}/area.dem_par {geo_dir} image {res} 3 {options}",
            uselogging=True)

    os.chdir(geo_dir)

    # Divide sigma0 by sin(theta) to get beta0
    execute(f"float_math image_0.inc_map - image_1.sin_theta {width} 7 - - 1 1 - 0")
    execute(f"float_math image_cal_map.mli image_1.sin_theta image_1.beta {width} 3 - - 1 1 - 0")
    execute(f"float_math image_1.beta image_0.sim image_1.flat {width} 3 - - 1 1 - 0")

    # Make Geotiff Files
    execute(f"data2geotiff area.dem_par image_0.ls_map 5 {out_name}.ls_map.tif", uselogging=True)
    execute(f"data2geotiff area.dem_par image_0.inc_map 2 {out_name}.inc_map.tif", uselogging=True)
    execute(f"data2geotiff area.dem_par image_1.flat 2 {out_name}.flat.tif", uselogging=True)
    execute("data2geotiff area.dem_par area.dem 2 outdem.tif", uselogging=True)
    gdal.Translate("{}.dem.tif".format(out_name), "outdem.tif", outputType=gdal.GDT_Int16)
    # Tag the band with the polarization + calibration convention.
    if gamma_flag:
        gdal.Translate("tmp.tif", tif, metadataOptions=['Band1={}_gamma0'.format(pol)])
    else:
        gdal.Translate("tmp.tif", tif, metadataOptions=['Band1={}_sigma0'.format(pol)])
    shutil.move("tmp.tif", tif)
    createAmp(tif, nodata=0)

    # Make meta files and stats
    execute(f"asf_import -format geotiff {out_name}.ls_map.tif ls_map", uselogging=True)
    execute("stats -overstat -overmeta ls_map", uselogging=True)
    execute(f"asf_import -format geotiff {out_name}.inc_map.tif inc_map", uselogging=True)
    execute("stats -overstat -overmeta -mask 0 inc_map", uselogging=True)
    execute(f"asf_import -format geotiff image_cal_map.mli_amp.tif tc_{pol}", uselogging=True)
    execute(f"stats -nostat -overmeta -mask 0 tc_{pol}", uselogging=True)

    # Make browse resolution tif file
    if res == browse_res:
        shutil.copy("image_cal_map.mli_amp.tif", "{}_{}_{}m.tif".format(out_name, pol, browse_res))
    else:
        gdal.Translate("{}_{}_{}m.tif".format(out_name, pol, browse_res),
                       "image_cal_map.mli_amp.tif", xRes=browse_res, yRes=browse_res)

    # Move files into the product directory
    out_dir = "../PRODUCT"
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    if pwr_flag:
        shutil.move(tif, "{}/{}".format(out_dir, rtc_name))
    else:
        copy_metadata(tif, "image_cal_map.mli_amp.tif")
        shutil.move("image_cal_map.mli_amp.tif", "{}/{}".format(out_dir, rtc_name))
    shutil.move("{}.ls_map.tif".format(out_name), "{}/{}_ls_map.tif".format(out_dir, out_name))
    shutil.move("{}.inc_map.tif".format(out_name), "{}/{}_inc_map.tif".format(out_dir, out_name))
    shutil.move("{}.dem.tif".format(out_name), "{}/{}_dem.tif".format(out_dir, out_name))
    if area:
        shutil.move("{}.flat.tif".format(out_name), "{}/{}_flat_{}.tif".format(out_dir, out_name, pol))
    os.chdir("..")
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Raise Python exceptions on GDAL errors, and register all drivers.
gdal.UseExceptions()
gdal.AllRegister()

#### stack layer data
# All single-band TIFFs in this folder become bands of one stacked raster.
path_layer = r"D:\FORESTS2020\GITHUB\Plugin\GitTesis\TIF RAW\hudji"
file_layer = glob.glob(path_layer + "/*.tif")
# system('gdal_merge -o cidanau_stack.tif {fileraster}'.format(fileraster=file_layer))
# gm.main(['', '-o', 'cidanau_stack.tif', '{fileraster}'.format(fileraster=file_layer)])
file_vrt = path_layer + "/stacked.vrt"
file_tif = path_layer + "/cidanau_stack.tif"
# separate=True puts each input file into its own band of the VRT,
# which gdal.Translate then materializes as a multi-band GeoTIFF.
vrt = gdal.BuildVRT(file_vrt, file_layer, separate=True)
stack_layer = gdal.Translate(file_tif, vrt)

#####
# Boolean area-of-interest mask from band 1 (True where data is positive).
AOI_1 = gdal.Open(file_tif)
AOI_2 = AOI_1.GetRasterBand(1).ReadAsArray()
AOI = AOI_2 > 0

####
# Load the full stack into a (rows, cols, bands) numpy array, using the
# numpy dtype equivalent of the raster's band type.
img_ds = gdal.Open(file_tif, gdal.GA_ReadOnly)
img = np.zeros(
    (img_ds.RasterYSize, img_ds.RasterXSize, img_ds.RasterCount),
    gdal_array.GDALTypeCodeToNumericTypeCode(img_ds.GetRasterBand(1).DataType))
# print(img)
for b in range(img.shape[2]):
    img[:, :, b] = img_ds.GetRasterBand(b + 1).ReadAsArray().astype(float)
# roi = roi_ds.GetRasterBand(1).ReadAsArray().astype(np.uint8)
def gdal_proj_mercator(
    pathname: str,
    in_w: int,
    in_h: int,
    out_w: int,
    out_h: int,
    hrv: HRVpicture,
    sat: Satellite,
    zone: PixelZone
):
    """Proceed to a Mercator projection, thanks to GDAL.

    Arguments:
        pathname {str} -- the pathname of the tiff you want to project.
        in_w {int} -- width of the tiff file.
        in_h {int} -- height of the tiff file.
        out_w {int} -- width after projection.
        out_h {int} -- height after projection.
        hrv {HRVpicture} -- High Resolution Visible picture parameters.
        sat {Satellite} -- Satellite parameters.
        zone {PixelZone} -- A pixel zone from the HRV main picture.
    """
    # Equatorial & Polar radius, in meters.
    r_equatorial = 6378169.0
    r_polar = 6356583.8

    # Corner coordinates derived from the pixel zone.
    xl = -hrv.psize * (zone.left - hrv.origin)
    yt = hrv.psize * (zone.top - hrv.origin)
    xr = -hrv.psize * (zone.right - 1 - hrv.origin)
    yb = hrv.psize * (zone.bot - 1 - hrv.origin)

    # Warn (but continue) when the input tiff is missing.
    if not path.exists(pathname):
        print("ERROR: filename doesn't exists.")

    name = path.basename(pathname)

    # Cache directories for the intermediate and final rasters.
    tr_path = "../.cache/gdal/translate/"
    wr_path = "../.cache/gdal/warp/"
    for cache_dir in (tr_path, wr_path):
        if not path.exists(cache_dir):
            makedirs(cache_dir)

    # Geostationary SRS shared by the translate output and the warp input.
    geos_srs = (
        f"+proj=geos +a={r_equatorial} +b={r_polar}"
        f" +lat_0={sat.lat} +lon_0={sat.lon} +h={sat.height}"
        " +x_0=0 +y_0=0 +pm=0"
    )

    opt_translate = gdal.TranslateOptions(
        srcWin=[0, 0, in_w, in_h],
        outputBounds=[xl, yt, xr, yb],
        outputSRS=geos_srs,
    )
    opt_warp = gdal.WarpOptions(
        width=out_w,
        height=out_h,
        srcSRS=geos_srs + f" +ulx={xl} +uly={yt} +lrx={xr} +lry={yb}",
        dstSRS="+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs",
        resampleAlg=gdal.GRIORA_Bilinear,
        multithread=True,
    )

    # Proceed to mercator projection :
    gdal.Translate(tr_path + name, pathname, options=opt_translate)
    gdal.Warp(wr_path + name, tr_path + name, options=opt_warp)
    return wr_path + name
def rl2_22():
    """Test appending RasterLite2 coverages/sections into one SQLite file
    (APPEND_SUBDATASET / COVERAGE / SECTION creation options).

    Returns 'success'/'fail'/'skip' (old-style gdaltest convention).
    """
    if gdaltest.rl2_drv is None:
        return 'skip'
    src_ds = gdal.Open('data/byte.tif')
    # Start from a Spatialite DB that already has a vector layer.
    ds = ogr.GetDriverByName('SQLite').CreateDataSource(
        '/vsimem/rl2_22.rl2', options=['SPATIALITE=YES'])
    ds.CreateLayer('foo', None, ogr.wkbPoint)
    ds = None
    # Append a raster coverage to the existing DB.
    ds = gdaltest.rl2_drv.CreateCopy(
        '/vsimem/rl2_22.rl2', src_ds,
        options=['APPEND_SUBDATASET=YES', 'COVERAGE=byte'])
    if ds.GetRasterBand(1).Checksum() != 4672:
        gdaltest.post_reason('fail')
        print(ds.GetRasterBand(1).Checksum())
        return 'fail'
    ds = None
    # Both the raster and the vector layer must be visible.
    ds = gdal.OpenEx('/vsimem/rl2_22.rl2')
    if ds.RasterXSize != 20:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetLayerCount() != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    # Split the source into two halves and append them as two sections of
    # the same coverage.
    left_ds = gdal.Translate('left', src_ds, srcWin=[0, 0, 10, 20], format='MEM')
    right_ds = gdal.Translate('', src_ds, srcWin=[10, 0, 10, 20], format='MEM')
    gdaltest.rl2_drv.CreateCopy('/vsimem/rl2_22.rl2', left_ds,
                                options=['COVERAGE=left_right'])
    ds = gdaltest.rl2_drv.CreateCopy('/vsimem/rl2_22.rl2', right_ds, options=[
        'APPEND_SUBDATASET=YES',
        'COVERAGE=left_right',
        'SECTION=right'
    ])
    # Reassembled coverage must match the original byte.tif checksum.
    if ds.GetRasterBand(1).Checksum() != 4672:
        gdaltest.post_reason('fail')
        print(ds.GetRasterBand(1).Checksum())
        return 'fail'
    # Append an RGB coverage too.
    src_ds = gdal.Open('data/rgbsmall.tif')
    ds = gdaltest.rl2_drv.CreateCopy(
        '/vsimem/rl2_22.rl2', src_ds,
        options=['APPEND_SUBDATASET=YES', 'COVERAGE=rgbsmall'])
    if ds.GetRasterBand(1).Checksum() != src_ds.GetRasterBand(1).Checksum():
        gdaltest.post_reason('fail')
        print(ds.GetRasterBand(1).Checksum())
        return 'fail'
    ds = None
    gdal.Unlink('/vsimem/rl2_22.rl2')
    return 'success'
def test_rasterio_14():
    """Test GRIORA_Average resampling, including the chunked code path on
    very wide / very tall rasters."""
    # 6x6 grid with isolated 100-valued cells; averaging to 50% size must
    # blend each 2x2 neighbourhood.
    gdal.FileFromMemBuffer('/vsimem/rasterio_14.asc',
                           """ncols 6
nrows 6
xllcorner 0
yllcorner 0
cellsize 0
0 0 100 0 0 0
0 100 0 0 0 100
0 0 0 0 100 0
100 0 100 0 0 0
0 100 0 100 0 0
0 0 0 0 0 100""")
    ds = gdal.Translate('/vsimem/rasterio_14_out.asc', '/vsimem/rasterio_14.asc',
                        options='-of AAIGRID -r average -outsize 50% 50%')
    cs = ds.GetRasterBand(1).Checksum()
    assert cs == 110, ds.ReadAsArray()
    gdal.Unlink('/vsimem/rasterio_14.asc')
    gdal.Unlink('/vsimem/rasterio_14_out.asc')

    # Very wide raster: only the last source pixel is non-zero, so after
    # 2x horizontal averaging the last output pixel must be 50.
    ds = gdal.GetDriverByName('MEM').Create('', 1000000, 1)
    ds.GetRasterBand(1).WriteRaster(ds.RasterXSize - 1, 0, 1, 1,
                                    struct.pack('B' * 1, 100))
    data = ds.ReadRaster(buf_xsize=int(ds.RasterXSize / 2), buf_ysize=1,
                         resample_alg=gdal.GRIORA_Average)
    data = struct.unpack('B' * int(ds.RasterXSize / 2), data)
    assert data[-1:][0] == 50
    # Same expectation when reading just the last two source pixels.
    data = ds.ReadRaster(ds.RasterXSize - 2, 0, 2, 1, buf_xsize=1, buf_ysize=1,
                         resample_alg=gdal.GRIORA_Average)
    data = struct.unpack('B' * 1, data)
    assert data[0] == 50

    # Very tall raster: symmetric check along the vertical axis.
    ds = gdal.GetDriverByName('MEM').Create('', 1, 1000000)
    ds.GetRasterBand(1).WriteRaster(0, ds.RasterYSize - 1, 1, 1,
                                    struct.pack('B' * 1, 100))
    data = ds.ReadRaster(buf_xsize=1, buf_ysize=int(ds.RasterYSize / 2),
                         resample_alg=gdal.GRIORA_Average)
    data = struct.unpack('B' * int(ds.RasterYSize / 2), data)
    assert data[-1:][0] == 50
    data = ds.ReadRaster(0, ds.RasterYSize - 2, 1, 2, buf_xsize=1, buf_ysize=1,
                         resample_alg=gdal.GRIORA_Average)
    data = struct.unpack('B' * 1, data)
    assert data[0] == 50
def test_gdal_translate_lib_geolocation_vrt_path():
    """GEOLOCATION metadata must be propagated into a VRT copy."""
    source = gdal.Open('../gcore/data/sstgeo.vrt')
    out = gdal.Translate('/vsimem/temp.vrt', source, format='VRT',
                         metadataOptions=['FOO=BAR'])
    assert out.GetMetadata('GEOLOCATION') == source.GetMetadata('GEOLOCATION')
    gdal.Unlink('/vsimem/temp.vrt')
def test_vrtovr_virtual():
    """Test virtual (in-XML) overviews on a VRT via the
    VRT_VIRTUAL_OVERVIEWS config option."""
    # Source GTiff with real overviews, filled with distinct values so
    # each level can be told apart by checksum.
    tif_tmpfilename = '/vsimem/temp.tif'
    src_ds = gdal.GetDriverByName('GTiff').Create(tif_tmpfilename, 20, 20, 3)
    src_ds.BuildOverviews('NEAR', [2, 4])
    src_ds.GetRasterBand(1).Fill(200)
    src_ds.GetRasterBand(2).Fill(100)
    src_ds.GetRasterBand(1).GetOverview(0).Fill(100)
    src_ds.GetRasterBand(1).GetOverview(1).Fill(50)
    src_ds = None
    src_ds = gdal.Open(tif_tmpfilename)
    tmpfilename = '/vsimem/temp.vrt'
    vrt_ds = gdal.Translate(tmpfilename, src_ds, format='VRT')
    assert vrt_ds.GetRasterBand(1).GetOverviewCount() == 0  # we normally don't create implicit overviews on that small datasets
    with gdaltest.config_option('VRT_VIRTUAL_OVERVIEWS', 'YES'):
        vrt_ds.BuildOverviews('NEAR', [2, 4, 5, 50])  # level 50 is too big
    # Virtual overviews live in the VRT XML itself: no .ovr side-car.
    assert gdal.VSIStatL(tmpfilename + '.ovr') is None
    assert vrt_ds.GetRasterBand(1).GetOverviewCount() == 3
    # Clean overviews
    with gdaltest.config_option('VRT_VIRTUAL_OVERVIEWS', 'YES'):
        vrt_ds.BuildOverviews('NONE', [])
    assert vrt_ds.GetRasterBand(1).GetOverviewCount() == 0
    # Add in two steps
    with gdaltest.config_option('VRT_VIRTUAL_OVERVIEWS', 'YES'):
        vrt_ds.BuildOverviews('NEAR', [2, 4])
        vrt_ds.BuildOverviews('NEAR', [5])
    assert vrt_ds.GetRasterBand(1).GetOverviewCount() == 3
    # Levels 2 and 4 must resolve to the source's real overviews, and
    # downsampled reads must match reads through the source.
    assert vrt_ds.GetRasterBand(1).GetOverview(
        0).Checksum() == src_ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert vrt_ds.GetRasterBand(1).GetOverview(
        1).Checksum() == src_ds.GetRasterBand(1).GetOverview(1).Checksum()
    assert vrt_ds.ReadRaster(0, 0, 20, 20, 10, 10) == src_ds.ReadRaster(0, 0, 20, 20, 10, 10)
    assert vrt_ds.GetRasterBand(1).ReadRaster(
        0, 0, 20, 20, 10, 10) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 20, 20, 10, 10)
    assert vrt_ds.GetRasterBand(1).ReadRaster(
        0, 0, 20, 20, 5, 5) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 20, 20, 5, 5)
    # A 4x4 read is served from the 4x-overview (filled with 50).
    assert struct.unpack(
        'B' * 4 * 4,
        vrt_ds.GetRasterBand(1).ReadRaster(0, 0, 20, 20, 4, 4))[0] == 50
    vrt_ds = None
    # Re-open VRT and re-run checks
    vrt_ds = gdal.Open(tmpfilename)
    assert vrt_ds.GetRasterBand(1).GetOverviewCount() == 3
    assert vrt_ds.GetRasterBand(1).GetOverview(
        0).Checksum() == src_ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert vrt_ds.GetRasterBand(1).GetOverview(
        1).Checksum() == src_ds.GetRasterBand(1).GetOverview(1).Checksum()
    assert vrt_ds.ReadRaster(0, 0, 20, 20, 10, 10) == src_ds.ReadRaster(0, 0, 20, 20, 10, 10)
    assert vrt_ds.GetRasterBand(1).ReadRaster(
        0, 0, 20, 20, 10, 10) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 20, 20, 10, 10)
    assert vrt_ds.GetRasterBand(1).ReadRaster(
        0, 0, 20, 20, 5, 5) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 20, 20, 5, 5)
    assert struct.unpack(
        'B' * 4 * 4,
        vrt_ds.GetRasterBand(1).ReadRaster(0, 0, 20, 20, 4, 4))[0] == 50
    gdal.Unlink(tmpfilename)
    gdal.Unlink(tif_tmpfilename)
def process_2nd_pol(in_file, rtc_name, cpol, res, look_fact, gamma_flag, filter_flag,
                    pwr_flag, browse_res, outfile, dem, terms, par=None, area=False,
                    orbit_file=None):
    """Terrain-correct the cross polarization by reusing the geometry
    already computed for the main polarization.

    Instead of re-running the matching passes, this symlinks the main
    polarization's geo_<mpol> products into geo_<cpol> and runs only the
    final mk_geo_radcal pass (mode 3).  Changes the working directory
    into geo_<cpol> and restores it at the end.

    NOTE(review): the exact semantics of the external GAMMA commands are
    not visible here — comments describe the visible call sequence only.
    """
    # Main polarization corresponding to the given cross polarization.
    if cpol == "VH":
        mpol = "VV"
    else:
        mpol = "HH"
    mgrd = "{out}.{pol}.mgrd".format(out=outfile, pol=cpol)
    tif = "image_cal_map.mli.tif"

    # Ingest the granule into gamma format
    ingest_S1_granule(in_file, cpol, look_fact, mgrd, orbit_file=orbit_file)
    width = getParameter("{}.par".format(mgrd), "range_samples")

    # Apply filtering if requested
    if filter_flag:
        el_looks = look_fact * 30
        execute(f"enh_lee {mgrd} temp.mgrd {width} {el_looks} 1 7 7", uselogging=True)
        shutil.move("temp.mgrd", mgrd)

    options = "-p -n {} -q -c ".format(terms)
    if gamma_flag:
        options += "-g "

    home_dir = os.getcwd()
    geo_dir = "geo_{}".format(cpol)
    mdir = "geo_{}".format(mpol)
    if not os.path.isdir(geo_dir):
        os.mkdir(geo_dir)

    # Reuse the main polarization's geometry products via copy/symlinks.
    shutil.copy("geo_{}/image.diff_par".format(mpol), "{}".format(geo_dir))
    os.symlink("../geo_{}/image_0.map_to_rdc".format(mpol),
               "{}/image_0.map_to_rdc".format(geo_dir))
    os.symlink("../geo_{}/image_0.ls_map".format(mpol),
               "{}/image_0.ls_map".format(geo_dir))
    os.symlink("../geo_{}/image_0.inc_map".format(mpol),
               "{}/image_0.inc_map".format(geo_dir))
    os.symlink("../geo_{}/image_0.sim".format(mpol),
               "{}/image_0.sim".format(geo_dir))
    os.symlink("../geo_{}/area.dem_par".format(mpol),
               "{}/area.dem_par".format(geo_dir))

    # A supplied diff_par overrides the one copied from the main pol.
    if par:
        shutil.copy(par, "{}/image.diff_par".format(geo_dir))
    # Final geocoding pass only (mode 3); matching was done for the main pol.
    execute(f"mk_geo_radcal {mgrd} {mgrd}.par {dem} {dem}.par {mdir}/area.dem"
            f" {mdir}/area.dem_par {geo_dir} image {res} 3 {options}",
            uselogging=True)

    os.chdir(geo_dir)

    # Divide sigma0 by sin(theta) to get beta0
    execute(f"float_math image_0.inc_map - image_1.sin_theta {width} 7 - - 1 1 - 0")
    execute(f"float_math image_cal_map.mli image_1.sin_theta image_1.beta {width} 3 - - 1 1 - 0")
    execute(f"float_math image_1.beta image_0.sim image_1.flat {width} 3 - - 1 1 - 0")

    # Make geotiff file
    if gamma_flag:
        gdal.Translate("tmp.tif", tif, metadataOptions=['Band1={}_gamma0'.format(cpol)])
    else:
        gdal.Translate("tmp.tif", tif, metadataOptions=['Band1={}_sigma0'.format(cpol)])
    shutil.move("tmp.tif", tif)

    # Make browse resolution file
    createAmp(tif, nodata=0)
    if res == browse_res:
        shutil.copy("image_cal_map.mli_amp.tif",
                    "{}_{}_{}m.tif".format(outfile, cpol, browse_res))
    else:
        gdal.Translate("{}_{}_{}m.tif".format(outfile, cpol, browse_res),
                       "image_cal_map.mli_amp.tif", xRes=browse_res, yRes=browse_res)

    # Create meta files and stats
    execute(f"asf_import -format geotiff image_cal_map.mli_amp.tif tc_{cpol}", uselogging=True)
    execute(f"stats -nostat -overmeta -mask 0 tc_{cpol}", uselogging=True)

    # Move files to product directory
    out_dir = "../PRODUCT"
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    execute(f"data2geotiff area.dem_par image_1.flat 2 {outfile}.flat.tif", uselogging=True)

    if pwr_flag:
        shutil.move(tif, "{}/{}".format(out_dir, rtc_name))
    else:
        copy_metadata(tif, "image_cal_map.mli_amp.tif")
        shutil.move("image_cal_map.mli_amp.tif", "{}/{}".format(out_dir, rtc_name))
    if area:
        shutil.move("{}.flat.tif".format(outfile),
                    "{}/{}_flat_{}.tif".format(out_dir, rtc_name, cpol))

    os.chdir(home_dir)
def wcs_6():
    """Exercise the WCS driver against a local mock server for every
    configured server/version combination, checking that the URLs the driver
    generates match previously-recorded good ones (via WCSHTTPHandler).

    Returns 'skip' when the WCS driver is absent, otherwise 'success'/'fail'
    based on the module-global wcs_6_ok flag, which the HTTP handler and the
    checks below may clear.
    """
    driver = gdal.GetDriverByName('WCS')
    if driver is None:
        return 'skip'
    # Generating various URLs from the driver and comparing them to ones
    # that have worked.
    first_call = True
    size = 60
    cache = 'CACHE=wcs_cache'
    global urls
    urls = read_urls()
    (process, port) = webserver.launch(handler=WCSHTTPHandler)
    url = "http://127.0.0.1:" + str(port)
    setup = setupFct()
    servers = []
    for server in setup:
        servers.append(server)
    for server in sorted(servers):
        for i, v in enumerate(setup[server]['Versions']):
            # Versions are stored as e.g. 201 meaning "2.0.1".
            version = str(int(v / 100)) + '.' + str(int(v % 100 / 10)) + '.' + str((v % 10))
            if not server + '-' + version in urls:
                print("Error: " + server + '-' + version + " not in urls")
                global wcs_6_ok
                wcs_6_ok = False
                continue
            options = [cache]
            if first_call:
                # Clear the cache only once, on the very first open.
                options.append('CLEAR_CACHE')
                first_call = False
            query = 'server=' + server + '&version=' + version
            # First open: GetCapabilities level.
            ds = gdal.OpenEx(utf8_path="WCS:" + url + "/?" + query,
                             open_options=options)
            coverage = setup[server]['Coverage']
            if isinstance(coverage, list):
                coverage = coverage[i]
            if isinstance(coverage, numbers.Number):
                coverage = str(coverage)
            query += '&coverage=' + coverage
            options = [cache]
            # Per-server open options ('-oo' tokens are stripped).
            if isinstance(setup[server]['Options'], list):
                oo = setup[server]['Options'][i]
            else:
                oo = setup[server]['Options']
            oo = oo.split()
            for o in oo:
                if o != '-oo':
                    options.append(o)
            options.append('GetCoverageExtra=test=none')
            # Second open: DescribeCoverage level.
            ds = gdal.OpenEx(utf8_path="WCS:" + url + "/?" + query,
                             open_options=options)
            ds = 0
            options = [cache]
            options.append('GetCoverageExtra=test=scaled')
            options.append('INTERLEAVE=PIXEL')
            # Third open: the dataset actually used for the scaled GetCoverage.
            ds = gdal.OpenEx(utf8_path="WCS:" + url + "/?" + query,
                             open_options=options)
            if not ds:
                print("OpenEx failed: WCS:" + url + "/?" + query)
                global wcs_6_ok
                wcs_6_ok = False
                break
            projwin = setup[server]['Projwin'].replace('-projwin ', '').split()
            for i, c in enumerate(projwin):
                projwin[i] = int(c)
            options = [cache]
            # Scaled GetCoverage: fixed output width triggers scaling.
            tmpfile = "tmp/" + server + version + ".tiff"
            gdal.Translate(tmpfile, ds, projWin=projwin, width=size, options=options)
            os.remove(tmpfile)
            # Non-scaled GetCoverage, only when a recorded response exists.
            if os.path.isfile('data/wcs/' + server + '-' + version + '-non_scaled.tiff'):
                options = [cache]
                options.append('GetCoverageExtra=test=non_scaled')
                options.append('INTERLEAVE=PIXEL')
                ds = gdal.OpenEx(utf8_path="WCS:" + url + "/?" + query,
                                 open_options=options)
                if not ds:
                    print("OpenEx failed: WCS:" + url + "/?" + query)
                    global wcs_6_ok
                    wcs_6_ok = False
                    break
                options = [cache]
                gdal.Translate(tmpfile, ds, srcWin=[0, 0, 2, 2], options=options)
                os.remove(tmpfile)
            else:
                print(server + ' ' + version + ' non_scaled skipped (no response file)')
    webserver.server_stop(process, port)
    if wcs_6_ok:
        return 'success'
    else:
        return 'fail'
def load_file(self, filename, env):
    """Validate an RGB/RGBA raster file and register it in the mosaic.

    Raises ValidationError when the raster is not 3- or 4-band, uses an
    unsupported driver, mixes band data types, or lacks projection info.
    On success stores the WGS84 footprint and writes a (possibly
    reprojected) tiled DEFLATE GeoTIFF into the component's work directory,
    then rebuilds overviews.
    """
    ds, imfilename = self._gdalds(filename)
    if ds.RasterCount not in (3, 4):
        raise ValidationError(_("Only RGB and RGBA rasters are supported."))
    dsdriver = ds.GetDriver()
    dsproj = ds.GetProjection()
    dsgtran = ds.GetGeoTransform()
    if dsdriver.ShortName not in DRIVERS.enum:
        raise ValidationError(
            _("Raster has format '%(format)s', however only following formats are supported: %(all_formats)s.")  # NOQA: E501
            % dict(format=dsdriver.ShortName, all_formats=", ".join(SUPPORTED_DRIVERS))
        )
    if not dsproj or not dsgtran:
        raise ValidationError(_("Raster files without projection info are not supported."))
    # Scan bands: all must share one data type; detect an alpha band and
    # whether every non-alpha band carries a nodata value.
    data_type = None
    alpha_band = None
    has_nodata = None
    for bidx in range(1, ds.RasterCount + 1):
        band = ds.GetRasterBand(bidx)
        if data_type is None:
            data_type = band.DataType
        elif data_type != band.DataType:
            raise ValidationError(_("Complex data types are not supported."))
        if band.GetRasterColorInterpretation() == gdal.GCI_AlphaBand:
            assert alpha_band is None, "Multiple alpha bands found!"
            alpha_band = bidx
        else:
            # True only if every non-alpha band seen so far has nodata set.
            has_nodata = (has_nodata is None or has_nodata) and (
                band.GetNoDataValue() is not None)
    src_osr = osr.SpatialReference()
    src_osr.ImportFromWkt(dsproj)
    dst_osr = osr.SpatialReference()
    dst_osr.ImportFromEPSG(int(self.resource.srs.id))
    # Only warp when source and resource SRS differ.
    reproject = not src_osr.IsSame(dst_osr)
    # Footprint comes from gdal.Info's wgs84Extent, stored as WKB at SRID 4326.
    info = gdal.Info(imfilename, format='json')
    geom = Geometry.from_geojson(info['wgs84Extent'])
    self.footprint = ga.elements.WKBElement(bytearray(geom.wkb), srid=4326)
    self.fileobj = env.file_storage.fileobj(component='raster_mosaic')
    dst_file = env.raster_mosaic.workdir_filename(self.fileobj, makedirs=True)
    co = ['COMPRESS=DEFLATE', 'TILED=YES', 'BIGTIFF=YES']
    if reproject:
        gdal.Warp(
            dst_file, imfilename,
            options=gdal.WarpOptions(
                format='GTiff',
                dstSRS='EPSG:%d' % self.resource.srs.id,
                # Add an alpha band only when there is neither nodata nor
                # an existing alpha band to mark the warped-in gaps.
                dstAlpha=not has_nodata and alpha_band is None,
                creationOptions=co,
            ),
        )
    else:
        gdal.Translate(
            dst_file, imfilename,
            options=gdal.TranslateOptions(
                format='GTiff',
                creationOptions=co
            )
        )
    self.build_overview()
def test_cog_resampling_options():
    """COG driver: WARP_RESAMPLING must take precedence over RESAMPLING for
    reprojection, and OVERVIEW_RESAMPLING over RESAMPLING for overviews."""
    filename = '/vsimem/test_cog_resampling_options.tif'
    drv = gdal.GetDriverByName('COG')
    src_ds = gdal.Open('data/byte.tif')

    def full_res_checksum(creation_options):
        # Copy with the given options and return the band-1 checksum.
        out = drv.CreateCopy(filename, src_ds, options=creation_options)
        return out.GetRasterBand(1).Checksum()

    scheme = ['TILING_SCHEME=GoogleMapsCompatible']
    cs1 = full_res_checksum(scheme + ['WARP_RESAMPLING=NEAREST'])
    cs2 = full_res_checksum(scheme + ['WARP_RESAMPLING=CUBIC'])
    cs3 = full_res_checksum(scheme + ['RESAMPLING=NEAREST', 'WARP_RESAMPLING=CUBIC'])
    # Different warp resampling changes the result; WARP_RESAMPLING wins
    # over RESAMPLING.
    assert cs1 != cs2
    assert cs2 == cs3

    # Rebuild the source with a size that forces overview generation.
    src_ds = gdal.Translate('', 'data/byte.tif', options='-of MEM -outsize 129 0')

    def overview_checksum(creation_options):
        # Copy with the given options and return the first overview's checksum.
        out = drv.CreateCopy(filename, src_ds, options=creation_options)
        return out.GetRasterBand(1).GetOverview(0).Checksum()

    cs1 = overview_checksum(['BLOCKSIZE=128', 'OVERVIEW_RESAMPLING=NEAREST'])
    cs2 = overview_checksum(['BLOCKSIZE=128', 'OVERVIEW_RESAMPLING=BILINEAR'])
    cs3 = overview_checksum(['BLOCKSIZE=128', 'RESAMPLING=NEAREST',
                             'OVERVIEW_RESAMPLING=BILINEAR'])
    # OVERVIEW_RESAMPLING wins over RESAMPLING.
    assert cs1 != cs2
    assert cs2 == cs3

    gdal.Unlink(filename)
# Post-process one tile directory: pair VV/VH images by acquisition date,
# compute the VV-VH difference band and convert all three to GeoTIFF.
path = result_path + dirLoc + '/'
if not os.path.exists(path):
    os.mkdir(path)
src_path = tiles_path + dirLoc + '/Sigma0_filtered_db.data/'

# Going through the images: index .img files by date and polarisation.
liste_vv = {}
liste_vh = {}
for file in os.listdir(src_path):
    if not file.endswith('.img'):
        continue
    # Filename layout assumed: polarisation at chars 7:9, date at [-16:-7].
    date = translateDate(file[-16:-7])
    pol = file[7:9]
    target = liste_vv if pol == 'VV' else liste_vh
    target[date] = file

# vv-vh calculation and GTiff conversion
for date, vh in liste_vh.items():
    vv = liste_vv[date]
    output_vv = f"{path}sigma0_db_vv_{dirLoc}_{date}.tif"
    output_vh = f"{path}sigma0_db_vh_{dirLoc}_{date}.tif"
    output_vvvh = f"{path}sigma0_db_vvvh_{dirLoc}_{date}.tif"
    gdal_calc.Calc(calc="B-A", A=src_path + vh, B=src_path + vv,
                   outfile=output_vvvh, overwrite=True)
    gdal.Translate(output_vh, src_path + vh, format="GTiff")
    gdal.Translate(output_vv, src_path + vv, format="GTiff")

# Drop the now-converted source tile directory.
print_('rm ' + tiles_path + dirLoc)
shutil.rmtree(tiles_path + dirLoc)
def test_cog_northing_easting_and_non_power_of_two_ratios():
    """COG + NZTM2000 tiling scheme: check grid alignment of the raster and
    its overviews when consecutive zoom levels have non-power-of-two scale
    ratios (1000 -> 2500 -> 5000)."""
    filename = '/vsimem/cog.tif'
    # NZTM2000 tile matrix set origin (easting, northing).
    x0_NZTM2000 = -1000000
    y0_NZTM2000 = 10000000
    blocksize = 256
    scale_denom_zoom_level_14 = 1000
    scale_denom_zoom_level_13 = 2500
    scale_denom_zoom_level_12 = 5000
    ds = gdal.Translate(
        filename, 'data/byte.tif', options=
        '-of COG -a_srs EPSG:2193 -a_ullr 1000001 5000001 1000006.6 4999995.4 -co TILING_SCHEME=NZTM2000 -co ALIGNED_LEVELS=2'
    )
    assert ds.RasterXSize == 1280
    assert ds.RasterYSize == 1280
    b = ds.GetRasterBand(1)
    # Overview sizes reflect the 2.5x and 2x zoom-level ratios.
    assert [(b.GetOverview(i).XSize, b.GetOverview(i).YSize)
            for i in range(b.GetOverviewCount())] == [(512, 512), (256, 256)]
    gt = ds.GetGeoTransform()
    res_zoom_level_14 = scale_denom_zoom_level_14 * 0.28e-3  # According to OGC Tile Matrix Set formula
    assert gt == pytest.approx(
        (999872, res_zoom_level_14, 0, 5000320, 0, -res_zoom_level_14),
        abs=1e-8)
    # Check that gt origin matches the corner of a tile at zoom 14
    res = gt[1]
    tile_x = (gt[0] - x0_NZTM2000) / (blocksize * res)
    assert tile_x == pytest.approx(round(tile_x))
    tile_y = (y0_NZTM2000 - gt[3]) / (blocksize * res)
    assert tile_y == pytest.approx(round(tile_y))
    # Check that overview=0 corresponds to the resolution of zoom level=13 / OGC ScaleDenom = 2500
    ovr0_xsize = b.GetOverview(0).XSize
    assert float(ovr0_xsize) / ds.RasterXSize == float(
        scale_denom_zoom_level_14) / scale_denom_zoom_level_13
    # Check that gt origin matches the corner of a tile at zoom 13
    ovr0_res = res * scale_denom_zoom_level_13 / scale_denom_zoom_level_14
    tile_x = (gt[0] - x0_NZTM2000) / (blocksize * ovr0_res)
    assert tile_x == pytest.approx(round(tile_x))
    tile_y = (y0_NZTM2000 - gt[3]) / (blocksize * ovr0_res)
    assert tile_y == pytest.approx(round(tile_y))
    # Check that overview=1 corresponds to the resolution of zoom level=12 / OGC ScaleDenom = 5000
    ovr1_xsize = b.GetOverview(1).XSize
    assert float(ovr1_xsize) / ds.RasterXSize == float(
        scale_denom_zoom_level_14) / scale_denom_zoom_level_12
    # Check that gt origin matches the corner of a tile at zoom 12
    ovr1_res = res * scale_denom_zoom_level_12 / scale_denom_zoom_level_14
    tile_x = (gt[0] - x0_NZTM2000) / (blocksize * ovr1_res)
    assert tile_x == pytest.approx(round(tile_x))
    tile_y = (y0_NZTM2000 - gt[3]) / (blocksize * ovr1_res)
    assert tile_y == pytest.approx(round(tile_y))
    # The tiling-scheme parameters must round-trip into metadata.
    assert ds.GetMetadata("TILING_SCHEME") == {
        "NAME": "NZTM2000",
        "ZOOM_LEVEL": "14",
        "ALIGNED_LEVELS": "2"
    }
    ds = None
    gdal.GetDriverByName('GTiff').Delete(filename)
def applyverticalshiftgrid_1():
    """Basic gdal.ApplyVerticalShiftGrid checks: output geometry matches the
    source, sources can be released, BLOCKSIZE and the inverse/Float32 paths
    produce the expected checksums."""

    def report(value):
        # gdaltest failure protocol: flag, echo the offending value, 'fail'.
        gdaltest.post_reason('fail')
        print(value)
        return 'fail'

    src_ds = gdal.Open('../gcore/data/byte.tif')
    src_ds = gdal.Translate('', src_ds, format='MEM', width=20, height=40)
    grid_ds = gdal.Translate('', src_ds, format='MEM')
    out_ds = gdal.ApplyVerticalShiftGrid(src_ds, grid_ds)

    # Output must mirror the source's type, size and georeferencing.
    if out_ds.GetRasterBand(1).DataType != gdal.GDT_Byte:
        return report(out_ds.GetRasterBand(1).DataType)
    if out_ds.RasterXSize != src_ds.RasterXSize:
        return report(out_ds.RasterXSize)
    if out_ds.RasterYSize != src_ds.RasterYSize:
        return report(out_ds.RasterYSize)
    if out_ds.GetGeoTransform() != src_ds.GetGeoTransform():
        return report(out_ds.GetGeoTransform())
    if out_ds.GetProjectionRef() != src_ds.GetProjectionRef():
        return report(out_ds.GetProjectionRef())

    # Check that we can drop the reference to the sources
    src_ds = None
    grid_ds = None
    cs = out_ds.GetRasterBand(1).Checksum()
    if cs != 10038:
        return report(cs)

    src_ds = gdal.Open('../gcore/data/byte.tif')
    src_ds = gdal.Translate('', src_ds, format='MEM', width=20, height=40)

    # Test block size
    out_ds = gdal.ApplyVerticalShiftGrid(src_ds, src_ds, options=['BLOCKSIZE=15'])
    cs = out_ds.GetRasterBand(1).Checksum()
    if cs != 10038:
        return report(cs)

    # Inverse transformer: subtracting the grid from itself zeroes the raster.
    out_ds = gdal.ApplyVerticalShiftGrid(src_ds, src_ds, True,
                                         options=['DATATYPE=Float32'])
    if out_ds.GetRasterBand(1).DataType != gdal.GDT_Float32:
        return report(out_ds.GetRasterBand(1).DataType)
    cs = out_ds.GetRasterBand(1).Checksum()
    if cs != 0:
        return report(cs)

    return 'success'
def mrf_versioned():
    """MRF driver: enable versioning on a cached MRF and verify that the
    pre-edit data remains readable as version V1, the post-edit data as V0
    and the current dataset, and that a nonexistent version fails to open.

    NOTE(review): the XML literal's line breaks were reconstructed; XML
    whitespace does not affect its parsing here.
    """
    # Start from a clean slate in /vsimem.
    gdal.Unlink('/vsimem/out.mrf')
    gdal.Unlink('/vsimem/out.mrf.aux.xml')
    gdal.Unlink('/vsimem/out.idx')
    gdal.Unlink('/vsimem/out.ppg')
    gdal.Unlink('/vsimem/out.til')
    # Caching MRF
    gdal.Translate('/vsimem/out.mrf', 'data/byte.tif', format='MRF')
    # Rewrite the metadata with versioning turned on.
    gdal.FileFromMemBuffer('/vsimem/out.mrf', """<MRF_META>
  <Raster versioned="on">
    <Size x="20" y="20" c="1" />
    <PageSize x="512" y="512" c="1" />
  </Raster>
  <GeoTags>
    <BoundingBox minx="440720.00000000" miny="3750120.00000000" maxx="441920.00000000" maxy="3751320.00000000" />
    <Projection>PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26711"]]</Projection>
  </GeoTags>
</MRF_META>""")
    # Overwrite the raster with zeros; this creates a new version.
    ds = gdal.Open('/vsimem/out.mrf', gdal.GA_Update)
    ds.GetRasterBand(1).Fill(0)
    ds = None
    # Current dataset shows the zeroed data.
    ds = gdal.Open('/vsimem/out.mrf')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 0
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    # V0 is the zeroed (latest) version.
    ds = gdal.Open('/vsimem/out.mrf:MRF:V0')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 0
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    # V1 still holds the original byte.tif data (checksum 4672).
    ds = gdal.Open('/vsimem/out.mrf:MRF:V1')
    cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 4672
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    # V2 does not exist; opening it must fail.
    with gdaltest.error_handler():
        ds = gdal.Open('/vsimem/out.mrf:MRF:V2')
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Clean up all MRF side files.
    gdal.Unlink('/vsimem/out.mrf')
    gdal.Unlink('/vsimem/out.mrf.aux.xml')
    gdal.Unlink('/vsimem/out.idx')
    gdal.Unlink('/vsimem/out.ppg')
    gdal.Unlink('/vsimem/out.til')
    return 'success'
""" Cut image into mosaic rows and columns with one pixel overlap """ import sys from osgeo import gdal if len(sys.argv) < 2: print("Usage: {} image_file rows cols".format(sys.argv[0])) print(" rows and cols are optional default value is 4 for both") sys.exit(0) DS = gdal.Open(sys.argv[1]) # load input dataset if DS is None: print("Input dataset not found or not readable") sys.exit(1) WIDTH = DS.RasterXSize # get image size HEIGHT = DS.RasterYSize ROWS = COLS = 4 if len(sys.argv) > 2: # get number of mosaic rows from command line ROWS = int(sys.argv[2]) if len(sys.argv) > 3: # get number of mosaic cols from command line COLS = int(sys.argv[3]) ROW_STEP = int(HEIGHT / ROWS) # row height ROW_STEP1 = ROW_STEP + 1 # one pixel overlap between rows COL_STEP = int(WIDTH / COLS) # col width COL_STEP1 = COL_STEP + 1 # one pixel overlap between rows for j in range(0, WIDTH - COL_STEP + 1, COL_STEP): for i in range(0, HEIGHT - ROW_STEP + 1, ROW_STEP): name = "mosaic_{}_{}.tif".format(j, i) options = "-srcwin {} {} {} {}".format(j, i, COL_STEP1, ROW_STEP1) gdal.Translate(name, DS, options=options)
def download_srtm(LLLON, LLLAT, URLON, URLAT,
                  output_directory='./data/reference_dem/',
                  verbose=True):
    """Download SRTM3 tiles covering the WGS84 box (LLLON, LLLAT)-(URLON, URLAT),
    adjust them from the EGM96 geoid to the WGS84 ellipsoid, warp to the
    local UTM zone and return the path of the resulting GeoTIFF.

    Requires the external tools `eio` (elevation), `gdalbuildvrt`,
    `dem_geoid` (Ames Stereo Pipeline) and `gdalwarp` on PATH.
    """
    # TODO
    # - Add function to determine extent automatically from input cameras
    # - Make geoid adjustment and conversion to UTM optional
    import elevation

    # BUGFIX: the body referenced an undefined name `output_dir` (the
    # parameter is `output_directory`), which raised NameError on every call.
    output_dir = output_directory

    run_command(['eio', 'selfcheck'], verbose=verbose)
    print('Downloading SRTM DEM data...')
    hsfm.io.create_dir(output_dir)
    cache_dir = output_dir
    product = 'SRTM3'
    dem_bounds = (LLLON, LLLAT, URLON, URLAT)
    elevation.seed(bounds=dem_bounds,
                   cache_dir=cache_dir,
                   product=product,
                   max_download_tiles=999)

    # Mosaic the downloaded tiles into a VRT, then clip it to the request box.
    tifs = glob.glob(os.path.join(output_dir, 'SRTM3/cache/', '*tif'))
    vrt_file_name = os.path.join(output_dir, 'SRTM3/cache/srtm.vrt')
    call = ['gdalbuildvrt', vrt_file_name]
    call.extend(tifs)
    run_command(call, verbose=verbose)

    ds = gdal.Open(vrt_file_name)
    vrt_subset_file_name = os.path.join(output_dir, 'SRTM3/cache/srtm_subset.vrt')
    ds = gdal.Translate(vrt_subset_file_name, ds,
                        projWin=[LLLON, URLAT, URLON, LLLAT])

    # Adjust from EGM96 geoid to WGS84 ellipsoid
    adjusted_vrt_subset_file_name_prefix = os.path.join(
        output_dir, 'SRTM3/cache/srtm_subset')
    call = [
        'dem_geoid', '--reverse-adjustment', vrt_subset_file_name, '-o',
        adjusted_vrt_subset_file_name_prefix
    ]
    run_command(call, verbose=verbose)
    adjusted_vrt_subset_file_name = adjusted_vrt_subset_file_name_prefix + '-adj.tif'

    # Get UTM EPSG code
    epsg_code = hsfm.geospatial.wgs_lon_lat_to_epsg_code(LLLON, LLLAT)

    # Convert to UTM; str() guards against the EPSG helper returning an int.
    utm_vrt_subset_file_name = os.path.join(
        output_dir, 'SRTM3/cache/srtm_subset_utm_geoid_adj.tif')
    call = ('gdalwarp -co COMPRESS=LZW -co TILED=YES -co BIGTIFF=IF_SAFER'
            ' -dstnodata -9999 -r cubic -t_srs EPSG:' + str(epsg_code))
    call = call.split()
    call.extend([adjusted_vrt_subset_file_name, utm_vrt_subset_file_name])
    run_command(call, verbose=verbose)

    # Cleanup: move the final DEM up, then drop the SRTM3 working tree.
    out = os.path.join(output_dir, os.path.split(utm_vrt_subset_file_name)[-1])
    os.rename(utm_vrt_subset_file_name, out)
    shutil.rmtree(os.path.join(output_dir, 'SRTM3/'))
    # BUGFIX: this message previously ran before `out` was assigned,
    # raising UnboundLocalError.
    print('Cleaning up...', 'Reference DEM available at', out)
    return out
def create_route_texture(dem_file, gpx_path, debugging=False):
    """Render a GPX hiking route into an RGBA texture aligned with dem_file.

    Returns [image_path, TextureBounds]. If both the texture PNG and the
    pickled bounds already exist, they are loaded and returned unchanged.
    Returns ["", ""] when the GPX track contains no points.

    NOTE(review): several runtime strings contain the literal "(unknown)"
    where a name (presumably `filename`) looks intended — left untouched;
    confirm against the project's history before changing.
    """
    filename = gpx_path.split("/")[-1].split(".")[0]
    folder = "exports/%s/texture" % filename
    if debugging:
        im_path = "%s/%s-texture-debug.png" % (folder, filename)
    else:
        im_path = "%s/%s-texture.png" % (folder, filename)
    try:
        os.mkdir(folder)
    except FileExistsError:
        pass
    texture_bounds_path = "%s/%s-texture-bounds.pkl" % (folder, filename)
    texture_exist = os.path.isfile("%s" % im_path)
    bounds_exist = os.path.isfile("%s" % texture_bounds_path)
    # Cached result: reuse the previously rendered texture and bounds.
    if texture_exist and bounds_exist:
        with open(texture_bounds_path, "rb") as f:
            tex_bounds = pickle.load(f)
        return [im_path, tex_bounds]
    p_i(f"Creating route texture for (unknown)")
    mns, minimums, maximums = read_hike_gpx(gpx_path)
    # Crop the DEM to the route's bounding box in the DEM's CRS.
    ds_raster = rasterio.open(dem_file)
    crs = int(ds_raster.crs.to_authority()[1])
    converter = LatLngToCrs(crs)
    lower_left = converter.convert(minimums[0].latitude, minimums[1].longitude)
    upper_right = converter.convert(maximums[0].latitude, maximums[1].longitude)
    bbox = (
        lower_left.GetX(),
        upper_right.GetY(),
        upper_right.GetX(),
        lower_left.GetY(),
    )
    gdal.Translate(f"{folder}/(unknown)-output_crop_raster.tif", dem_file,
                   projWin=bbox)
    im = cv2.imread(f"{folder}/(unknown)-output_crop_raster.tif")
    h, w, _ = im.shape
    # Line thickness scale; widened for debug renders.
    rs = 1
    if debugging:
        rs = 20
    # Render at 100x the cropped raster's pixel dimensions.
    multiplier = 100
    h = h * multiplier
    w = w * multiplier
    if not mns:
        return ["", ""]
    img = np.ones([h, w, 4], dtype=np.uint8)
    ds_raster = rasterio.open(dem_file)
    crs = int(ds_raster.crs.to_authority()[1])
    b = ds_raster.bounds
    bounds = [b.left, b.bottom, b.right, b.top]
    converter = LatLngToCrs(crs)
    # Project every track point into normalized raster coordinates.
    locs = [
        convert_single_coordinate_pair(bounds, converter, i.latitude, i.longitude)
        for i in mns
    ]
    prev_lat = abs(int(((100.0 * locs[0][0]) / 100) * w))
    prev_lon = h - abs(int(100.0 - ((100.0 * locs[0][1]) / 100.0) * h))
    # Draw the route as connected red (BGR) line segments.
    for i in locs:
        lat, lon = i
        x = h - abs(int(100.0 - ((100.0 * lon) / 100.0) * h))
        y = abs(int(((100.0 * lat) / 100.0) * w))
        cv2.line(img, (prev_lat, prev_lon), (y, x), (0, 0, 255, 255), 3 * rs)
        prev_lat, prev_lon = y, x
    min_lat_p = minimums[0]
    min_lon_p = minimums[1]
    max_lat_p = maximums[0]
    max_lon_p = maximums[1]
    min_x = convert_single_coordinate_pair(bounds, converter,
                                           min_lat_p.latitude, min_lat_p.longitude)
    min_y = convert_single_coordinate_pair(bounds, converter,
                                           min_lon_p.latitude, min_lon_p.longitude)
    max_x = convert_single_coordinate_pair(bounds, converter,
                                           max_lat_p.latitude, max_lat_p.longitude)
    max_y = convert_single_coordinate_pair(bounds, converter,
                                           max_lon_p.latitude, max_lon_p.longitude)
    # Crop the canvas to the drawn route's bounding rectangle.
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    _, thresh = cv2.threshold(gray, 1, 255, cv2.THRESH_BINARY)
    contours, _ = cv2.findContours(thresh, cv2.RETR_EXTERNAL,
                                   cv2.CHAIN_APPROX_SIMPLE)
    cnt = contours[0]
    x, y, w, h = cv2.boundingRect(cnt)
    crop = img[y:y + h, x:x + w]
    cv2.imwrite(im_path, crop)
    tex_bounds = TextureBounds(
        min_lat=min_lat_p,
        min_lon=min_lon_p,
        max_lat=max_lat_p,
        max_lon=max_lon_p,
        min_x=min_x,
        min_y=min_y,
        max_x=max_x,
        max_y=max_y,
    )
    p_i("Route texture complete")
    # Persist the bounds so the cached path above can short-circuit next time.
    with open(texture_bounds_path, "wb") as f:
        pickle.dump(tex_bounds, f)
    subprocess.call(
        ["rm", "-r", f"{folder}/(unknown)-output_crop_raster.tif"])
    return [im_path, tex_bounds]
def stats_uint16():
    """Statistics computation on UInt16 rasters: tiled/untiled GTiff with and
    without nodata, constant-fill fast paths, and the SSE-optimized code
    path's handling of trailing pixels after a multiple of 32."""
    # Tiled UInt16 version of small_world, scaled from byte range.
    ds = gdal.Translate(
        '/vsimem/stats_uint16_tiled.tif', '../gdrivers/data/small_world.tif',
        outputType=gdal.GDT_UInt16,
        scaleParams=[[0, 255, 0, 65535]],
        creationOptions=['TILED=YES', 'BLOCKXSIZE=64', 'BLOCKYSIZE=64'])
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    gdal.GetDriverByName('GTiff').Delete('/vsimem/stats_uint16_tiled.tif')
    # Byte-raster stats scaled by 65535/255.
    expected_stats = [
        0.0, 65535.0, 50.22115 * 65535 / 255, 67.119029288849973 * 65535 / 255
    ]
    if stats != expected_stats:
        gdaltest.post_reason('did not get expected stats')
        print(stats)
        print(expected_stats)
        return 'fail'

    # Untiled, with a non power of 16 width (399).
    ds = gdal.Translate(
        '/vsimem/stats_uint16_untiled.tif', '../gdrivers/data/small_world.tif',
        options='-srcwin 0 0 399 200 -scale 0 255 0 65535 -ot UInt16')
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    gdal.GetDriverByName('GTiff').Delete('/vsimem/stats_uint16_untiled.tif')
    expected_stats = [0.0, 65535.0, 12923.9921679198, 17259.703026841547]
    if stats != expected_stats:
        gdaltest.post_reason('did not get expected stats')
        print(stats)
        print(expected_stats)
        return 'fail'

    # Same but with nodata set but untiled and with non power of 16 block size
    ds = gdal.Translate(
        '/vsimem/stats_uint16_untiled.tif', '../gdrivers/data/small_world.tif',
        options='-srcwin 0 0 399 200 -scale 0 255 0 65535 -ot UInt16')
    ds.GetRasterBand(1).SetNoDataValue(0)
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    gdal.GetDriverByName('GTiff').Delete('/vsimem/stats_uint16_untiled.tif')
    expected_stats = [
        257.0, 65535.0, 50.378183963744554 * 65535 / 255,
        67.184793517649453 * 65535 / 255
    ]
    if stats != expected_stats:
        gdaltest.post_reason('did not get expected stats')
        print(stats)
        print(expected_stats)
        return 'fail'

    # Constant-fill rasters: stats must be [v, v, v, 0] for edge-case values
    # around the signed/unsigned 16-bit boundaries.
    for fill_val in [0, 1, 32767, 32768, 65535]:
        ds = gdal.GetDriverByName('GTiff').Create(
            '/vsimem/stats_uint16_tiled.tif', 1000, 512, 1, gdal.GDT_UInt16,
            options=['TILED=YES', 'BLOCKXSIZE=512', 'BLOCKYSIZE=512'])
        ds.GetRasterBand(1).Fill(fill_val)
        stats = ds.GetRasterBand(1).GetStatistics(0, 1)
        ds = None
        gdal.Unlink('/vsimem/stats_uint16_tiled.tif')
        expected_stats = [fill_val, fill_val, fill_val, 0.0]
        if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
            gdaltest.post_reason('did not get expected stats')
            print(stats)
            print(fill_val)
            print(expected_stats)
            return 'fail'

    # Test remaining pixels after multiple of 32
    ds = gdal.GetDriverByName('MEM').Create('', 32 + 2, 1, 1, gdal.GDT_UInt16)
    ds.GetRasterBand(1).Fill(1)
    ds.GetRasterBand(1).WriteRaster(32, 0, 2, 1, struct.pack('H' * 2, 0, 65535))
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    expected_stats = [0.0, 65535.0, 1928.4411764705883, 11072.48066469611]
    if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
        gdaltest.post_reason('did not get expected stats')
        print(stats)
        print(expected_stats)
        return 'fail'

    # Non optimized code path (single pixel rasters).
    for fill_val in [0, 1, 32767, 32768, 65535]:
        ds = gdal.GetDriverByName('MEM').Create('', 1, 1, 1, gdal.GDT_UInt16)
        ds.GetRasterBand(1).WriteRaster(0, 0, 1, 1,
                                        struct.pack('H' * 1, fill_val))
        stats = ds.GetRasterBand(1).GetStatistics(0, 1)
        ds = None
        expected_stats = [fill_val, fill_val, fill_val, 0.0]
        if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
            gdaltest.post_reason('did not get expected stats')
            print(stats)
            print(fill_val)
            print(expected_stats)
            return 'fail'

    # Small mixed-value raster exercising the generic path row by row.
    ds = gdal.GetDriverByName('MEM').Create('', 3, 5, 1, gdal.GDT_UInt16)
    ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, struct.pack('H' * 3, 20, 30, 50))
    ds.GetRasterBand(1).WriteRaster(0, 1, 3, 1, struct.pack('H' * 3, 60, 10, 5))
    ds.GetRasterBand(1).WriteRaster(0, 2, 3, 1, struct.pack('H' * 3, 10, 20, 0))
    ds.GetRasterBand(1).WriteRaster(0, 3, 3, 1, struct.pack('H' * 3, 10, 20, 65535))
    ds.GetRasterBand(1).WriteRaster(0, 4, 3, 1, struct.pack('H' * 3, 10, 20, 10))
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    expected_stats = [0.0, 65535.0, 4387.333333333333, 16342.408927558861]
    if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
        gdaltest.post_reason('did not get expected stats')
        print(stats)
        print(expected_stats)
        return 'fail'

    # Extremes-heavy 2x2 raster: checks mean/stddev precision.
    ds = gdal.GetDriverByName('MEM').Create('', 2, 2, 1, gdal.GDT_UInt16)
    ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, struct.pack('H' * 2, 0, 65535))
    ds.GetRasterBand(1).WriteRaster(0, 1, 2, 1, struct.pack('H' * 2, 1, 65534))
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    ds = None
    expected_stats = [0.0, 65535.0, 32767.5, 32767.000003814814]
    if max([abs(stats[i] - expected_stats[i]) for i in range(4)]) > 1e-15:
        gdaltest.post_reason('did not get expected stats')
        print(stats)
        return 'fail'

    return 'success'
# -*- coding: utf-8 -*- """ Created on Thu Jul 5 14:19:01 2018 @author: acalderon """ #%% import numpy as np from osgeo import gdal band2 = r'C:\Users\acalderon\opt\bda\Bulk Order 921204\Landsat 8 OLI_TIRS C1 Level-1\LC08_L1TP_040036_20180621_20180622_01_RT_B2.tif' ds = gdal.Open(band2) gdal.Translate("H:/B2.csv", ds, format="XYZ") band3 = r'C:\Users\acalderon\opt\bda\Bulk Order 921204\Landsat 8 OLI_TIRS C1 Level-1\LC08_L1TP_040036_20180621_20180622_01_RT_B3.tif' ds = gdal.Open(band3) gdal.Translate("H:/B3.csv", ds, format="XYZ") band4 = r'C:\Users\acalderon\opt\bda\Bulk Order 921204\Landsat 8 OLI_TIRS C1 Level-1\LC08_L1TP_040036_20180621_20180622_01_RT_B4.tif' ds = gdal.Open(band4) gdal.Translate("H:/B4.csv", ds, format="XYZ") band5 = r'C:\Users\acalderon\opt\bda\Bulk Order 921204\Landsat 8 OLI_TIRS C1 Level-1\LC08_L1TP_040036_20180621_20180622_01_RT_B5.tif' ds = gdal.Open(band5) gdal.Translate("H:/B5.csv", ds, format="XYZ") #%% from pyspark.sql import SparkSession from pyspark.sql.types import StructField, IntegerType, StructType spark = SparkSession.builder.appName("SimpleApp").config( "spark.executor.memory", "6g").config("spark.driver.memory", "6g").getOrCreate() schemaString = "x y v"
sys.exit()

# Crop the merged raster to the requested lon/lat box and derive a JPEG.
mergedRaster = gdal.Open(output_dir + '/merged.tif')
# Closed ring: LL, UL, UR, LR, back to LL.
wktPolygon = 'POLYGON ((%s %s,%s %s,%s %s,%s %s,%s %s))' % (
    lon_min, lat_min, lon_min, lat_max, lon_max, lat_max, lon_max, lat_min,
    lon_min, lat_min)
output_path = output_dir + '/result.tif'
try:
    mergedRaster = crop_raster_by_wkt_polygon(mergedRaster, wktPolygon,
                                              output_path)
    options_list = [
        '-ot Byte',
        '-of GTiff',
        #'-b 1',
        #'-scale'
    ]
    options_string = " ".join(options_list)
    output_filename, extension = os.path.splitext(output_path)
    gdal.Translate(output_filename + '.jpg',
                   output_path,
                   rgbExpand='RGB',
                   options=options_string)
except Exception as e:
    # BUGFIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt and hid the actual failure reason.
    print('Crop is failed!', e)
    sys.exit()
finally:
    # Always drop the intermediate mosaic, success or not.
    os.remove(output_dir + '/merged.tif')
    print("All temp files are removed")
def trueColour(argv):
    """Build a true-colour (RGB) image for a satellite product directory.

    Arguments are read from ``sys.argv`` (the ``argv`` parameter itself is
    unused; kept for interface compatibility):
      argv[1] input directory, argv[2] output directory, argv[3] platform
      name, argv[4] product type, argv[5] optional AOI WKT polygon.

    Dispatches on the platform name, mosaics and/or pansharpens the band
    files it finds, and hands the final image to ``output``.  Terminates the
    process via ``sys.exit`` on unrecoverable errors.
    """
    # NOTE(review): reads sys.argv directly instead of the `argv` parameter.
    inputDirectory = sys.argv[1]
    outputDirectory = sys.argv[2]
    platformName = sys.argv[3]
    productType = sys.argv[4]
    if len(sys.argv) == 6:
        aoiwkt = sys.argv[5]
    else:
        aoiwkt = None

    # FIX: was the Python-2 statement `print gdal.VersionInfo()`; the rest of
    # the file uses print() calls, so use the function form (valid on 2 and 3).
    print(gdal.VersionInfo())
    gdal.UseExceptions()

    if platformName == 'PLEIADES' or platformName == 'SPOT':
        start = time.time()
        # Look for JPEG2000 files and then look for TIFF files
        jp2FilePaths = findFiles(inputDirectory, "jp2")
        tiffFilePaths = findFiles(inputDirectory, ("tiff", "tif"))
        # Create another array containing the filepaths regardless of type
        if len(jp2FilePaths) > 0:
            imageFilePaths = jp2FilePaths
        elif len(tiffFilePaths) > 0:
            imageFilePaths = tiffFilePaths
        else:
            # It couldn't find jp2 or tiff files
            sys.exit("Missing image files in directory " + inputDirectory)

        panTileFilePaths = []
        msTileFilePaths = []
        ps4bandTileFilePaths = []
        psRGBTileFilePaths = []
        psRGNTileFilePaths = []
        # Label each file by the product-type marker in its name.
        for filePath in imageFilePaths:
            path, fileName = os.path.split(filePath)
            if "_P_" in fileName:
                print("Image is panchromatic.")
                panTileFilePaths.append(filePath)
            elif "_MS_" in fileName:
                print("Image is 4 bands multispectral.")
                msTileFilePaths.append(filePath)
            elif "_PMS_" in fileName:
                print("Image is 4 bands pansharpened.")
                ps4bandTileFilePaths.append(filePath)
            elif "_PMS-N_" in fileName:
                print("Image is 3 bands pansharpened (B, G, R bands).")
                psRGBTileFilePaths.append(filePath)
            elif "_PMS-X_" in fileName:
                print(
                    "Image is 3 bands pansharpened (G, R, NIR bands, false colour)."
                )
                psRGNTileFilePaths.append(filePath)

        # Check if images are tiled: mosaic each group into a VRT (helper
        # returns a falsy value when the group is empty).
        panImageFilePath = mosaic(panTileFilePaths, "/panmosaic.vrt",
                                  outputDirectory)
        msImageFilePath = mosaic(msTileFilePaths, "/msmosaic.vrt",
                                 outputDirectory)
        ps4bandImageFilePath = mosaic(ps4bandTileFilePaths,
                                      "/ps4bandmosaic.vrt", outputDirectory)
        psRGBImageFilePath = mosaic(psRGBTileFilePaths, "/psRGBmosaic.vrt",
                                    outputDirectory)
        psRGNImageFilePath = mosaic(psRGNTileFilePaths, "/psRGNmosaic.vrt",
                                    outputDirectory)

        finalImageFilePath = None
        if panImageFilePath and msImageFilePath:
            # Both pan and MS exist -> it's a bundle: pansharpen them.
            finalImageFilePath = outputDirectory + "/pansharpen.vrt"
            gdal_pansharpen.gdal_pansharpen([
                '', panImageFilePath, msImageFilePath, finalImageFilePath,
                '-nodata', '0', '-co', 'PHOTOMETRIC=RGB', '-of', 'VRT'
            ])
        elif panImageFilePath:
            # It's just a pan file
            finalImageFilePath = panImageFilePath
        elif msImageFilePath:
            # It's just an MS file
            finalImageFilePath = msImageFilePath
        elif ps4bandImageFilePath:
            finalImageFilePath = ps4bandImageFilePath
        elif psRGBImageFilePath:
            finalImageFilePath = psRGBImageFilePath
        elif psRGNImageFilePath:
            finalImageFilePath = psRGNImageFilePath

        output(finalImageFilePath, outputDirectory, aoiwkt, start)
        print("True Colour script finished for PLEIADES product(s) at " +
              inputDirectory)
        executionTime = time.time() - start
        print(executionTime)

    elif platformName == 'KOMPSAT-2':
        start = time.time()
        # Find tiff files
        tiffFilesPaths = findFiles(inputDirectory, ("tif", "tiff"))
        if len(tiffFilesPaths) == 0:
            sys.exit("No TIFF file in the directory " + inputDirectory)
        elif len(tiffFilesPaths) == 1:
            # KOMPSAT-2 pansharpened product: a single already-merged TIFF.
            print("Found 1 tiff file.")
            PStiffFilePath = tiffFilesPaths[0]
            path, fileName = os.path.split(PStiffFilePath)
            # FIX: was `if "_1G_1MC.TIF" or "_PS.TIF" in fileName.upper():`,
            # which is always true because a non-empty string literal is
            # truthy; both markers must be membership-tested.
            if "_1G_1MC.TIF" in fileName.upper() or "_PS.TIF" in fileName.upper():
                # Convert to vrt
                panSharpenFilePath = outputDirectory + "/pansharpen.vrt"
                gdal.Translate(panSharpenFilePath, PStiffFilePath,
                               format="VRT")
            else:
                sys.exit("Unable to identify file type.")
        # Bundle = 1 pan file, 4 MS files - make composite MS image then
        # pansharpen
        elif len(tiffFilesPaths) == 5:
            print("Found 5 tiff files.")
            # Label the pan and MS files
            bandFilePaths = []
            panFilePathArray = []
            # Locate pan file ("PN"/"PP" naming, level-1 marker "_1").
            for filePath in tiffFilesPaths:
                path, fileName = os.path.split(filePath)
                if "PN" in fileName.upper() and "_1" in fileName.upper():
                    panFilePathArray.append(filePath)
                elif "PP" in fileName.upper() and "_1" in fileName.upper():
                    panFilePathArray.append(filePath)
            # Check the correct number of pan files have been found.
            if len(panFilePathArray) < 1:
                sys.exit("Unable to locate pan file in directory " +
                         inputDirectory)
            elif len(panFilePathArray) > 1:
                sys.exit("More than one pan file found in directory" +
                         inputDirectory)
            else:
                panFilePath = panFilePathArray[0]
            # Locate red files
            if not fileType(tiffFilesPaths, "R_1", None, bandFilePaths, 1):
                sys.exit("Error when locating red file.")
            # Locate green files
            if not fileType(tiffFilesPaths, "G_1R.TIF", "G_1G.TIF",
                            bandFilePaths, 2):
                sys.exit("Error when locating green file.")
            # Locate blue files
            if not fileType(tiffFilesPaths, "B_1", None, bandFilePaths, 3):
                sys.exit("Error when locating blue file.")
            # Create composite MS image
            print(
                "Successfully located pan and MS files. Creating composite colour image."
            )
            # Create vrt for bands
            colourFilePath = outputDirectory + "/spectral.vrt"
            try:
                gdal.BuildVRT(colourFilePath, bandFilePaths, separate=True)
            except RuntimeError:
                sys.exit("Error with gdal.BuildVRT")
            # Now pansharpen
            panSharpenFilePath = outputDirectory + "/pansharpen.vrt"
            gdal_pansharpen.gdal_pansharpen([
                '', panFilePath, colourFilePath, panSharpenFilePath,
                '-nodata', '0', '-co', 'PHOTOMETRIC=RGB', '-of', 'VRT'
            ])
        else:
            sys.exit("Invalid number of files found. " +
                     str(len(tiffFilesPaths)) + " files found in directory " +
                     inputDirectory)

        output(panSharpenFilePath, outputDirectory, aoiwkt, start)
        print("True Colour script finished for Kompsat-2 product(s) at " +
              inputDirectory)
        totalExecutionTime = time.time() - start
        print("Total execution time: " + str(totalExecutionTime))

    elif platformName == 'KOMPSAT-3':
        start = time.time()
        tiffFilesPath = findFiles(inputDirectory, ("tif", "tiff"))
        if len(tiffFilesPath) < 1:
            sys.exit("Missing image files in directory " + inputDirectory)

        pantileFilePaths = []
        redtileFilePaths = []
        greentileFilePaths = []
        bluetileFilePaths = []
        PRtileFilePaths = []
        PGtileFilePaths = []
        PBtileFilePaths = []
        # Label files by band marker in the (upper-cased) file name.
        for filePath in tiffFilesPath:
            path, fileName = os.path.split(filePath)
            if "_P_R" in fileName.upper() or "_P.TIF" in fileName.upper():
                # This will cause an issue as it'll be picked up by PG
                print("Image is panchromatic.")
                pantileFilePaths.append(filePath)
            elif "_R_R" in fileName.upper() or "_R.TIF" in fileName.upper():
                print("Image is red MS file.")
                redtileFilePaths.append(filePath)
            elif "_G_R" in fileName.upper() or "_G.TIF" in fileName.upper():
                print("Image is green MS file.")
                greentileFilePaths.append(filePath)
            elif "_B_R" in fileName.upper() or "_B.TIF" in fileName.upper():
                print("Image is blue MS file.")
                bluetileFilePaths.append(filePath)
            elif "_PR_R" in fileName.upper() or "_PR.TIF" in fileName.upper():
                print("Image is pansharpened red file.")
                PRtileFilePaths.append(filePath)
            elif "_PG_R" in fileName.upper() or "_PG.TIF" in fileName.upper():
                print("Image is pansharpened green file.")
                PGtileFilePaths.append(filePath)
            elif "_PB_R" in fileName.upper() or "_PB.TIF" in fileName.upper():
                print("Image is pansharpened blue file.")
                PBtileFilePaths.append(filePath)

        # Check for tiles: mosaic each band group into a VRT.
        panimageFilePath = mosaic(pantileFilePaths, "/panmosaic.vrt",
                                  outputDirectory)
        redFilePath = mosaic(redtileFilePaths, "/redmosaic.vrt",
                             outputDirectory)
        greenFilePath = mosaic(greentileFilePaths, "/greenmosaic.vrt",
                               outputDirectory)
        blueFilePath = mosaic(bluetileFilePaths, "/bluemosaic.vrt",
                              outputDirectory)
        PRFilePath = mosaic(PRtileFilePaths, "/PRmosaic.vrt", outputDirectory)
        PGFilePath = mosaic(PGtileFilePaths, "/PGmosaic.vrt", outputDirectory)
        PBFilePath = mosaic(PBtileFilePaths, "/PBmosaic.vrt", outputDirectory)

        if redFilePath and greenFilePath and blueFilePath:
            PSFiles = [redFilePath, greenFilePath, blueFilePath]
            # Create composite image from 3 bands
            MSFilePath = outputDirectory + "/MS.vrt"
            gdal.BuildVRT(MSFilePath, PSFiles, separate=True)
            if panimageFilePath:
                # Now panSharpen
                finalimageFilePath = outputDirectory + "/pansharpen.vrt"
                gdal_pansharpen.gdal_pansharpen([
                    '', panimageFilePath, MSFilePath, finalimageFilePath,
                    '-nodata', '0', '-co', 'PHOTOMETRIC=RGB', '-of', 'VRT'
                ])
            else:
                finalimageFilePath = MSFilePath
        elif PRFilePath and PGFilePath and PBFilePath:
            # Already-pansharpened bands: just combine the 3 bands.
            PSFiles = [PRFilePath, PGFilePath, PBFilePath]
            finalimageFilePath = outputDirectory + "/pansharpen.vrt"
            gdal.BuildVRT(finalimageFilePath, PSFiles, separate=True)
        else:
            sys.exit("Missing image files in directory " + inputDirectory)

        output(finalimageFilePath, outputDirectory, None, start)
        print("True Colour script finished for Kompsat-3 product(s) at " +
              inputDirectory)
        executiontime = time.time() - start
        print("Total execution time: " + str(executiontime))

    elif platformName == "KOMPSAT-3A":
        start = time.time()
        # Find tiff files
        tiffFilePaths = findFiles(inputDirectory, ("tif", "tiff"))
        if len(tiffFilePaths) == 0:
            sys.exit("No TIFF file in the directory " + inputDirectory)
        elif len(tiffFilePaths) == 4:
            # Pansharpened KOMPSAT-3A: combine RGB bands into composite image.
            print("Found 4 files")
            PSFiles = []
            # Add red files to the array
            if not fileType(tiffFilePaths, "_PR.TIF", None, PSFiles, 1):
                sys.exit("Error when locating red file.")
            # Add green files to the array
            if not fileType(tiffFilePaths, "_PG.TIF", None, PSFiles, 2):
                sys.exit("Error when locating green file.")
            # Add blue files to the array
            if not fileType(tiffFilePaths, "_PB.TIF", None, PSFiles, 3):
                sys.exit("Error when locating blue file.")
            # Create composite PS image from 3 bands
            panSharpenFilePath = outputDirectory + "/pansharpened.vrt"
            gdal.BuildVRT(panSharpenFilePath, PSFiles, separate=True)
        # For a bundle: 1 pan file + 4 MS files - make composite MS image
        # then pansharpen.
        elif len(tiffFilePaths) == 5:
            print("Found 5 files")
            # Label the pan and MS files
            bandFilePaths = []
            panFilePathArray = []
            # Locate pan files
            if not fileType(tiffFilePaths, "_P.TIF", None, panFilePathArray,
                            1):
                sys.exit("Error when locating pan file.")
            panFilePath = panFilePathArray[0]
            # Locate red files
            if not fileType(tiffFilePaths, "_R.TIF", None, bandFilePaths, 1):
                sys.exit("Error when locating red file.")
            # Locate green files
            if not fileType(tiffFilePaths, "_G.TIF", None, bandFilePaths, 2):
                sys.exit("Error when locating green file.")
            # Locate blue files
            if not fileType(tiffFilePaths, "_B.TIF", None, bandFilePaths, 3):
                sys.exit("Error when locating blue file.")
            # Create composite MS image: vrt over the located bands.
            colourFilePath = outputDirectory + "/spectral.vrt"
            try:
                gdal.BuildVRT(colourFilePath, bandFilePaths, separate=True)
            except RuntimeError:
                # NOTE(review): unlike the KOMPSAT-2 branch this only prints
                # and continues; confirm whether it should sys.exit instead.
                print("Error with gdal.BuildVRT")
            # Now pansharpen
            panSharpenFilePath = outputDirectory + "/pansharpen.vrt"
            gdal_pansharpen.gdal_pansharpen([
                '', panFilePath, colourFilePath, panSharpenFilePath,
                '-nodata', '0', '-co', 'PHOTOMETRIC=RGB', '-of', 'VRT'
            ])
        else:
            # FIX: was `+ len(tiffFilePaths) +`, a str+int TypeError; the
            # KOMPSAT-2 branch already wraps the count in str().
            sys.exit("Invalid number of files found. " +
                     str(len(tiffFilePaths)) + " files found in directory " +
                     inputDirectory)

        output(panSharpenFilePath, outputDirectory, None, start)
        print("True Colour script finished for Kompsat-3A product(s) at " +
              inputDirectory)
        executiontime = time.time() - start
        print("Total execution time: " + str(executiontime))