def main(rfile):
    """Print a gdalinfo-style report for the raster dataset *rfile*.

    The report covers driver, associated files, raster size, coordinate
    system, geotransform, GCPs, metadata domains, corner coordinates and
    per-band details.  Statistics are forced on, as with ``gdalinfo -stats``.

    Returns 0 on success, 1 if the dataset cannot be opened.
    """

    def EQUAL(a, b):
        # Case-insensitive comparison, mirroring GDAL's C EQUAL() macro.
        return a.lower() == b.lower()

    # Report options; this subset mimics gdalinfo's command-line switches.
    bComputeMinMax = False
    bShowGCPs = True
    bShowMetadata = True
    bShowRAT = True
    bApproxStats = True
    bShowColorTable = True
    bComputeChecksum = False
    bReportHistograms = False
    papszExtraMDDomains = []
    pszProjection = None
    hTransform = None
    bShowFileList = True

    # bReportHistograms = True  # -hist
    bStats = True  # -stats (forced on)

    # NOTE: the original opened the dataset twice and forced band-1
    # statistics up front; that work was redundant (the per-band loop
    # below computes statistics anyway), so it has been removed.
    hDataset = gdal.Open(rfile, gdal.GA_ReadOnly)
    if hDataset is None:
        # BUG FIX: the message formerly interpolated pszFilename, which
        # was always None; report the real filename instead.
        print("gdalinfo failed - unable to open '%s'." % rfile)
        return 1

    # --------------------------------------------------------------------
    # Report general info.
    # --------------------------------------------------------------------
    hDriver = hDataset.GetDriver()
    print("Driver: %s/%s" % (hDriver.ShortName, hDriver.LongName))

    papszFileList = hDataset.GetFileList()
    if papszFileList is None or len(papszFileList) == 0:
        print("Files: none associated")
    else:
        print("Files: %s" % papszFileList[0])
        if bShowFileList:
            for i in range(1, len(papszFileList)):
                print(" %s" % papszFileList[i])

    print("Size is %d, %d" % (hDataset.RasterXSize, hDataset.RasterYSize))

    # --------------------------------------------------------------------
    # Report projection.
    # --------------------------------------------------------------------
    pszProjection = hDataset.GetProjectionRef()
    if pszProjection is not None:
        hSRS = osr.SpatialReference()
        if hSRS.ImportFromWkt(pszProjection) == gdal.CE_None:
            pszPrettyWkt = hSRS.ExportToPrettyWkt(False)
            print("Coordinate System is:\n%s" % pszPrettyWkt)
        else:
            print("Coordinate System is `%s'" % pszProjection)

    # --------------------------------------------------------------------
    # Report Geotransform.
    # --------------------------------------------------------------------
    adfGeoTransform = hDataset.GetGeoTransform(can_return_null=True)
    if adfGeoTransform is not None:
        if adfGeoTransform[2] == 0.0 and adfGeoTransform[4] == 0.0:
            # North-up image: report origin/pixel size compactly.
            print("Origin = (%.15f,%.15f)" % (
                adfGeoTransform[0], adfGeoTransform[3]))
            print("Pixel Size = (%.15f,%.15f)" % (
                adfGeoTransform[1], adfGeoTransform[5]))
        else:
            # Rotated/sheared image: dump the full 6-term transform.
            print("GeoTransform =\n"
                  " %.16g, %.16g, %.16g\n"
                  " %.16g, %.16g, %.16g" % (
                      adfGeoTransform[0], adfGeoTransform[1],
                      adfGeoTransform[2], adfGeoTransform[3],
                      adfGeoTransform[4], adfGeoTransform[5]))

    # --------------------------------------------------------------------
    # Report GCPs.
    # --------------------------------------------------------------------
    if bShowGCPs and hDataset.GetGCPCount() > 0:
        pszProjection = hDataset.GetGCPProjection()
        if pszProjection is not None:
            hSRS = osr.SpatialReference()
            if hSRS.ImportFromWkt(pszProjection) == gdal.CE_None:
                pszPrettyWkt = hSRS.ExportToPrettyWkt(False)
                print("GCP Projection = \n%s" % pszPrettyWkt)
            else:
                print("GCP Projection = %s" % pszProjection)

        gcps = hDataset.GetGCPs()
        for i, gcp in enumerate(gcps):
            print("GCP[%3d]: Id=%s, Info=%s\n"
                  " (%.15g,%.15g) -> (%.15g,%.15g,%.15g)" % (
                      i, gcp.Id, gcp.Info,
                      gcp.GCPPixel, gcp.GCPLine,
                      gcp.GCPX, gcp.GCPY, gcp.GCPZ))

    # --------------------------------------------------------------------
    # Report metadata (default domain, then any extra domains requested).
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List()
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("Metadata:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    if bShowMetadata:
        for extra_domain in papszExtraMDDomains:
            papszMetadata = hDataset.GetMetadata_List(extra_domain)
            if papszMetadata is not None and len(papszMetadata) > 0:
                print("Metadata (%s):" % extra_domain)
                for metadata in papszMetadata:
                    print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report "IMAGE_STRUCTURE" metadata.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("IMAGE_STRUCTURE")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("Image Structure Metadata:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report subdatasets.
    # --------------------------------------------------------------------
    papszMetadata = hDataset.GetMetadata_List("SUBDATASETS")
    if papszMetadata is not None and len(papszMetadata) > 0:
        print("Subdatasets:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report geolocation.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("GEOLOCATION")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("Geolocation:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report RPCs.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("RPC")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("RPC Metadata:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Setup projected to lat/long transform if appropriate.
    # --------------------------------------------------------------------
    if pszProjection is not None and len(pszProjection) > 0:
        hProj = osr.SpatialReference(pszProjection)
        if hProj is not None:
            hLatLong = hProj.CloneGeogCS()
            if hLatLong is not None:
                # Errors are silenced: a missing PROJ library just means
                # no lat/long corner report, not a failure.
                gdal.PushErrorHandler('CPLQuietErrorHandler')
                hTransform = osr.CoordinateTransformation(hProj, hLatLong)
                gdal.PopErrorHandler()
                if gdal.GetLastErrorMsg().find(
                        'Unable to load PROJ.4 library') != -1:
                    hTransform = None

    # --------------------------------------------------------------------
    # Report corners.
    # --------------------------------------------------------------------
    print("Corner Coordinates:")
    GDALInfoReportCorner(hDataset, hTransform, "Upper Left", 0.0, 0.0)
    GDALInfoReportCorner(hDataset, hTransform, "Lower Left",
                         0.0, hDataset.RasterYSize)
    GDALInfoReportCorner(hDataset, hTransform, "Upper Right",
                         hDataset.RasterXSize, 0.0)
    GDALInfoReportCorner(hDataset, hTransform, "Lower Right",
                         hDataset.RasterXSize, hDataset.RasterYSize)
    GDALInfoReportCorner(hDataset, hTransform, "Center",
                         hDataset.RasterXSize / 2.0,
                         hDataset.RasterYSize / 2.0)

    # ====================================================================
    # Loop over bands.
    # ====================================================================
    for iBand in range(hDataset.RasterCount):
        hBand = hDataset.GetRasterBand(iBand + 1)

        (nBlockXSize, nBlockYSize) = hBand.GetBlockSize()
        print("Band %d Block=%dx%d Type=%s, ColorInterp=%s" % (
            iBand + 1, nBlockXSize, nBlockYSize,
            gdal.GetDataTypeName(hBand.DataType),
            gdal.GetColorInterpretationName(
                hBand.GetRasterColorInterpretation())))

        if hBand.GetDescription() is not None \
                and len(hBand.GetDescription()) > 0:
            print(" Description = %s" % hBand.GetDescription())

        dfMin = hBand.GetMinimum()
        dfMax = hBand.GetMaximum()
        if dfMin is not None or dfMax is not None or bComputeMinMax:
            line = " "
            if dfMin is not None:
                line = line + ("Min=%.3f " % dfMin)
            if dfMax is not None:
                line = line + ("Max=%.3f " % dfMax)
            if bComputeMinMax:
                gdal.ErrorReset()
                adfCMinMax = hBand.ComputeRasterMinMax(False)
                if gdal.GetLastErrorType() == gdal.CE_None:
                    line = line + (" Computed Min/Max=%.3f,%.3f" % (
                        adfCMinMax[0], adfCMinMax[1]))
            print(line)

        stats = hBand.GetStatistics(bApproxStats, bStats)
        # Dirty hack to recognize if stats are valid: if invalid, the
        # returned stddev is negative.
        if stats[3] >= 0.0:
            print(" Minimum=%.3f, Maximum=%.3f, Mean=%.3f, StdDev=%.3f" % (
                stats[0], stats[1], stats[2], stats[3]))

        if bReportHistograms:
            hist = hBand.GetDefaultHistogram(force=True,
                                             callback=gdal.TermProgress)
            if hist is not None:
                dfMin = hist[0]
                dfMax = hist[1]
                nBucketCount = hist[2]
                panHistogram = hist[3]
                print(" %d buckets from %g to %g:" % (
                    nBucketCount, dfMin, dfMax))
                line = ' '
                for bucket in panHistogram:
                    line = line + ("%d " % bucket)
                print(line)

        if bComputeChecksum:
            print(" Checksum=%d" % hBand.Checksum())

        dfNoData = hBand.GetNoDataValue()
        if dfNoData is not None:
            # NaN is the only float not equal to itself.
            if dfNoData != dfNoData:
                print(" NoData Value=nan")
            else:
                print(" NoData Value=%.18g" % dfNoData)

        if hBand.GetOverviewCount() > 0:
            line = " Overviews: "
            for iOverview in range(hBand.GetOverviewCount()):
                if iOverview != 0:
                    line = line + ", "
                hOverview = hBand.GetOverview(iOverview)
                if hOverview is not None:
                    line = line + ("%dx%d" % (hOverview.XSize,
                                              hOverview.YSize))
                    pszResampling = hOverview.GetMetadataItem(
                        "RESAMPLING", "")
                    # '*' flags overviews resampled with an
                    # AVERAGE_BIT2-style algorithm.
                    if pszResampling is not None \
                            and len(pszResampling) >= 12 \
                            and EQUAL(pszResampling[0:12], "AVERAGE_BIT2"):
                        line = line + "*"
                else:
                    line = line + "(null)"
            print(line)

            if bComputeChecksum:
                line = " Overviews checksum: "
                for iOverview in range(hBand.GetOverviewCount()):
                    if iOverview != 0:
                        line = line + ", "
                    hOverview = hBand.GetOverview(iOverview)
                    if hOverview is not None:
                        line = line + ("%d" % hOverview.Checksum())
                    else:
                        line = line + "(null)"
                print(line)

        if hBand.HasArbitraryOverviews():
            print(" Overviews: arbitrary")

        nMaskFlags = hBand.GetMaskFlags()
        if (nMaskFlags & (gdal.GMF_NODATA | gdal.GMF_ALL_VALID)) == 0:
            hMaskBand = hBand.GetMaskBand()

            line = " Mask Flags: "
            if (nMaskFlags & gdal.GMF_PER_DATASET) != 0:
                line = line + "PER_DATASET "
            if (nMaskFlags & gdal.GMF_ALPHA) != 0:
                line = line + "ALPHA "
            if (nMaskFlags & gdal.GMF_NODATA) != 0:
                line = line + "NODATA "
            if (nMaskFlags & gdal.GMF_ALL_VALID) != 0:
                line = line + "ALL_VALID "
            print(line)

            if hMaskBand is not None and \
                    hMaskBand.GetOverviewCount() > 0:
                line = " Overviews of mask band: "
                for iOverview in range(hMaskBand.GetOverviewCount()):
                    if iOverview != 0:
                        line = line + ", "
                    hOverview = hMaskBand.GetOverview(iOverview)
                    if hOverview is not None:
                        line = line + ("%d" % hOverview.Checksum())
                    else:
                        line = line + "(null)"
                # BUG FIX: this line was built but never printed.
                print(line)

        if len(hBand.GetUnitType()) > 0:
            print(" Unit Type: %s" % hBand.GetUnitType())

        papszCategories = hBand.GetRasterCategoryNames()
        if papszCategories is not None:
            print(" Categories:")
            for i, category in enumerate(papszCategories):
                print(" %3d: %s" % (i, category))

        if hBand.GetScale() != 1.0 or hBand.GetOffset() != 0.0:
            print(" Offset: %.15g, Scale:%.15g" % (
                hBand.GetOffset(), hBand.GetScale()))

        if bShowMetadata:
            papszMetadata = hBand.GetMetadata_List()
        else:
            papszMetadata = None
        if bShowMetadata and papszMetadata is not None \
                and len(papszMetadata) > 0:
            print(" Metadata:")
            for metadata in papszMetadata:
                print(" %s" % metadata)

        if bShowMetadata:
            papszMetadata = hBand.GetMetadata_List("IMAGE_STRUCTURE")
        else:
            papszMetadata = None
        if bShowMetadata and papszMetadata is not None \
                and len(papszMetadata) > 0:
            print(" Image Structure Metadata:")
            for metadata in papszMetadata:
                print(" %s" % metadata)

        hTable = hBand.GetRasterColorTable()
        if hBand.GetRasterColorInterpretation() == gdal.GCI_PaletteIndex \
                and hTable is not None:
            print(" Color Table (%s with %d entries)" % (
                gdal.GetPaletteInterpretationName(
                    hTable.GetPaletteInterpretation()),
                hTable.GetCount()))
            if bShowColorTable:
                for i in range(hTable.GetCount()):
                    sEntry = hTable.GetColorEntry(i)
                    print(" %3d: %d,%d,%d,%d" % (
                        i, sEntry[0], sEntry[1], sEntry[2], sEntry[3]))

        if bShowRAT:
            # Raster attribute table dumping is not implemented in this
            # Python port; kept as a placeholder matching gdalinfo.c.
            pass
            # hRAT = hBand.GetDefaultRAT()
            # GDALRATDumpReadable( hRAT, None );

    return 0
def ogr_rfc41_4():
    """Exercise RFC 41 multi-geometry-field support on a Memory layer."""
    ds = ogr.GetDriverByName('memory').CreateDataSource('')
    if ds.TestCapability(ogr.ODsCCreateGeomFieldAfterCreateLayer) == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # Layer with a single point geometry field carrying an SRS.
    srs = osr.SpatialReference()
    layer = ds.CreateLayer('test', geom_type=ogr.wkbPoint, srs=srs)
    if layer.TestCapability(ogr.OLCCreateGeomField) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if layer.GetSpatialRef().IsSame(srs) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if layer.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef().IsSame(srs) == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # Rename the default geometry field, then insert a point feature.
    layer.GetLayerDefn().GetGeomFieldDefn(0).SetName('a_name')
    feature = ogr.Feature(layer.GetLayerDefn())
    feature.SetGeometry(ogr.CreateGeometryFromWkt('POINT (1 2)'))
    layer.CreateFeature(feature)

    # The stored geometry must keep the layer SRS.
    layer.ResetReading()
    feature = layer.GetNextFeature()
    geometry = feature.GetGeometryRef()
    if geometry.GetSpatialReference().IsSame(srs) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    feature = None

    # Add a second (polygon) geometry field and populate it on the
    # existing feature.
    layer.CreateGeomField(ogr.GeomFieldDefn('another_geom_field', ogr.wkbPolygon))
    layer.ResetReading()
    feature = layer.GetNextFeature()
    feature.SetGeomField(
        1, ogr.CreateGeometryFromWkt('POLYGON ((10 10,10 11,11 11,11 10,10 10))'))
    layer.SetFeature(feature)

    # Both fields must be retrievable, by index and by name.
    layer.ResetReading()
    feature = layer.GetNextFeature()
    if feature.GetGeomFieldRef(0).ExportToWkt() != 'POINT (1 2)':
        gdaltest.post_reason('fail')
        return 'fail'
    if feature.GetGeomFieldRef('another_geom_field').ExportToWkt() != \
            'POLYGON ((10 10,10 11,11 11,11 10,10 10))':
        gdaltest.post_reason('fail')
        return 'fail'

    # Test GetExtent() on the second geometry field.
    if layer.GetExtent(geom_field=1) != (10.0, 11.0, 10.0, 11.0):
        gdaltest.post_reason('fail')
        return 'fail'

    # Test invalid geometry field index: must raise a GDAL error.
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    layer.GetExtent(geom_field=2)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    # Test SetSpatialFilter() on the second geometry field:
    # a disjoint polygon filters everything out...
    layer.SetSpatialFilter(
        1, ogr.CreateGeometryFromWkt('POLYGON ((-10 10,-10 11,-11 11,-11 10,-10 10))'))
    layer.ResetReading()
    if layer.GetNextFeature() is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # ...while an overlapping one keeps the feature.
    layer.SetSpatialFilter(
        1, ogr.CreateGeometryFromWkt('POLYGON ((10 10,10 11,11 11,11 10,10 10))'))
    layer.ResetReading()
    if layer.GetNextFeature() is None:
        gdaltest.post_reason('fail')
        return 'fail'

    layer.SetSpatialFilterRect(1, 10, 10, 11, 11)
    layer.ResetReading()
    if layer.GetNextFeature() is None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test invalid spatial filter index: must raise a GDAL error.
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    layer.SetSpatialFilterRect(2, 0, 0, 0, 0)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    layer.SetSpatialFilter(None)

    # CopyLayer() must preserve both geometry fields and their contents.
    copied_layer = ds.CopyLayer(layer, 'dup_test')
    copied_feature = copied_layer.GetNextFeature()
    if copied_feature.GetGeomFieldRef('a_name').ExportToWkt() != 'POINT (1 2)':
        gdaltest.post_reason('fail')
        return 'fail'
    if copied_feature.GetGeomFieldRef('another_geom_field').ExportToWkt() != \
            'POLYGON ((10 10,10 11,11 11,11 10,10 10))':
        gdaltest.post_reason('fail')
        return 'fail'

    return 'success'
def ogr_virtualogr_2():
    """Security check: VirtualOGR tables must stay hidden unless explicitly
    allowed, and databases with suspicious triggers touching them must be
    rejected."""
    if not ogrtest.has_sqlite_dialect:
        return 'skip'

    ds = ogr.GetDriverByName('SQLite').CreateDataSource('/vsimem/ogr_virtualogr_2.db')
    ds.ExecuteSQL("CREATE VIRTUAL TABLE foo USING VirtualOGR('data/poly.shp')")
    ds.ExecuteSQL("CREATE TABLE spy_table (spy_content VARCHAR)")
    ds.ExecuteSQL("CREATE TABLE regular_table (bar VARCHAR)")
    ds = None

    # Check that foo isn't listed
    ds = ogr.Open('/vsimem/ogr_virtualogr_2.db')
    for idx in range(ds.GetLayerCount()):
        if ds.GetLayer(idx).GetName() == 'foo':
            gdaltest.post_reason('fail')
            return 'fail'
    ds = None

    # Check that it is listed if OGR_SQLITE_LIST_VIRTUAL_OGR=YES
    gdal.SetConfigOption('OGR_SQLITE_LIST_VIRTUAL_OGR', 'YES')
    ds = ogr.Open('/vsimem/ogr_virtualogr_2.db')
    gdal.SetConfigOption('OGR_SQLITE_LIST_VIRTUAL_OGR', None)
    saw_foo = False
    for idx in range(ds.GetLayerCount()):
        if ds.GetLayer(idx).GetName() == 'foo':
            saw_foo = True
    if not saw_foo:
        gdaltest.post_reason('fail')
        return 'fail'
    ds = None

    # Add suspicious trigger
    ds = ogr.Open('/vsimem/ogr_virtualogr_2.db', update=1)
    ds.ExecuteSQL("CREATE TRIGGER spy_trigger INSERT ON regular_table BEGIN "
                  "INSERT OR REPLACE INTO spy_table (spy_content) "
                  "SELECT OGR_STYLE FROM foo; END;")
    ds = None

    gdal.ErrorReset()
    ds = ogr.Open('/vsimem/ogr_virtualogr_2.db')
    for idx in range(ds.GetLayerCount()):
        if ds.GetLayer(idx).GetName() == 'foo':
            gdaltest.post_reason('fail')
            return 'fail'
    # An error will be triggered at the time the trigger is used
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    ds.ExecuteSQL("INSERT INTO regular_table (bar) VALUES ('bar')")
    gdal.PopErrorHandler()
    trigger_ran_silently = gdal.GetLastErrorMsg() == ''
    ds = None
    if trigger_ran_silently:
        gdal.Unlink('/vsimem/ogr_virtualogr_2.db')
        gdaltest.post_reason('expected a failure')
        return 'fail'

    # With listing enabled, opening a database that combines a VirtualOGR
    # table and a spy trigger must fail outright.
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    gdal.SetConfigOption('OGR_SQLITE_LIST_VIRTUAL_OGR', 'YES')
    ds = ogr.Open('/vsimem/ogr_virtualogr_2.db')
    gdal.SetConfigOption('OGR_SQLITE_LIST_VIRTUAL_OGR', None)
    gdal.PopErrorHandler()
    if ds is not None:
        ds = None
        gdal.Unlink('/vsimem/ogr_virtualogr_2.db')
        gdaltest.post_reason('expected a failure')
        return 'fail'
    open_ran_silently = gdal.GetLastErrorMsg() == ''
    ds = None
    gdal.Unlink('/vsimem/ogr_virtualogr_2.db')
    if open_ran_silently:
        gdaltest.post_reason('expected a failure')
        return 'fail'

    return 'success'
def pam_11():
    """Check PAM behaviour when the directory holding the dataset is
    read-only: saving the .aux.xml must fail with a warning, and the
    PAM proxy-db mechanism must work (exercised in external processes).

    Returns 'success', 'fail' or 'skip' in gdaltest convention.
    """
    # Create a read-only directory (clean up any leftover from a prior run).
    try:
        os.chmod('tmpdirreadonly', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        shutil.rmtree('tmpdirreadonly')
    except OSError:
        # BUG FIX: was a bare `except:`; narrowed to OSError so that
        # SystemExit/KeyboardInterrupt are no longer swallowed.
        pass
    os.mkdir('tmpdirreadonly')
    shutil.copy('data/byte.tif', 'tmpdirreadonly/byte.tif')

    # FIXME: how do we create a read-only dir on windows ?
    # The following has no effect
    os.chmod('tmpdirreadonly', stat.S_IRUSR | stat.S_IXUSR)

    # Test that the directory is really read-only; if a file can be
    # created the platform does not honour the chmod, so skip.
    try:
        f = open('tmpdirreadonly/test', 'w')
        if f is not None:
            f.close()
            return 'skip'
    except OSError:
        # BUG FIX: was a bare `except:`; open() failures raise OSError
        # (IOError is an alias of OSError in Python 3).
        pass

    # Compute statistics --> the saving as .aux.xml should fail
    ds = gdal.Open('tmpdirreadonly/byte.tif')
    stats = ds.GetRasterBand(1).ComputeStatistics(False)
    if stats[0] != 74:
        gdaltest.post_reason('did not get expected minimum')
        return 'fail'
    gdal.ErrorReset()
    ds = None
    error_msg = gdal.GetLastErrorMsg()
    # The warning must start with this exact text.
    if error_msg.find('Unable to save auxiliary information') != 0:
        gdaltest.post_reason('warning was expected at that point')
        return 'fail'

    # Check that we actually have no saved statistics
    ds = gdal.Open('tmpdirreadonly/byte.tif')
    stats = ds.GetRasterBand(1).GetStatistics(False, False)
    # GetStatistics() signals "no stats available" with stddev == -1.
    if stats[3] != -1:
        gdaltest.post_reason('did not expected to have stats at that point')
        return 'fail'
    ds = None

    # This must be run as an external process so we can override
    # GDAL_PAM_PROXY_DIR at the beginning of the process.
    import test_py_scripts
    ret = test_py_scripts.run_py_script_as_external_script(
        '.', 'pamproxydb', '-test1')
    if ret.find('success') == -1:
        gdaltest.post_reason('pamproxydb.py -test1 failed %s' % ret)
        return 'fail'

    # Test loading an existing proxydb
    ret = test_py_scripts.run_py_script_as_external_script(
        '.', 'pamproxydb', '-test2')
    if ret.find('success') == -1:
        gdaltest.post_reason('pamproxydb.py -test2 failed %s' % ret)
        return 'fail'

    return 'success'
def testMetaCRS(self):
    """Transform the parsed source point into the destination SRS and
    compare against the expected coordinates within self.dst_error.

    Returns 'success', 'fail' or 'skip' in gdaltest convention.
    """
    result = self.parse_line()
    if result != 'success':
        return result

    try:
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        ct = osr.CoordinateTransformation(self.src_srs, self.dst_srs)
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find('Unable to load PROJ.4') != -1:
            gdaltest.post_reason(
                'PROJ.4 missing, transforms not available.')
            return 'skip'
    except ValueError:
        # Constructor raised, so the handler pushed above is still active.
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find('Unable to load PROJ.4') != -1:
            gdaltest.post_reason(
                'PROJ.4 missing, transforms not available.')
            return 'skip'
        gdaltest.post_reason(
            'failed to create coordinate transformation. %s'
            % gdal.GetLastErrorMsg())
        return 'fail'
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception.
        gdal.PopErrorHandler()
        gdaltest.post_reason(
            'failed to create coordinate transformation. %s'
            % gdal.GetLastErrorMsg())
        return 'fail'

    ######################################################################
    # Transform source point to destination SRS, swapping EPSG GEOGCS
    # axes if needed.

    if self.src_srs.EPSGTreatsAsLatLong():
        self.src_xyz = (self.src_xyz[1], self.src_xyz[0], self.src_xyz[2])

    result = ct.TransformPoint(self.src_xyz[0], self.src_xyz[1],
                               self.src_xyz[2])

    # NOTE(review): the result swap below keys off src_srs, like the input
    # swap above — it looks like it may be intended to test dst_srs
    # instead; verify before changing, as tests may depend on it.
    if self.src_srs.EPSGTreatsAsLatLong():
        result = (result[1], result[0], result[2])

    ######################################################################
    # Check results.

    error = abs(result[0] - self.dst_xyz[0]) \
        + abs(result[1] - self.dst_xyz[1]) \
        + abs(result[2] - self.dst_xyz[2])

    if error > self.dst_error:
        err_msg = 'Dest error is %g, src=%g,%g,%g, dst=%g,%g,%g, exp=%g,%g,%g' \
            % (error,
               self.src_xyz[0], self.src_xyz[1], self.src_xyz[2],
               result[0], result[1], result[2],
               self.dst_xyz[0], self.dst_xyz[1], self.dst_xyz[2])
        gdaltest.post_reason(err_msg)
        gdal.Debug('OSR', 'Src SRS:\n%s\n\nDst SRS:\n%s\n'
                   % (self.src_srs.ExportToPrettyWkt(),
                      self.dst_srs.ExportToPrettyWkt()))
        return 'fail'

    return 'success'
def plmosaic_17():
    """Open a PLMosaic dataset with a local tile cache and exercise
    metadata, georeferencing, overviews and the cache life cycle."""
    if gdaltest.plmosaic_drv is None:
        return 'skip'

    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx(
        'PLMosaic:', gdal.OF_RASTER,
        open_options=['API_KEY=foo', 'MOSAIC=my_mosaic', 'CACHE_PATH=tmp'])
    gdal.SetConfigOption('PL_URL', None)
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Dataset-level metadata.
    expected_md = {
        'LAST_ACQUIRED': 'last_date',
        'NAME': 'my_mosaic',
        'FIRST_ACQUIRED': 'first_date'
    }
    if ds.GetMetadata() != expected_md:
        gdaltest.post_reason('fail')
        print(ds.GetMetadata())
        return 'fail'

    # Web-Mercator projection and expected raster dimensions.
    if ds.GetProjectionRef().find('3857') < 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    if ds.RasterXSize != 8388608:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 8388608:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'

    # Geotransform, compared term by term with a small tolerance.
    actual_gt = ds.GetGeoTransform()
    expected_gt = (-20037508.34, 4.7773142671600004, 0.0,
                   20037508.34, 0.0, -4.7773142671600004)
    for idx in range(6):
        if abs(actual_gt[idx] - expected_gt[idx]) > 1e-8:
            gdaltest.post_reason('fail')
            print(ds.GetGeoTransform())
            return 'fail'

    if ds.GetMetadataItem('INTERLEAVE', 'IMAGE_STRUCTURE') != 'PIXEL':
        gdaltest.post_reason('fail')
        print(ds.GetMetadata('IMAGE_STRUCTURE'))
        print(ds.GetMetadataItem('INTERLEAVE', 'IMAGE_STRUCTURE'))
        return 'fail'

    # Overview plumbing: count, out-of-range indices, first overview.
    if ds.GetRasterBand(1).GetOverviewCount() != 15:
        gdaltest.post_reason('fail')
        print(ds.GetRasterBand(1).GetOverviewCount())
        return 'fail'
    if ds.GetRasterBand(1).GetOverview(-1) is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetRasterBand(1).GetOverview(
            ds.GetRasterBand(1).GetOverviewCount()) is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetRasterBand(1).GetOverview(0) is None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Start from an empty tile cache.
    try:
        shutil.rmtree('tmp/plmosaic_cache')
    except OSError:
        pass

    for quad in range(12):
        # Read at one nonexistent position.
        ds.GetRasterBand(1).ReadRaster(4096 * quad, 0, 1, 1)
        if gdal.GetLastErrorMsg() != '':
            gdaltest.post_reason('fail')
            return 'fail'
    for quad in range(11, -1, -1):
        # Again in the same quad, but in different block, to test cache
        ds.GetRasterBand(1).ReadRaster(4096 * quad + 256, 0, 1, 1)
        if gdal.GetLastErrorMsg() != '':
            gdaltest.post_reason('fail')
            return 'fail'
    for quad in range(12):
        # Again in the same quad, but in different block, to test cache
        ds.GetRasterBand(1).ReadRaster(4096 * quad + 512, 256, 1, 1)
        if gdal.GetLastErrorMsg() != '':
            gdaltest.post_reason('fail')
            return 'fail'

    ds.FlushCache()

    # Invalid tile content
    gdal.FileFromMemBuffer('/vsimem/root/my_mosaic_id/quads/0-2047/full',
                           'garbage')
    gdal.PushErrorHandler()
    ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    gdal.PopErrorHandler()

    os.stat('tmp/plmosaic_cache/my_mosaic/my_mosaic_0-2047.tif')

    ds.FlushCache()
    shutil.rmtree('tmp/plmosaic_cache')

    # GeoTIFF but with wrong dimensions
    gdal.GetDriverByName('GTiff').Create(
        '/vsimem/root/my_mosaic_id/quads/0-2047/full', 1, 1, 1)
    gdal.PushErrorHandler()
    ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    gdal.PopErrorHandler()

    os.stat('tmp/plmosaic_cache/my_mosaic/my_mosaic_0-2047.tif')

    ds.FlushCache()
    shutil.rmtree('tmp/plmosaic_cache')

    # Good GeoTIFF
    mem_tile = gdal.GetDriverByName('GTiff').Create(
        '/vsimem/root/my_mosaic_id/quads/0-2047/full', 4096, 4096, 4,
        options=['INTERLEAVE=BAND', 'SPARSE_OK=YES'])
    mem_tile.GetRasterBand(1).Fill(255)
    mem_tile = None

    pixel = struct.unpack('B', ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1))[0]
    if pixel != 255:
        gdaltest.post_reason('fail')
        print(pixel)
        return 'fail'

    os.stat('tmp/plmosaic_cache/my_mosaic/my_mosaic_0-2047.tif')

    ds.FlushCache()

    # Read again from file cache.
    # We change the file behind the scene (but not changing its size)
    # to demonstrate that the cached tile is still used.
    mem_tile = gdal.GetDriverByName('GTiff').Create(
        '/vsimem/root/my_mosaic_id/quads/0-2047/full', 4096, 4096, 4,
        options=['INTERLEAVE=BAND', 'SPARSE_OK=YES'])
    mem_tile.GetRasterBand(1).Fill(1)
    mem_tile = None

    pixel = struct.unpack('B', ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1))[0]
    if pixel != 255:
        gdaltest.post_reason('fail')
        print(pixel)
        return 'fail'

    ds = None

    # Read again from file cache, but with TRUST_CACHE=YES
    # delete the full GeoTIFF before
    gdal.Unlink('/vsimem/root/my_mosaic_id/quads/0-2047/full')

    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx(
        'PLMosaic:API_KEY=foo,MOSAIC=my_mosaic,CACHE_PATH=tmp,TRUST_CACHE=YES',
        gdal.OF_RASTER)
    gdal.SetConfigOption('PL_URL', None)

    pixel = struct.unpack('B', ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1))[0]
    if pixel != 255:
        gdaltest.post_reason('fail')
        print(pixel)
        return 'fail'
    ds = None

    # Read again from file cache but the metatile has changed in between
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx(
        'PLMosaic:', gdal.OF_RASTER,
        open_options=['API_KEY=foo', 'MOSAIC=my_mosaic', 'CACHE_PATH=tmp'])
    gdal.SetConfigOption('PL_URL', None)

    mem_tile = gdal.GetDriverByName('GTiff').Create(
        '/vsimem/root/my_mosaic_id/quads/0-2047/full', 4096, 4096, 4,
        options=['INTERLEAVE=BAND', 'SPARSE_OK=YES'])
    mem_tile.SetMetadataItem('foo', 'bar')
    mem_tile.GetRasterBand(1).Fill(254)
    mem_tile = None

    pixels = struct.unpack('B' * 4, ds.ReadRaster(0, 0, 1, 1))
    if pixels != (254, 0, 0, 0):
        gdaltest.post_reason('fail')
        print(pixels)
        return 'fail'

    return 'success'
def vsigs_read_credentials_oauth2_service_account():
    """Read /vsigs/ using OAuth2 service-account credentials against a
    local test webserver, once with the key inline and once from a file.

    Returns 'success', 'fail' or 'skip' in gdaltest convention.
    """
    if gdaltest.webserver_port == 0:
        return 'skip'

    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', '')
    gdal.SetConfigOption('GS_ACCESS_KEY_ID', '')

    # Generated with 'openssl genrsa -out rsa-openssl.pem 1024' and
    # 'openssl pkcs8 -nocrypt -in rsa-openssl.pem -inform PEM -topk8 -outform PEM -out rsa-openssl.pkcs8.pem'
    # DO NOT USE in production !!!!
    key = """-----BEGIN PRIVATE KEY-----
MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBAOlwJQLLDG1HeLrk
VNcFR5Qptto/rJE5emRuy0YmkVINT4uHb1be7OOo44C2Ev8QPVtNHHS2XwCY5gTm
i2RfIBLv+VDMoVQPqqE0LHb0WeqGmM5V1tHbmVnIkCcKMn3HpK30grccuBc472LQ
DVkkGqIiGu0qLAQ89JP/r0LWWySRAgMBAAECgYAWjsS00WRBByAOh1P/dz4kfidy
TabiXbiLDf3MqJtwX2Lpa8wBjAc+NKrPXEjXpv0W3ou6Z4kkqKHJpXGg4GRb4N5I
2FA+7T1lA0FCXa7dT2jvgJLgpBepJu5b//tqFqORb4A4gMZw0CiPN3sUsWsSw5Hd
DrRXwp6sarzG77kvZQJBAPgysAmmXIIp9j1hrFSkctk4GPkOzZ3bxKt2Nl4GFrb+
bpKSon6OIhP1edrxTz1SMD1k5FiAAVUrMDKSarbh5osCQQDwxq4Tvf/HiYz79JBg
Wz5D51ySkbg01dOVgFW3eaYAdB6ta/o4vpHhnbrfl6VO9oUb3QR4hcrruwnDHsw3
4mDTAkEA9FPZjbZSTOSH/cbgAXbdhE4/7zWOXj7Q7UVyob52r+/p46osAk9i5qj5
Kvnv2lrFGDrwutpP9YqNaMtP9/aLnwJBALLWf9n+GAv3qRZD0zEe1KLPKD1dqvrj
j+LNjd1Xp+tSVK7vMs4PDoAMDg+hrZF3HetSQM3cYpqxNFEPgRRJOy0CQQDQlZHI
yzpSgEiyx8O3EK1iTidvnLXbtWabvjZFfIE/0OhfBmN225MtKG3YLV2HoUvpajLq
gwE6fxOLyJDxuWRf
-----END PRIVATE KEY-----
"""

    # Run twice: i == 0 uses the key inline, i == 1 reads it from a file.
    for i in range(2):

        gdal.SetConfigOption(
            'GO2A_AUD',
            'http://localhost:%d/oauth2/v4/token' % gdaltest.webserver_port)
        gdal.SetConfigOption('GOA2_NOW', '123456')

        if i == 0:
            gdal.SetConfigOption('GS_OAUTH2_PRIVATE_KEY', key)
        else:
            gdal.FileFromMemBuffer('/vsimem/pkey', key)
            gdal.SetConfigOption('GS_OAUTH2_PRIVATE_KEY_FILE', '/vsimem/pkey')

        gdal.SetConfigOption('GS_OAUTH2_CLIENT_EMAIL', 'CLIENT_EMAIL')

        gdal.VSICurlClearCache()

        handler = webserver.SequentialHandler()

        def token_method(request):
            # Verify the exact JWT assertion GDAL posts before handing
            # out an access token.
            content = request.rfile.read(
                int(request.headers['Content-Length'])).decode('ascii')
            if content != 'grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&assertion=eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiAiQ0xJRU5UX0VNQUlMIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5yZWFkX3dyaXRlIiwgImF1ZCI6ICJodHRwOi8vbG9jYWxob3N0OjgwODAvb2F1dGgyL3Y0L3Rva2VuIiwgImlhdCI6IDEyMzQ1NiwgImV4cCI6IDEyNzA1Nn0%3D.DAhqWtBgKpObxZ%2BGiXqwF%2Fa4SS%2FNWQRhLCI7DYZCuOTuf2w7dL8j4CdpiwwzQg1diIus7dyViRfzpsFmuZKAXwL%2B84iBoVVqnJJZ4TgwH49NdfMAnc4Rgm%2Bo2a2nEcMjX%2FbQ3jRY%2B9WNVl96hzULGvLrVeyego2f06wivqmvxHA%3D':
                sys.stderr.write('Bad POST content: %s\n' % content)
                request.send_response(403)
                return

            request.send_response(200)
            request.send_header('Content-type', 'text/plain')
            content = """{
                "access_token" : "ACCESS_TOKEN",
                "token_type" : "Bearer",
                "expires_in" : 3600,
                }"""
            request.send_header('Content-Length', len(content))
            request.end_headers()
            request.wfile.write(content.encode('ascii'))

        handler.add('POST', '/oauth2/v4/token', custom_method=token_method)

        def resource_method(request):
            # The resource request must carry the bearer token issued above.
            if 'Authorization' not in request.headers:
                sys.stderr.write('Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
                return
            expected_authorization = 'Bearer ACCESS_TOKEN'
            if request.headers['Authorization'] != expected_authorization:
                sys.stderr.write("Bad Authorization: '%s'\n"
                                 % str(request.headers['Authorization']))
                request.send_response(403)
                return

            request.send_response(200)
            request.send_header('Content-type', 'text/plain')
            request.send_header('Content-Length', 3)
            request.end_headers()
            request.wfile.write("""foo""".encode('ascii'))

        handler.add('GET', '/gs_fake_bucket/resource',
                    custom_method=resource_method)

        try:
            with webserver.install_http_handler(handler):
                f = open_for_read('/vsigs/gs_fake_bucket/resource')
                if f is None:
                    gdaltest.post_reason('fail')
                    return 'fail'
                data = gdal.VSIFReadL(1, 4, f).decode('ascii')
                gdal.VSIFCloseL(f)
        except Exception:
            if gdal.GetLastErrorMsg().find(
                    'CPLRSASHA256Sign() not implemented') >= 0:
                return 'skip'
            # BUG FIX: the bare `except:` previously swallowed real errors
            # and fell through to the `data` check below with `data`
            # unbound, crashing with NameError; re-raise instead.
            raise
        finally:
            gdal.SetConfigOption('GO2A_AUD', None)
            # BUG FIX: this option was set as 'GOA2_NOW' but formerly
            # cleared as 'GO2A_NOW', so it was never reset.
            gdal.SetConfigOption('GOA2_NOW', None)
            gdal.SetConfigOption('GS_OAUTH2_PRIVATE_KEY', '')
            gdal.SetConfigOption('GS_OAUTH2_PRIVATE_KEY_FILE', '')
            gdal.SetConfigOption('GS_OAUTH2_CLIENT_EMAIL', '')

        if data != 'foo':
            gdaltest.post_reason('fail')
            print(data)
            return 'fail'

    gdal.Unlink('/vsimem/pkey')

    return 'success'
def Calc(calc: MaybeSequence[str], outfile: Optional[PathLikeOrStr] = None, NoDataValue: Optional[Number] = None,
         type: Optional[Union[GDALDataType, str]] = None, format: Optional[str] = None,
         creation_options: Optional[Sequence[str]] = None, allBands: str = '', overwrite: bool = False,
         hideNoData: bool = False, projectionCheck: bool = False,
         color_table: Optional[ColorTableLike] = None,
         extent: Optional[Extent] = None, projwin: Optional[Union[Tuple, GeoRectangle]] = None,
         user_namespace: Optional[Dict] = None,
         debug: bool = False, quiet: bool = False, **input_files):
    """Perform raster algebra on one or more input rasters (gdal_calc core).

    Each keyword in ``input_files`` maps an alpha letter (e.g. ``A``) to an
    input filename, an open gdal.Dataset, or a list of either; ``<alpha>_band``
    keywords select a specific band.  ``calc`` is a numpy-syntax expression (or
    a list of expressions, one per output band) evaluated block-by-block with
    the alphas bound to numpy arrays.

    Returns the output gdal.Dataset.
    Raises Exception on conflicting options, incompatible inputs, or write
    failure; IOError if an input file cannot be opened.
    """
    if debug:
        print(f"gdal_calc.py starting calculation {calc}")

    # Refuse creation-implying options when we are going to fill an existing file.
    if outfile and os.path.isfile(outfile) and not overwrite:
        if type or format or creation_options or hideNoData or extent or projwin:
            raise Exception(
                "One or several options implying file creation have been provided but Output file exists, must use --overwrite option!")

    # Single calc value compatibility: normalize to a list of expressions.
    if isinstance(calc, (list, tuple)):
        calc = calc
    else:
        calc = [calc]
    calc = [c.strip('"') for c in calc]

    creation_options = creation_options or []

    # Set up the global namespace for eval with all public names of
    # gdal_array and numpy, plus any user-supplied names.
    global_namespace = {
        key: getattr(module, key)
        for module in [gdal_array, numpy]
        for key in dir(module) if not key.startswith('__')
    }
    if user_namespace:
        global_namespace.update(user_namespace)

    if not calc:
        raise Exception("No calculation provided.")
    # FIX: guard format against None before .upper() — with no outfile and no
    # format the intended error is "No output file provided.", not AttributeError.
    elif not outfile and (format is None or format.upper() != 'MEM'):
        raise Exception("No output file provided.")

    if format is None:
        format = GetOutputDriverFor(outfile)

    # Normalize the extent/projwin options into an Extent enum or GeoRectangle.
    if isinstance(extent, GeoRectangle):
        pass
    elif projwin:
        if isinstance(projwin, GeoRectangle):
            extent = projwin
        else:
            extent = GeoRectangle.from_lurd(*projwin)
    elif not extent:
        extent = Extent.IGNORE
    else:
        extent = extent_util.parse_extent(extent)

    compatible_gt_eps = 0.000001
    # Which GeoTransform differences are tolerated; offsets may differ unless
    # the user asked for strict failure.
    gt_diff_support = {
        GT.INCOMPATIBLE_OFFSET: extent != Extent.FAIL,
        GT.INCOMPATIBLE_PIXEL_SIZE: False,
        GT.INCOMPATIBLE_ROTATION: False,
        GT.NON_ZERO_ROTATION: False,
    }
    gt_diff_error = {
        GT.INCOMPATIBLE_OFFSET: 'different offset',
        GT.INCOMPATIBLE_PIXEL_SIZE: 'different pixel size',
        GT.INCOMPATIBLE_ROTATION: 'different rotation',
        GT.NON_ZERO_ROTATION: 'non zero rotation',
    }

    ################################################################
    # fetch details of input layers
    ################################################################

    # Parallel lists, one entry per input band.
    myFileNames = []        # input filenames (None for already-open Datasets)
    myFiles = []            # input DataSets
    myBands = []            # input band numbers
    myAlphaList = []        # alpha letter that represents each input
    myDataType = []         # string representation of each input's datatype
    myDataTypeNum = []      # numeric datatype of each input
    myNDV = []              # nodata value for each input
    DimensionsCheck = None  # dimensions of the output
    Dimensions = []         # dimensions of input files
    ProjectionCheck = None  # projection of the output
    GeoTransformCheck = None  # GeoTransform of the output
    GeoTransforms = []      # GeoTransform of each input file
    GeoTransformDiffer = False  # True if inputs have different GeoTransforms
    myTempFileNames = []    # temp vrt filenames created per input
    myAlphaFileLists = []   # alphas that map to a *list* of inputs

    # loop through input files - checking dimensions
    for alphas, filenames in input_files.items():
        if isinstance(filenames, (list, tuple)):
            # alpha is a list of files
            myAlphaFileLists.append(alphas)
        elif is_path_like(filenames) or isinstance(filenames, gdal.Dataset):
            # alpha is a single filename or a Dataset
            filenames = [filenames]
            alphas = [alphas]
        else:
            # I guess this alphas should be in the global_namespace,
            # It would have been better to pass it as user_namespace, but I'll accept it anyway
            global_namespace[alphas] = filenames
            continue
        # NOTE: when alphas is a single-letter string, alphas * len(filenames)
        # repeats the letter so zip pairs it with every filename.
        for alpha, filename in zip(alphas * len(filenames), filenames):
            if not alpha.endswith("_band"):
                # check if we have asked for a specific band...
                alpha_band = f"{alpha}_band"
                if alpha_band in input_files:
                    myBand = input_files[alpha_band]
                else:
                    myBand = 1

                myF_is_ds = not is_path_like(filename)
                if myF_is_ds:
                    myFile = filename
                    filename = None
                else:
                    myFile = open_ds(filename)
                if not myFile:
                    # FIX: report the actual offending filename.
                    raise IOError(f"No such file or directory: '{filename}'")

                myFileNames.append(filename)
                myFiles.append(myFile)
                myBands.append(myBand)
                myAlphaList.append(alpha)
                dt = myFile.GetRasterBand(myBand).DataType
                myDataType.append(gdal.GetDataTypeName(dt))
                myDataTypeNum.append(dt)
                myNDV.append(None if hideNoData else myFile.GetRasterBand(myBand).GetNoDataValue())

                # check that the dimensions of each layer are the same
                myFileDimensions = [myFile.RasterXSize, myFile.RasterYSize]
                if DimensionsCheck:
                    if DimensionsCheck != myFileDimensions:
                        GeoTransformDiffer = True
                        if extent in [Extent.IGNORE, Extent.FAIL]:
                            raise Exception(
                                f"Error! Dimensions of file {filename} ({myFileDimensions[0]:d}, "
                                f"{myFileDimensions[1]:d}) are different from other files "
                                f"({DimensionsCheck[0]:d}, {DimensionsCheck[1]:d}).  Cannot proceed")
                else:
                    DimensionsCheck = myFileDimensions

                # check that the Projection of each layer is the same
                myProjection = myFile.GetProjection()
                if ProjectionCheck:
                    if projectionCheck and ProjectionCheck != myProjection:
                        raise Exception(
                            f"Error! Projection of file {filename} {myProjection} "
                            f"are different from other files {ProjectionCheck}.  Cannot proceed")
                else:
                    ProjectionCheck = myProjection

                # check that the GeoTransforms of each layer are the same
                myFileGeoTransform = myFile.GetGeoTransform(can_return_null=True)
                if extent == Extent.IGNORE:
                    GeoTransformCheck = myFileGeoTransform
                else:
                    Dimensions.append(myFileDimensions)
                    GeoTransforms.append(myFileGeoTransform)
                    if not GeoTransformCheck:
                        GeoTransformCheck = myFileGeoTransform
                    else:
                        my_gt_diff = extent_util.gt_diff(GeoTransformCheck, myFileGeoTransform,
                                                         eps=compatible_gt_eps,
                                                         diff_support=gt_diff_support)
                        if my_gt_diff not in [GT.SAME, GT.ALMOST_SAME]:
                            GeoTransformDiffer = True
                            if my_gt_diff != GT.COMPATIBLE_DIFF:
                                raise Exception(
                                    f"Error! GeoTransform of file {filename} {myFileGeoTransform} is incompatible "
                                    f"({gt_diff_error[my_gt_diff]}), first file GeoTransform is {GeoTransformCheck}. "
                                    f"Cannot proceed")
                if debug:
                    print(
                        f"file {alpha}: {filename}, dimensions: "
                        f"{DimensionsCheck[0]}, {DimensionsCheck[1]}, type: {myDataType[-1]}")

    # process allBands option
    allBandsIndex = None
    allBandsCount = 1
    if allBands:
        if len(calc) > 1:
            raise Exception("Error! --allBands implies a single --calc")
        try:
            allBandsIndex = myAlphaList.index(allBands)
        except ValueError:
            raise Exception(
                f"Error! allBands option was given but Band {allBands} not found.  Cannot proceed")
        allBandsCount = myFiles[allBandsIndex].RasterCount
        if allBandsCount <= 1:
            allBandsIndex = None
    else:
        allBandsCount = len(calc)

    if extent not in [Extent.IGNORE, Extent.FAIL] and (
            GeoTransformDiffer or isinstance(extent, GeoRectangle)):
        # mixing different GeoTransforms/Extents: compute a common grid and
        # re-route every input through a temp VRT on that grid.
        GeoTransformCheck, DimensionsCheck, ExtentCheck = extent_util.calc_geotransform_and_dimensions(
            GeoTransforms, Dimensions, extent)
        if GeoTransformCheck is None:
            raise Exception("Error! The requested extent is empty. Cannot proceed")
        for i in range(len(myFileNames)):
            temp_vrt_filename, temp_vrt_ds = extent_util.make_temp_vrt(myFiles[i], ExtentCheck)
            myTempFileNames.append(temp_vrt_filename)
            myFiles[i] = None  # close original ds
            myFiles[i] = temp_vrt_ds  # replace original ds with vrt_ds
            # update the new precise dimensions and gt from the new ds
            GeoTransformCheck = temp_vrt_ds.GetGeoTransform()
            DimensionsCheck = [temp_vrt_ds.RasterXSize, temp_vrt_ds.RasterYSize]
        temp_vrt_ds = None

    ################################################################
    # set up output file
    ################################################################

    # open output file if it exists
    if outfile and os.path.isfile(outfile) and not overwrite:
        if allBandsIndex is not None:
            raise Exception(
                "Error! allBands option was given but Output file exists, must use --overwrite option!")
        if len(calc) > 1:
            raise Exception(
                "Error! multiple calc options were given but Output file exists, must use --overwrite option!")
        if debug:
            print(f"Output file {outfile} exists - filling in results into file")

        myOut = open_ds(outfile, access_mode=gdal.OF_UPDATE | gdal.OF_RASTER)
        # Validate that the existing output matches the inputs' geometry.
        if myOut is None:
            error = 'but cannot be opened for update'
        elif [myOut.RasterXSize, myOut.RasterYSize] != DimensionsCheck:
            error = 'but is the wrong size'
        elif ProjectionCheck and ProjectionCheck != myOut.GetProjection():
            error = 'but is the wrong projection'
        elif GeoTransformCheck and GeoTransformCheck != myOut.GetGeoTransform(can_return_null=True):
            error = 'but is the wrong geotransform'
        else:
            error = None
        if error:
            raise Exception(
                f"Error! Output exists, {error}. Use the --overwrite option "
                f"to automatically overwrite the existing file")

        myOutB = myOut.GetRasterBand(1)
        myOutNDV = myOutB.GetNoDataValue()
        myOutType = myOutB.DataType
    else:
        if outfile:
            # remove existing file and regenerate
            if os.path.isfile(outfile):
                os.remove(outfile)
            # create a new file
            if debug:
                print(f"Generating output file {outfile}")
        else:
            outfile = ''

        # find data type to use
        if not type:
            # use the largest type of the input files
            if hasattr(gdal, 'DataTypeUnion'):
                myOutType = myDataTypeNum[0]
                for dt in myDataTypeNum:
                    myOutType = gdal.DataTypeUnion(myOutType, dt)
            else:
                # GDAL < 3.5: not super reliable as it depends on the values
                # of the GDALDataType enumeration ...
                myOutType = max(myDataTypeNum)
        else:
            myOutType = type
            if isinstance(myOutType, str):
                myOutType = gdal.GetDataTypeByName(myOutType)

        # create file
        myOutDrv = gdal.GetDriverByName(format)
        myOut = myOutDrv.Create(os.fspath(outfile), DimensionsCheck[0], DimensionsCheck[1],
                                allBandsCount, myOutType, creation_options)

        # set output geo info based on first input layer
        if not GeoTransformCheck:
            GeoTransformCheck = myFiles[0].GetGeoTransform(can_return_null=True)
        if GeoTransformCheck:
            myOut.SetGeoTransform(GeoTransformCheck)
        if not ProjectionCheck:
            ProjectionCheck = myFiles[0].GetProjection()
        if ProjectionCheck:
            myOut.SetProjection(ProjectionCheck)

        if NoDataValue is None:
            myOutNDV = DefaultNDVLookup[myOutType]  # use the default nodata for this datatype
        elif isinstance(NoDataValue, str) and NoDataValue.lower() == 'none':
            myOutNDV = None  # do not set any nodata value
        else:
            myOutNDV = NoDataValue  # use the given nodata value

        for i in range(1, allBandsCount + 1):
            myOutB = myOut.GetRasterBand(i)
            if myOutNDV is not None:
                myOutB.SetNoDataValue(myOutNDV)
            if color_table:
                # set color table and color interpretation
                if is_path_like(color_table):
                    color_table = get_color_table(color_table)
                myOutB.SetRasterColorTable(color_table)
                myOutB.SetRasterColorInterpretation(gdal.GCI_PaletteIndex)
            myOutB = None  # write to band

    if hideNoData:
        myOutNDV = None

    myOutTypeName = gdal.GetDataTypeName(myOutType)
    if debug:
        print(
            f"output file: {outfile}, dimensions: {myOut.RasterXSize}, {myOut.RasterYSize}, type: {myOutTypeName}")

    ################################################################
    # find block size to chop grids into bite-sized chunks
    ################################################################

    # use the block size of the first layer to read efficiently
    myBlockSize = myFiles[0].GetRasterBand(myBands[0]).GetBlockSize()
    # find total x and y blocks to be read (ceiling division)
    nXBlocks = (int)((DimensionsCheck[0] + myBlockSize[0] - 1) / myBlockSize[0])
    nYBlocks = (int)((DimensionsCheck[1] + myBlockSize[1] - 1) / myBlockSize[1])
    myBufSize = myBlockSize[0] * myBlockSize[1]

    if debug:
        print(f"using blocksize {myBlockSize[0]} x {myBlockSize[1]}")

    # variables for displaying progress
    ProgressCt = -1
    ProgressMk = -1
    ProgressEnd = nXBlocks * nYBlocks * allBandsCount

    ################################################################
    # start looping through each band in allBandsCount
    ################################################################
    for bandNo in range(1, allBandsCount + 1):

        ################################################################
        # start looping through blocks of data
        ################################################################

        # store these numbers in variables that may change later
        nXValid = myBlockSize[0]
        nYValid = myBlockSize[1]

        # Pre-scan: how many files feed each list-alpha and the widest dtype.
        count_file_per_alpha = {}
        largest_datatype_per_alpha = {}
        for i, Alpha in enumerate(myAlphaList):
            if Alpha in myAlphaFileLists:
                # populate lettered arrays with values
                if allBandsIndex is not None and allBandsIndex == i:
                    myBandNo = bandNo
                else:
                    myBandNo = myBands[i]
                band = myFiles[i].GetRasterBand(myBandNo)
                if Alpha not in count_file_per_alpha:
                    count_file_per_alpha[Alpha] = 1
                    largest_datatype_per_alpha[Alpha] = band.DataType
                else:
                    count_file_per_alpha[Alpha] += 1
                    if hasattr(gdal, 'DataTypeUnion'):
                        largest_datatype_per_alpha[Alpha] = gdal.DataTypeUnion(
                            largest_datatype_per_alpha[Alpha], band.DataType)

        # loop through X-lines
        for X in range(0, nXBlocks):

            # in case the blocks don't fit perfectly
            # change the block size of the final piece
            if X == nXBlocks - 1:
                nXValid = DimensionsCheck[0] - X * myBlockSize[0]

            # find X offset
            myX = X * myBlockSize[0]

            # reset buffer size for start of Y loop
            nYValid = myBlockSize[1]
            myBufSize = nXValid * nYValid

            # loop through Y lines
            for Y in range(0, nYBlocks):
                ProgressCt += 1
                if 10 * ProgressCt / ProgressEnd % 10 != ProgressMk and not quiet:
                    ProgressMk = 10 * ProgressCt / ProgressEnd % 10
                    print("%d.." % (10 * ProgressMk), end=" ")

                # change the block size of the final piece
                if Y == nYBlocks - 1:
                    nYValid = DimensionsCheck[1] - Y * myBlockSize[1]
                    myBufSize = nXValid * nYValid

                # find Y offset
                myY = Y * myBlockSize[1]

                # create empty buffer to mark where nodata occurs
                myNDVs = None

                # make local namespace for calculation
                local_namespace = {}

                # Create destination numpy arrays for each list-alpha.
                numpy_arrays = {}
                counter_per_alpha = {}
                for Alpha in count_file_per_alpha:
                    dtype = gdal_array.GDALTypeCodeToNumericTypeCode(
                        largest_datatype_per_alpha[Alpha])
                    if count_file_per_alpha[Alpha] == 1:
                        numpy_arrays[Alpha] = numpy.empty((nYValid, nXValid), dtype=dtype)
                    else:
                        numpy_arrays[Alpha] = numpy.empty(
                            (count_file_per_alpha[Alpha], nYValid, nXValid), dtype=dtype)
                    counter_per_alpha[Alpha] = 0

                # fetch data for each input layer
                for i, Alpha in enumerate(myAlphaList):

                    # populate lettered arrays with values
                    if allBandsIndex is not None and allBandsIndex == i:
                        myBandNo = bandNo
                    else:
                        myBandNo = myBands[i]
                    if Alpha in myAlphaFileLists:
                        if count_file_per_alpha[Alpha] == 1:
                            buf_obj = numpy_arrays[Alpha]
                        else:
                            buf_obj = numpy_arrays[Alpha][counter_per_alpha[Alpha]]
                        myval = gdal_array.BandReadAsArray(
                            myFiles[i].GetRasterBand(myBandNo),
                            xoff=myX, yoff=myY,
                            win_xsize=nXValid, win_ysize=nYValid,
                            buf_obj=buf_obj)
                        counter_per_alpha[Alpha] += 1
                    else:
                        myval = gdal_array.BandReadAsArray(
                            myFiles[i].GetRasterBand(myBandNo),
                            xoff=myX, yoff=myY,
                            win_xsize=nXValid, win_ysize=nYValid)
                    if myval is None:
                        # FIX: was {filename[i]} — `filename` here is the last
                        # loop variable (a str or None); the per-input name
                        # lives in myFileNames.
                        raise Exception(
                            f'Input block reading failed from filename {myFileNames[i]}')

                    # fill in nodata values
                    if myNDV[i] is not None:
                        # myNDVs is a boolean buffer.
                        # a cell equals 1 if there is NDV in any of the
                        # corresponding cells in input raster bands.
                        if myNDVs is None:
                            # first band with NDV set: initialize myNDVs to a
                            # zero buffer as we didn't see any NDV value yet.
                            myNDVs = numpy.zeros(myBufSize)
                            myNDVs.shape = (nYValid, nXValid)
                        myNDVs = 1 * numpy.logical_or(myNDVs == 1, myval == myNDV[i])

                    # add an array of values for this block to the eval namespace
                    if not Alpha in myAlphaFileLists:
                        local_namespace[Alpha] = myval
                    myval = None

                for lst in myAlphaFileLists:
                    local_namespace[lst] = numpy_arrays[lst]

                # try the calculation on the array blocks
                this_calc = calc[bandNo - 1 if len(calc) > 1 else 0]
                try:
                    myResult = eval(this_calc, global_namespace, local_namespace)
                except:
                    print(f"evaluation of calculation {this_calc} failed")
                    raise

                # Propagate nodata values (set nodata cells to zero
                # then add nodata value to these cells).
                if myNDVs is not None and myOutNDV is not None:
                    myResult = ((1 * (myNDVs == 0)) * myResult) + (myOutNDV * myNDVs)
                elif not isinstance(myResult, numpy.ndarray):
                    myResult = numpy.ones((nYValid, nXValid)) * myResult

                # write data block to the output file
                myOutB = myOut.GetRasterBand(bandNo)
                if gdal_array.BandWriteArray(myOutB, myResult, xoff=myX, yoff=myY) != 0:
                    raise Exception('Block writing failed')
                myOutB = None  # write to band

    # remove temp files
    for idx, tempFile in enumerate(myTempFileNames):
        myFiles[idx] = None
        os.remove(tempFile)

    gdal.ErrorReset()
    myOut.FlushCache()
    if gdal.GetLastErrorMsg() != '':
        raise Exception('Dataset writing failed')

    if not quiet:
        print("100 - Done")

    return myOut
def transformer_12():
    """Test GCP_TPS transformer behaviour with degenerate GCP sets.

    Three VRT datasets are built in-memory, each with four corner GCPs plus a
    fifth degenerate one:
      1. an exact duplicate GCP (same pixel/line AND same X/Y) -> transformer
         creation must succeed,
      2. two GCPs sharing pixel/line but mapping to different X/Y -> must
         raise a GDAL error,
      3. two GCPs sharing X/Y but at different pixel/line -> must raise a
         GDAL error.
    Returns 'success' or 'fail' (old-style gdaltest convention).
    """
    # Case 1: strictly duplicated GCP entry - should be tolerated.
    ds = gdal.Open("""
<VRTDataset rasterXSize="20" rasterYSize="20">
  <GCPList Projection="PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","26711"]]">
    <GCP Id="" Pixel="0" Line="0" X="0" Y="0"/>
    <GCP Id="" Pixel="20" Line="0" X="20" Y="0"/>
    <GCP Id="" Pixel="0" Line="20" X="0" Y="20"/>
    <GCP Id="" Pixel="20" Line="20" X="20" Y="20"/>
    <GCP Id="" Pixel="0" Line="0" X="0" Y="0"/> <!-- duplicate entry -->
  </GCPList>
  <VRTRasterBand dataType="Byte" band="1">
    <ColorInterp>Gray</ColorInterp>
    <SimpleSource>
      <SourceFilename relativeToVRT="1">data/byte.tif</SourceFilename>
    </SimpleSource>
  </VRTRasterBand>
</VRTDataset>""")
    tr = gdal.Transformer(ds, None, ['METHOD=GCP_TPS'])
    if tr is None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Case 2: two GCPs at the same pixel/line but different target X/Y -
    # an inconsistent TPS system; an error message is expected.
    ds = gdal.Open("""
<VRTDataset rasterXSize="20" rasterYSize="20">
  <GCPList Projection="PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","26711"]]">
    <GCP Id="" Pixel="0" Line="0" X="0" Y="0"/>
    <GCP Id="" Pixel="20" Line="0" X="20" Y="0"/>
    <GCP Id="" Pixel="0" Line="20" X="0" Y="20"/>
    <GCP Id="" Pixel="20" Line="20" X="20" Y="20"/>
    <GCP Id="" Pixel="0" Line="0" X="10" Y="10"/> <!-- same pixel,line -->
  </GCPList>
  <VRTRasterBand dataType="Byte" band="1">
    <ColorInterp>Gray</ColorInterp>
    <SimpleSource>
      <SourceFilename relativeToVRT="1">data/byte.tif</SourceFilename>
    </SimpleSource>
  </VRTRasterBand>
</VRTDataset>""")
    gdal.ErrorReset()
    with gdaltest.error_handler():
        tr = gdal.Transformer(ds, None, ['METHOD=GCP_TPS'])
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    # Case 3: two GCPs mapping to the same X/Y from different pixel/line -
    # also inconsistent; an error message is expected.
    ds = gdal.Open("""
<VRTDataset rasterXSize="20" rasterYSize="20">
  <GCPList Projection="PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","26711"]]">
    <GCP Id="" Pixel="0" Line="0" X="0" Y="0"/>
    <GCP Id="" Pixel="20" Line="0" X="20" Y="0"/>
    <GCP Id="" Pixel="0" Line="20" X="0" Y="20"/>
    <GCP Id="" Pixel="20" Line="20" X="20" Y="20"/>
    <GCP Id="" Pixel="10" Line="10" X="20" Y="20"/> <!-- same X,Y -->
  </GCPList>
  <VRTRasterBand dataType="Byte" band="1">
    <ColorInterp>Gray</ColorInterp>
    <SimpleSource>
      <SourceFilename relativeToVRT="1">data/byte.tif</SourceFilename>
    </SimpleSource>
  </VRTRasterBand>
</VRTDataset>""")
    gdal.ErrorReset()
    with gdaltest.error_handler():
        tr = gdal.Transformer(ds, None, ['METHOD=GCP_TPS'])
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    return 'success'
def test_derived_test1():
    """Check DERIVED_SUBDATASETS metadata for a CFloat64 raster.

    Verifies the full set of derived-subdataset NAME/DESC entries, then opens
    each derived subdataset and checks it inherits the source geotransform
    and projection.
    """
    filename = "../gcore/data/cfloat64.tif"
    gdal.ErrorReset()
    ds = gdal.Open(filename)
    assert ds is not None and gdal.GetLastErrorMsg() == ''
    got_dsds = ds.GetMetadata('DERIVED_SUBDATASETS')
    expected_gt = ds.GetGeoTransform()
    expected_prj = ds.GetProjection()
    expected_dsds = {
        'DERIVED_SUBDATASET_1_NAME': 'DERIVED_SUBDATASET:AMPLITUDE:../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_1_DESC': 'Amplitude of input bands from ../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_2_NAME': 'DERIVED_SUBDATASET:PHASE:../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_2_DESC': 'Phase of input bands from ../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_3_NAME': 'DERIVED_SUBDATASET:REAL:../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_3_DESC': 'Real part of input bands from ../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_4_NAME': 'DERIVED_SUBDATASET:IMAG:../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_4_DESC': 'Imaginary part of input bands from ../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_5_NAME': 'DERIVED_SUBDATASET:CONJ:../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_5_DESC': 'Conjugate of input bands from ../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_6_NAME': 'DERIVED_SUBDATASET:INTENSITY:../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_6_DESC': 'Intensity (squared amplitude) of input bands from ../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_7_NAME': 'DERIVED_SUBDATASET:LOGAMPLITUDE:../gcore/data/cfloat64.tif',
        'DERIVED_SUBDATASET_7_DESC': 'log10 of amplitude of input bands from ../gcore/data/cfloat64.tif'
    }
    if got_dsds != expected_dsds:
        import pprint
        pprint.pprint(got_dsds)
        pytest.fail()

    for key in expected_dsds:
        val = expected_dsds[key]
        if key.endswith('_NAME'):
            ds = gdal.Open(val)
            assert ds is not None and gdal.GetLastErrorMsg() == ''
            gt = ds.GetGeoTransform()
            if gt != expected_gt:
                import pprint
                pprint.pprint("Expected geotransform: " + str(expected_gt) + ", got " + str(gt))
                pytest.fail()
            prj = ds.GetProjection()
            if prj != expected_prj:
                import pprint
                # FIX: was str(gt) — printed the geotransform in a projection
                # mismatch message; report the actual projection instead.
                pprint.pprint("Expected projection: " + str(expected_prj) + ", got: " + str(prj))
                pytest.fail()
def test_derived_test2():
    """Check DERIVED_SUBDATASETS metadata and checksums for a complex-int SAR raster.

    First verifies the full NAME/DESC metadata set, then opens each derived
    subdataset and compares its band-1 checksum against a known-good value.
    """
    filename = "../gcore/data/cint_sar.tif"
    gdal.ErrorReset()
    ds = gdal.Open(filename)
    assert ds is not None and gdal.GetLastErrorMsg() == ''
    got_dsds = ds.GetMetadata('DERIVED_SUBDATASETS')
    # Expected derived-subdataset metadata entries.
    expected_dsds = {
        'DERIVED_SUBDATASET_1_NAME': 'DERIVED_SUBDATASET:AMPLITUDE:../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_1_DESC': 'Amplitude of input bands from ../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_2_NAME': 'DERIVED_SUBDATASET:PHASE:../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_2_DESC': 'Phase of input bands from ../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_3_NAME': 'DERIVED_SUBDATASET:REAL:../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_3_DESC': 'Real part of input bands from ../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_4_NAME': 'DERIVED_SUBDATASET:IMAG:../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_4_DESC': 'Imaginary part of input bands from ../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_5_NAME': 'DERIVED_SUBDATASET:CONJ:../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_5_DESC': 'Conjugate of input bands from ../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_6_NAME': 'DERIVED_SUBDATASET:INTENSITY:../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_6_DESC': 'Intensity (squared amplitude) of input bands from ../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_7_NAME': 'DERIVED_SUBDATASET:LOGAMPLITUDE:../gcore/data/cint_sar.tif',
        'DERIVED_SUBDATASET_7_DESC': 'log10 of amplitude of input bands from ../gcore/data/cint_sar.tif'
    }
    # Known-good band-1 checksums for each derived subdataset.
    expected_cs = {
        'DERIVED_SUBDATASET_1_NAME': 345,
        'DERIVED_SUBDATASET_2_NAME': 10,
        'DERIVED_SUBDATASET_3_NAME': 159,
        'DERIVED_SUBDATASET_4_NAME': 142,
        'DERIVED_SUBDATASET_5_NAME': 110,
        'DERIVED_SUBDATASET_6_NAME': 314,
        'DERIVED_SUBDATASET_7_NAME': 55
    }
    if got_dsds != expected_dsds:
        import pprint
        pprint.pprint(got_dsds)
        pytest.fail()

    # Open every derived subdataset (NAME entries only) and verify checksum.
    for key in expected_dsds:
        val = expected_dsds[key]
        if key.endswith('_NAME'):
            ds = gdal.Open(val)
            assert ds is not None and gdal.GetLastErrorMsg() == ''
            cs = ds.GetRasterBand(1).Checksum()
            if expected_cs[key] != cs:
                import pprint
                pprint.pprint("Expected checksum " + str(expected_cs[key]) + ", got " + str(cs))
                pytest.fail()
def validate(ds, check_tiled=True):
    """Check if a file is a (Geo)TIFF with cloud optimized compatible structure.

    Args:
      ds: GDAL Dataset for the file to inspect (or a filename string,
          which will be opened here).
      check_tiled: Set to False to ignore missing tiling.

    Returns:
      A tuple, whose first element is an array of error messages
      (empty if there is no error), and the second element, a
      dictionary with the structure of the GeoTIFF file.

    Raises:
      ValidateCloudOptimizedGeoTIFFException: Unable to open the file or the
        file is not a Tiff.
    """
    if int(gdal.VersionInfo('VERSION_NUM')) < 2020000:
        raise ValidateCloudOptimizedGeoTIFFException('GDAL 2.2 or above required')

    unicode_type = type(''.encode('utf-8').decode('utf-8'))
    if isinstance(ds, str) or isinstance(ds, unicode_type):
        # A filename was passed: open it ourselves, silencing GDAL errors.
        gdal.PushErrorHandler()
        ds = gdal.Open(ds)
        gdal.PopErrorHandler()
        if ds is None:
            raise ValidateCloudOptimizedGeoTIFFException(
                'Invalid file : %s' % gdal.GetLastErrorMsg())
        if ds.GetDriver().ShortName != 'GTiff':
            raise ValidateCloudOptimizedGeoTIFFException('The file is not a GeoTIFF')

    details = {}
    errors = []
    filename = ds.GetDescription()
    main_band = ds.GetRasterBand(1)
    ovr_count = main_band.GetOverviewCount()
    filelist = ds.GetFileList()
    if filelist is not None and filename + '.ovr' in filelist:
        errors += ['Overviews found in external .ovr file. They should be internal']

    if main_band.XSize >= 512 or main_band.YSize >= 512:
        if check_tiled:
            block_size = main_band.GetBlockSize()
            if block_size[0] == main_band.XSize and block_size[0] > 1024:
                errors += ['The file is greater than 512xH or Wx512, but is not tiled']

        if ovr_count == 0:
            errors += ['The file is greater than 512xH or Wx512, but has no overviews']

    ifd_offset = int(main_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
    ifd_offsets = [ifd_offset]
    if ifd_offset not in (8, 16):
        errors += [
            'The offset of the main IFD should be 8 for ClassicTIFF '
            'or 16 for BigTIFF. It is %d instead' % ifd_offsets[0]
        ]
    details['ifd_offsets'] = {}
    details['ifd_offsets']['main'] = ifd_offset

    for i in range(ovr_count):
        # Check that overviews are by descending sizes
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        if i == 0:
            if (ovr_band.XSize > main_band.XSize or
                    ovr_band.YSize > main_band.YSize):
                errors += ['First overview has larger dimension than main band']
        else:
            prev_ovr_band = ds.GetRasterBand(1).GetOverview(i - 1)
            if (ovr_band.XSize > prev_ovr_band.XSize or
                    ovr_band.YSize > prev_ovr_band.YSize):
                errors += [
                    'Overview of index %d has larger dimension than '
                    'overview of index %d' % (i, i - 1)
                ]

        if check_tiled:
            block_size = ovr_band.GetBlockSize()
            if block_size[0] == ovr_band.XSize and block_size[0] > 1024:
                errors += ['Overview of index %d is not tiled' % i]

        # Check that the IFD of descending overviews are sorted by increasing
        # offsets
        ifd_offset = int(ovr_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
        ifd_offsets.append(ifd_offset)
        details['ifd_offsets']['overview_%d' % i] = ifd_offset
        if ifd_offsets[-1] < ifd_offsets[-2]:
            if i == 0:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of the main '
                    'image, which is at byte %d' %
                    (i, ifd_offsets[-1], ifd_offsets[-2])
                ]
            else:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of index %d, '
                    'which is at byte %d' %
                    (i, ifd_offsets[-1], i - 1, ifd_offsets[-2])
                ]

    # Check that the imagery starts by the smallest overview and ends with
    # the main resolution dataset
    block_offset = main_band.GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF')
    if not block_offset:
        errors += ['Missing BLOCK_OFFSET_0_0']
    # NOTE(review): when BLOCK_OFFSET_0_0 is missing, data_offset stays None
    # and the comparisons below would raise TypeError on Python 3 — confirm
    # whether that path is reachable for files that get this far.
    data_offset = int(block_offset) if block_offset else None
    data_offsets = [data_offset]
    details['data_offsets'] = {}
    details['data_offsets']['main'] = data_offset
    for i in range(ovr_count):
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        data_offset = int(ovr_band.GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF'))
        data_offsets.append(data_offset)
        details['data_offsets']['overview_%d' % i] = data_offset

    if data_offsets[-1] < ifd_offsets[-1]:
        if ovr_count > 0:
            errors += [
                'The offset of the first block of the smallest overview '
                'should be after its IFD'
            ]
        else:
            errors += [
                'The offset of the first block of the image should '
                'be after its IFD'
            ]
    for i in range(len(data_offsets) - 2, 0, -1):
        if data_offsets[i] < data_offsets[i + 1]:
            errors += [
                'The offset of the first block of overview of index %d should '
                'be after the one of the overview of index %d' % (i - 1, i)
            ]
    if len(data_offsets) >= 2 and data_offsets[0] < data_offsets[1]:
        # FIX: implicit string concatenation was missing a space between
        # 'image' and 'should', emitting "...imageshould be after...".
        errors += [
            'The offset of the first block of the main resolution image '
            'should be after the one of the overview of index %d' %
            (ovr_count - 1)
        ]

    return errors, details
def test_vrtmultidim_dimension():
    """Test multidimensional VRT <Dimension> parsing.

    A valid VRT with two dimensions (including a 64-bit size and a dangling
    indexingVariable reference) must open and expose correct attributes;
    four malformed variants (missing/invalid group name, missing dimension
    name or size) must fail to open.
    """
    # Valid case: two dimensions under the root group.
    ds = gdal.OpenEx("""<VRTDataset>
    <Group name="/">
        <Dimension name="X" size="2" type="foo" direction="bar" indexingVariable="X"/>
        <Dimension name="Y" size="1234567890123"/>
    </Group>
</VRTDataset>""", gdal.OF_MULTIDIM_RASTER)
    assert ds
    rg = ds.GetRootGroup()
    assert rg
    assert rg.GetName() == '/'
    dims = rg.GetDimensions()
    assert len(dims) == 2

    dim_0 = dims[0]
    assert dim_0.GetName() == 'X'
    assert dim_0.GetSize() == 2
    assert dim_0.GetType() == 'foo'
    assert dim_0.GetDirection() == 'bar'
    # indexingVariable "X" names a variable that does not exist: the lookup
    # must fail with an explicit error message.
    with gdaltest.error_handler():
        gdal.ErrorReset()
        assert not dim_0.GetIndexingVariable()
        assert gdal.GetLastErrorMsg() == 'Cannot find variable X'

    dim_1 = dims[1]
    assert dim_1.GetName() == 'Y'
    # size larger than 2^32 must survive round-tripping.
    assert dim_1.GetSize() == 1234567890123

    # Malformed: Group without a "name" attribute.
    with gdaltest.error_handler():
        ds = gdal.OpenEx("""<VRTDataset>
    <Group MISSING_name="/">
    </Group>
</VRTDataset>""", gdal.OF_MULTIDIM_RASTER)
    assert not ds

    # Malformed: root group must be named "/".
    with gdaltest.error_handler():
        ds = gdal.OpenEx("""<VRTDataset>
    <Group name="INVALID">
    </Group>
</VRTDataset>""", gdal.OF_MULTIDIM_RASTER)
    assert not ds

    # Malformed: Dimension without a "name" attribute.
    with gdaltest.error_handler():
        ds = gdal.OpenEx("""<VRTDataset>
    <Group name="/">
        <Dimension MISSING_name="X" size="1"/>
    </Group>
</VRTDataset>""", gdal.OF_MULTIDIM_RASTER)
    assert not ds

    # Malformed: Dimension without a "size" attribute.
    with gdaltest.error_handler():
        ds = gdal.OpenEx("""<VRTDataset>
    <Group name="/">
        <Dimension name="X" MISSING_size="1"/>
    </Group>
</VRTDataset>""", gdal.OF_MULTIDIM_RASTER)
    assert not ds
import shutil try: shutil.rmtree('tmppamproxydir') except: pass os.mkdir('tmppamproxydir') gdal.SetConfigOption('GDAL_PAM_PROXY_DIR', 'tmppamproxydir') # Compute statistics. They should be saved in the .aux.xml in the proxyDB ds = gdal.Open('tmpdirreadonly/byte.tif') stats = ds.GetRasterBand(1).ComputeStatistics(False) gdal.ErrorReset() ds = None error_msg = gdal.GetLastErrorMsg() if error_msg != '': print('did not expected error message') sys.exit(1) # Check that the .aux.xml in the proxyDB exists filelist = gdal.ReadDir('tmppamproxydir') if not '000000_tmpdirreadonly_byte.tif.aux.xml' in filelist: print( 'did not get find 000000_tmpdirreadonly_byte.tif.aux.xml on filesystem' ) sys.exit(1) # Test altering a value to check that the file will be used f = open('tmppamproxydir/000000_tmpdirreadonly_byte.tif.aux.xml', 'w') f.write("""<PAMDataset>
def test_basic_test_11():
    # Exercise gdal.OpenEx(): open-flag combinations, allowed_drivers,
    # open_options, shared handles, vector datasets, and error/exception
    # behavior on a non-existing path.
    ds = gdal.OpenEx('data/byte.tif')
    assert ds is not None
    ds = gdal.OpenEx('data/byte.tif', gdal.OF_RASTER)
    assert ds is not None
    # A raster file must not open when only vector drivers are considered.
    ds = gdal.OpenEx('data/byte.tif', gdal.OF_VECTOR)
    assert ds is None
    ds = gdal.OpenEx('data/byte.tif', gdal.OF_RASTER | gdal.OF_VECTOR)
    assert ds is not None
    ds = gdal.OpenEx('data/byte.tif', gdal.OF_ALL)
    assert ds is not None
    ds = gdal.OpenEx('data/byte.tif', gdal.OF_UPDATE)
    assert ds is not None
    ds = gdal.OpenEx(
        'data/byte.tif',
        gdal.OF_RASTER | gdal.OF_VECTOR | gdal.OF_UPDATE | gdal.OF_VERBOSE_ERROR)
    assert ds is not None
    # Driver restriction: an empty list means "no restriction".
    ds = gdal.OpenEx('data/byte.tif', allowed_drivers=[])
    assert ds is not None
    ds = gdal.OpenEx('data/byte.tif', allowed_drivers=['GTiff'])
    assert ds is not None
    ds = gdal.OpenEx('data/byte.tif', allowed_drivers=['PNG'])
    assert ds is None
    # An unknown open option only warns; the open still succeeds.
    with gdaltest.error_handler():
        ds = gdal.OpenEx('data/byte.tif', open_options=['FOO'])
    assert ds is not None
    # OF_SHARED: many shared opens of the same file must all succeed.
    ar_ds = [gdal.OpenEx('data/byte.tif', gdal.OF_SHARED) for _ in range(1024)]
    assert ar_ds[1023] is not None
    ar_ds = None
    # Symmetric checks on a vector dataset.
    ds = gdal.OpenEx('../ogr/data/poly.shp', gdal.OF_RASTER)
    assert ds is None
    ds = gdal.OpenEx('../ogr/data/poly.shp', gdal.OF_VECTOR)
    assert ds is not None
    assert ds.GetLayerCount() == 1
    assert ds.GetLayer(0) is not None
    ds.GetLayer(0).GetMetadata()
    ds = gdal.OpenEx('../ogr/data/poly.shp', allowed_drivers=['ESRI Shapefile'])
    assert ds is not None
    ds = gdal.OpenEx('../ogr/data/poly.shp', gdal.OF_RASTER | gdal.OF_VECTOR)
    assert ds is not None
    # Without OF_VERBOSE_ERROR a failed open stays silent...
    ds = gdal.OpenEx('non existing')
    assert ds is None and gdal.GetLastErrorMsg() == ''
    # ...with it, an error message is emitted (quiet handler swallows output).
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    ds = gdal.OpenEx('non existing', gdal.OF_VERBOSE_ERROR)
    gdal.PopErrorHandler()
    assert ds is None and gdal.GetLastErrorMsg() != ''
    # In exceptions mode a failed open raises instead of returning None;
    # restore the previous exceptions setting afterwards.
    old_use_exceptions_status = gdal.GetUseExceptions()
    gdal.UseExceptions()
    got_exception = False
    try:
        ds = gdal.OpenEx('non existing')
    except RuntimeError:
        got_exception = True
    if old_use_exceptions_status == 0:
        gdal.DontUseExceptions()
    assert got_exception
def misc_6_internal(datatype, nBands, setDriversDone):
    """Run CreateCopy() stress checks for every raster driver.

    For each driver able to create raster datasets, copy a small in-memory
    dataset of the given data type and band count, then (once per driver,
    tracked through setDriversDone) verify that truncated writes are detected
    and that the progress-callback can interrupt CreateCopy().

    :param datatype: GDAL data type of the source bands (gdal.GDT_*)
    :param nBands: number of bands of the in-memory source dataset
    :param setDriversDone: set of driver short names already fully tested;
        mutated in place
    :return: 'success' or 'fail'
    """
    ds = gdal.GetDriverByName('MEM').Create('', 10, 10, nBands, datatype)
    if nBands > 0:
        ds.GetRasterBand(1).Fill(255)
    ds.SetGeoTransform([2, 1.0 / 10, 0, 49, 0, -1.0 / 10])
    ds.SetProjection(
        'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.01745329251994328]]'
    )
    ds.SetMetadata(['a'])
    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        md = drv.GetMetadata()
        if ('DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md) and 'DCAP_RASTER' in md:
            # print ('drv = %s, nBands = %d, datatype = %s' % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype)))
            skip = False
            # FIXME: A few cases that crashes and should be investigated
            if drv.ShortName == 'JPEG2000':
                if (nBands == 2 or nBands >= 5) or \
                   not (datatype == gdal.GDT_Byte or datatype == gdal.GDT_Int16 or datatype == gdal.GDT_UInt16):
                    skip = True
            if skip is False:
                dirname = 'tmp/tmp/tmp_%s_%d_%s' % (
                    drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                try:
                    os.mkdir(dirname)
                except OSError:
                    try:
                        os.stat(dirname)
                        # Hum the directory already exists... Not expected,
                        # but let's try to go on
                    except OSError:
                        reason = 'Cannot create %s before drv = %s, nBands = %d, datatype = %s' % (
                            dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                        gdaltest.post_reason(reason)
                        return 'fail'
                filename = get_filename(drv, dirname)
                dst_ds = drv.CreateCopy(filename, ds)
                has_succeeded = dst_ds is not None
                dst_ds = None
                # Remember the output size so truncation points can be derived.
                size = 0
                stat = gdal.VSIStatL(filename)
                if stat is not None:
                    size = stat.size
                try:
                    shutil.rmtree(dirname)
                except OSError:
                    reason = 'Cannot remove %s after drv = %s, nBands = %d, datatype = %s' % (
                        dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                    gdaltest.post_reason(reason)
                    return 'fail'
                if has_succeeded and drv.ShortName not in setDriversDone and nBands > 0:
                    setDriversDone.add(drv.ShortName)
                    # The first list of drivers fail to detect short writing
                    # The second one is because they are verbose in stderr
                    if 'DCAP_VIRTUALIO' in md and size != 0 and \
                       drv.ShortName not in ['JPEG2000', 'KMLSUPEROVERLAY', 'HF2', 'ZMap', 'DDS'] and \
                       drv.ShortName not in ['GIF', 'JP2ECW', 'JP2Lura']:
                        for j in range(10):
                            # Floor division: truncation offsets must be ints
                            # (was "/", yielding a float under Python 3).
                            truncated_size = (size * j) // 10
                            vsimem_filename = (
                                '/vsimem/test_truncate/||maxlength=%d||' % truncated_size) + get_filename(drv, '')[1:]
                            # print('drv = %s, nBands = %d, datatype = %s, truncated_size = %d' % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype), truncated_size))
                            dst_ds = drv.CreateCopy(vsimem_filename, ds)
                            error_detected = False
                            if dst_ds is None:
                                error_detected = True
                            else:
                                gdal.ErrorReset()
                                dst_ds = None
                                if gdal.GetLastErrorMsg() != '':
                                    error_detected = True
                            if not error_detected:
                                msg = 'write error not detected with drv = %s, nBands = %d, datatype = %s, truncated_size = %d' % (
                                    drv.ShortName, nBands, gdal.GetDataTypeName(datatype), truncated_size)
                                print(msg)
                                gdaltest.post_reason(msg)
                            # Clean /vsimem leftovers; report anything that
                            # survived the unlink pass.
                            fl = gdal.ReadDirRecursive('/vsimem/test_truncate')
                            if fl is not None:
                                for myf in fl:
                                    gdal.Unlink('/vsimem/test_truncate/' + myf)
                                fl = gdal.ReadDirRecursive('/vsimem/test_truncate')
                                if fl is not None:
                                    print(fl)
                    # NOTE(review): nesting of the tail cleanup reconstructed
                    # from statement order — confirm against upstream.
                    if drv.ShortName not in [
                            'ECW', 'JP2ECW', 'VRT', 'XPM', 'JPEG2000', 'FIT',
                            'RST', 'INGR', 'USGSDEM', 'KMLSUPEROVERLAY', 'GMT'
                    ]:
                        # The callback returns "abort": CreateCopy() must fail.
                        dst_ds = drv.CreateCopy(
                            filename, ds, callback=misc_6_interrupt_callback_class().cbk)
                        if dst_ds is not None:
                            gdaltest.post_reason(
                                'interruption did not work with drv = %s, nBands = %d, datatype = %s' %
                                (drv.ShortName, nBands, gdal.GetDataTypeName(datatype)))
                            dst_ds = None
                            try:
                                shutil.rmtree(dirname)
                            except OSError:
                                pass
                            return 'fail'
                        dst_ds = None
                        # Reset the scratch directory to a clean state.
                        try:
                            shutil.rmtree(dirname)
                        except OSError:
                            pass
                        try:
                            os.mkdir(dirname)
                        except OSError:
                            reason = 'Cannot create %s before drv = %s, nBands = %d, datatype = %s' % (
                                dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                            gdaltest.post_reason(reason)
                            return 'fail'
    ds = None
    return 'success'
def plmosaic_16():
    # PLMosaic driver: when the root listing contains a single mosaic, the
    # driver should open that mosaic directly (no SUBDATASETS), and an
    # unsupported connection-string option must be rejected.
    if gdaltest.plmosaic_drv is None:
        return 'skip'
    try:
        shutil.rmtree('tmp/plmosaic_cache')
    except OSError:
        pass
    # Fake API response for the mosaic lookup by name.
    gdal.FileFromMemBuffer(
        '/vsimem/root/?name__is=my_mosaic',
        """{ "mosaics": [{ "id": "my_mosaic_id", "name": "my_mosaic", "coordinate_system": "EPSG:3857", "datatype": "byte", "grid": { "quad_size": 4096, "resolution": 4.77731426716 }, "first_acquired": "first_date", "last_acquired": "last_date", "_links" : { "tiles" : "/vsimem/root/my_mosaic/tiles{0-3}/{z}/{x}/{y}.png" }, "quad_download": true }] }""")
    # Valid root: one single mosaic, should open the dataset directly
    gdal.FileFromMemBuffer(
        '/vsimem/root',
        """{ "mosaics": [ { "id": "my_mosaic_id", "name": "my_mosaic", "coordinate_system": "EPSG:3857", "_links" : { "_self": "/vsimem/root/my_mosaic" }, "quad_download": true } ], }""")
    # Unsupported option in the connection string must fail the open with a
    # specific error message.
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    gdal.PushErrorHandler()
    ds = gdal.OpenEx('PLMosaic:api_key=foo,unsupported_option=val',
                     gdal.OF_RASTER)
    gdal.PopErrorHandler()
    gdal.SetConfigOption('PL_URL', None)
    if ds is not None or gdal.GetLastErrorMsg().find(
            'Unsupported option unsupported_option') < 0:
        gdaltest.post_reason('fail')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    # Single-mosaic root: open directly and check metadata.
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx('PLMosaic:', gdal.OF_RASTER, open_options=['API_KEY=foo'])
    gdal.SetConfigOption('PL_URL', None)
    if ds.GetMetadata("SUBDATASETS") != {}:
        gdaltest.post_reason('fail')
        print(ds.GetMetadata("SUBDATASETS"))
        return 'fail'
    if ds.GetMetadata() != {
            'LAST_ACQUIRED': 'last_date',
            'NAME': 'my_mosaic',
            'FIRST_ACQUIRED': 'first_date'
    }:
        gdaltest.post_reason('fail')
        print(ds.GetMetadata())
        return 'fail'
    ds = None
    return 'success'
def misc_12():
    # For every raster driver with creation support: CreateCopy() into a
    # non-existing directory must fail WITH an error message (crash/silent
    # failure detector), and, when gdal_translate is available, the same
    # operation is run externally to catch memory leaks via
    # DEBUG_VSIMALLOC_STATS output.
    if int(gdal.VersionInfo('VERSION_NUM')) < 1900:
        gdaltest.post_reason('would crash')
        return 'skip'
    import test_cli_utilities
    gdal_translate_path = test_cli_utilities.get_gdal_translate_path()
    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        md = drv.GetMetadata()
        if ('DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md) and 'DCAP_RASTER' in md:
            # Per-driver source constraints: band count, data type, raster
            # size adjusted to what each driver accepts.
            nbands = 1
            if drv.ShortName == 'WEBP' or drv.ShortName == 'ADRG':
                nbands = 3
            datatype = gdal.GDT_Byte
            if drv.ShortName == 'BT' or drv.ShortName == 'BLX':
                datatype = gdal.GDT_Int16
            elif drv.ShortName == 'GTX' or drv.ShortName == 'NTv2' or drv.ShortName == 'Leveller':
                datatype = gdal.GDT_Float32
            size = 1201
            if drv.ShortName == 'BLX':
                size = 128
            src_ds = gdal.GetDriverByName('GTiff').Create(
                '/vsimem/misc_12_src.tif', size, size, nbands, datatype)
            set_gt = (2, 1.0 / size, 0, 49, 0, -1.0 / size)
            src_ds.SetGeoTransform(set_gt)
            src_ds.SetProjection(
                'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.01745329251994328]]'
            )
            # Test to detect crashes
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            ds = drv.CreateCopy('/nonexistingpath' + get_filename(drv, ''),
                                src_ds)
            gdal.PopErrorHandler()
            if ds is None and gdal.GetLastErrorMsg() == '':
                gdaltest.post_reason('failure')
                print(
                    'CreateCopy() into non existing dir fails without error message for driver %s'
                    % drv.ShortName)
                return 'fail'
            ds = None
            if gdal_translate_path is not None:
                # Test to detect memleaks
                ds = gdal.GetDriverByName('VRT').CreateCopy(
                    'tmp/misc_12.vrt', src_ds)
                (out, err) = gdaltest.runexternal_out_and_err(
                    gdal_translate_path + ' -of ' + drv.ShortName +
                    ' tmp/misc_12.vrt /nonexistingpath/' + get_filename(drv, ''),
                    check_memleak=False)
                del ds
                gdal.Unlink('tmp/misc_12.vrt')
                # If DEBUG_VSIMALLOC_STATS is defined, this is an easy way
                # to catch some memory leaks
                if out.find('VSIMalloc + VSICalloc - VSIFree') != -1 and \
                   out.find('VSIMalloc + VSICalloc - VSIFree : 0') == -1:
                    # Rasterlite is known to leak exactly one allocation;
                    # tolerate that specific case.
                    if drv.ShortName == 'Rasterlite' and out.find(
                            'VSIMalloc + VSICalloc - VSIFree : 1') != -1:
                        pass
                    else:
                        print('memleak detected for driver %s' % drv.ShortName)
            src_ds = None
            gdal.Unlink('/vsimem/misc_12_src.tif')
    return 'success'
def plmosaic_21():
    # PLMosaic USE_TILES mode error paths: full-resolution reads must error
    # out when no cache is configured, a non-Byte mosaic must warn that the
    # tile API cannot be used, and a mosaic without a tile definition must
    # warn that use_tiles is ignored.
    if gdaltest.plmosaic_drv is None:
        return 'skip'
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx('PLMosaic:', gdal.OF_RASTER, open_options=[
        'API_KEY=foo', 'MOSAIC=my_mosaic', 'CACHE_PATH=', 'USE_TILES=YES'
    ])
    gdal.SetConfigOption('PL_URL', None)
    # Dataset-level full-resolution read must fail.
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    ds.ReadRaster(256, 512, 1, 1)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'
    # Band-level read must fail likewise.
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    ds.GetRasterBand(1).ReadRaster(256, 512, 1, 1)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'
    # Block-level read must fail likewise.
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    ds.GetRasterBand(1).ReadBlock(1, 2)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'
    # Fake a uint16 mosaic: USE_TILES only works on Byte data.
    gdal.FileFromMemBuffer(
        '/vsimem/root/?name__is=mosaic_uint16',
        """{ "mosaics": [{ "id": "mosaic_uint16", "name": "mosaic_uint16", "coordinate_system": "EPSG:3857", "datatype": "uint16", "grid": { "quad_size": 4096, "resolution": 4.77731426716 }, "first_acquired": "first_date", "last_acquired": "last_date", "_links" : { "tiles" : "/vsimem/root/mosaic_uint16/tiles{0-3}/{z}/{x}/{y}.png" }, "quad_download": true }] }""")
    # Should emit a warning
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx('PLMosaic:', gdal.OF_RASTER, open_options=[
        'API_KEY=foo', 'MOSAIC=mosaic_uint16', 'CACHE_PATH=', 'USE_TILES=YES'
    ])
    gdal.SetConfigOption('PL_URL', None)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg().find(
            'Cannot use tile API for full resolution data on non Byte mosaic'
    ) < 0:
        gdaltest.post_reason('fail')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    # Fake a mosaic whose JSON has no "_links"/"tiles" definition.
    gdal.FileFromMemBuffer(
        '/vsimem/root/?name__is=mosaic_without_tiles',
        """{ "mosaics": [{ "id": "mosaic_without_tiles", "name": "mosaic_without_tiles", "coordinate_system": "EPSG:3857", "datatype": "byte", "grid": { "quad_size": 4096, "resolution": 4.77731426716 }, "first_acquired": "first_date", "last_acquired": "last_date", "quad_download": true }] }""")
    # Should emit a warning
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx('PLMosaic:', gdal.OF_RASTER, open_options=[
        'API_KEY=foo', 'MOSAIC=mosaic_without_tiles', 'CACHE_PATH=',
        'USE_TILES=YES'
    ])
    gdal.SetConfigOption('PL_URL', None)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg().find(
            'Cannot find tile definition, so use_tiles will be ignored') < 0:
        gdaltest.post_reason('fail')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    return 'success'
def validate(filename, check_tiled=True):
    """Check that *filename* is a Cloud Optimized GeoTIFF.

    Verifies the structural constraints of a COG: internal overviews of
    descending size, tiled full-resolution image and overviews (when
    check_tiled is True), IFDs laid out at the start of the file in
    increasing offset order, and imagery blocks ordered from smallest
    overview up to the main resolution image.

    Raises ValidateCloudOptimizedGeoTIFFException on the first violation;
    returns None when the file is compliant.
    """
    # IFD_OFFSET / BLOCK_OFFSET_0_0 metadata items require GDAL >= 2.2.
    if int(gdal.VersionInfo('VERSION_NUM')) < 2020000:
        raise ValidateCloudOptimizedGeoTIFFException(
            "GDAL 2.2 or above required")
    gdal.PushErrorHandler()
    ds = gdal.Open(filename)
    gdal.PopErrorHandler()
    if ds is None:
        raise ValidateCloudOptimizedGeoTIFFException("Invalid file : %s" %
                                                     gdal.GetLastErrorMsg())
    if ds.GetDriver().ShortName != 'GTiff':
        raise ValidateCloudOptimizedGeoTIFFException(
            "The file is not a GeoTIFF")
    main_band = ds.GetRasterBand(1)
    ovr_count = main_band.GetOverviewCount()
    # A .ovr sidecar means overviews are external — not COG-compliant.
    if filename + '.ovr' in ds.GetFileList():
        raise ValidateCloudOptimizedGeoTIFFException(
            "Overviews should be internal")
    if check_tiled:
        # A block as wide as the image is a strip, not a tile.
        block_size = main_band.GetBlockSize()
        if block_size[0] == main_band.XSize:
            raise ValidateCloudOptimizedGeoTIFFException(
                "Full resolution image is not tiled")
    # Images larger than 512 px in either dimension must carry overviews.
    if main_band.XSize >= 512 or main_band.YSize >= 512:
        if ovr_count == 0:
            raise ValidateCloudOptimizedGeoTIFFException(
                "The file should have overviews")
    # The main IFD must sit directly after the 8-byte TIFF header.
    ifd_offset = \
        [int(main_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))]
    if ifd_offset[0] != 8:
        raise ValidateCloudOptimizedGeoTIFFException(
            "The offset of the main IFD should be 8. It is %d instead" %
            ifd_offset[0])
    for i in range(ovr_count):
        # Check that overviews are by descending sizes
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        if i == 0:
            if ovr_band.XSize > main_band.XSize or \
               ovr_band.YSize > main_band.YSize:
                raise ValidateCloudOptimizedGeoTIFFException(
                    "First overview has larger dimension than main band")
        else:
            prev_ovr_band = ds.GetRasterBand(1).GetOverview(i - 1)
            if ovr_band.XSize > prev_ovr_band.XSize or \
               ovr_band.YSize > prev_ovr_band.YSize:
                raise ValidateCloudOptimizedGeoTIFFException(
                    "Overview of index %d has larger dimension than "
                    "overview of index %d" % (i, i - 1))
        if check_tiled:
            block_size = ovr_band.GetBlockSize()
            if block_size[0] == ovr_band.XSize:
                raise ValidateCloudOptimizedGeoTIFFException(
                    "Overview of index %d is not tiled" % i)
        # Check that the IFD of descending overviews are sorted by increasing
        # offsets
        ifd_offset.append(int(ovr_band.GetMetadataItem('IFD_OFFSET', 'TIFF')))
        if ifd_offset[-1] < ifd_offset[-2]:
            if i == 0:
                raise ValidateCloudOptimizedGeoTIFFException(
                    "The offset of the IFD for overview of index %d is %d, "
                    "whereas it should be greater than the one of the main "
                    "image, which is at byte %d" %
                    (i, ifd_offset[-1], ifd_offset[-2]))
            else:
                raise ValidateCloudOptimizedGeoTIFFException(
                    "The offset of the IFD for overview of index %d is %d, "
                    "whereas it should be greater than the one of index %d, "
                    "which is at byte %d" %
                    (i, ifd_offset[-1], i - 1, ifd_offset[-2]))
    # Check that the imagery starts by the smallest overview and ends with
    # the main resolution dataset
    data_offset = [int(main_band.GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF'))]
    for i in range(ovr_count):
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        data_offset.append(
            int(ovr_band.GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF')))
    # data_offset[-1] belongs to the smallest overview (or the main image
    # when there are no overviews): it must come after all the IFDs.
    if data_offset[-1] < ifd_offset[-1]:
        if ovr_count > 0:
            raise ValidateCloudOptimizedGeoTIFFException(
                "The offset of the first block of the smallest overview "
                "should be after its IFD")
        else:
            raise ValidateCloudOptimizedGeoTIFFException(
                "The offset of the first block of the image should "
                "be after its IFD")
    # Walk overviews from smallest to largest: data must be in that order.
    for i in range(len(data_offset) - 2, 0, -1):
        if data_offset[i] < data_offset[i + 1]:
            raise ValidateCloudOptimizedGeoTIFFException(
                "The offset of the first block of overview of index %d should "
                "be after the one of the overview of index %d" % (i - 1, i))
    if len(data_offset) >= 2 and data_offset[0] < data_offset[1]:
        raise ValidateCloudOptimizedGeoTIFFException(
            "The offset of the first block of the main resolution image "
            "should be after the one of the overview of index %d" %
            (ovr_count - 1))
def vsigs_1():
    # /vsigs/ (Google Cloud Storage) error handling: bad header file,
    # missing credentials (GS_SECRET_ACCESS_KEY / GS_ACCESS_KEY_ID), and
    # opening with invalid credentials.
    if not gdaltest.built_against_curl():
        return 'skip'
    # Invalid header filename
    gdal.ErrorReset()
    gdal.SetConfigOption('GDAL_HTTP_HEADER_FILE', '/i_dont/exist.py')
    f = open_for_read('/vsigs/foo/bar')
    if f is None:
        gdal.SetConfigOption('GDAL_HTTP_HEADER_FILE', None)
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        data = gdal.VSIFReadL(1, 1, f)
    last_err = gdal.GetLastErrorMsg()
    gdal.SetConfigOption('GDAL_HTTP_HEADER_FILE', None)
    gdal.VSIFCloseL(f)
    # Read must yield nothing and report the header-file read failure.
    if len(data) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if last_err.find('Cannot read') < 0:
        gdaltest.post_reason('fail')
        print(last_err)
        return 'fail'
    # Invalid content for header file
    gdal.SetConfigOption('GDAL_HTTP_HEADER_FILE', 'vsigs.py')
    f = open_for_read('/vsigs/foo/bar')
    if f is None:
        gdal.SetConfigOption('GDAL_HTTP_HEADER_FILE', None)
        gdaltest.post_reason('fail')
        return 'fail'
    data = gdal.VSIFReadL(1, 1, f)
    gdal.SetConfigOption('GDAL_HTTP_HEADER_FILE', None)
    gdal.VSIFCloseL(f)
    if len(data) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    # Missing GS_SECRET_ACCESS_KEY
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs/foo/bar')
    if f is not None or gdal.VSIGetLastErrorMsg().find('GS_SECRET_ACCESS_KEY') < 0:
        gdaltest.post_reason('fail')
        print(gdal.VSIGetLastErrorMsg())
        return 'fail'
    # Same check through the streaming filesystem.
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs_streaming/foo/bar')
    if f is not None or gdal.VSIGetLastErrorMsg().find('GS_SECRET_ACCESS_KEY') < 0:
        gdaltest.post_reason('fail')
        print(gdal.VSIGetLastErrorMsg())
        return 'fail'
    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', 'GS_SECRET_ACCESS_KEY')
    # Missing GS_ACCESS_KEY_ID
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs/foo/bar')
    if f is not None or gdal.VSIGetLastErrorMsg().find('GS_ACCESS_KEY_ID') < 0:
        gdaltest.post_reason('fail')
        print(gdal.VSIGetLastErrorMsg())
        return 'fail'
    gdal.SetConfigOption('GS_ACCESS_KEY_ID', 'GS_ACCESS_KEY_ID')
    # ERROR 1: The User Id you provided does not exist in our records.
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs/foo/bar.baz')
    if f is not None or gdal.VSIGetLastErrorMsg() == '':
        if f is not None:
            gdal.VSIFCloseL(f)
        # AppVeyor CI environment is known to behave differently here.
        if gdal.GetConfigOption('APPVEYOR') is not None:
            return 'success'
        gdaltest.post_reason('fail')
        print(gdal.VSIGetLastErrorMsg())
        return 'fail'
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs_streaming/foo/bar.baz')
    if f is not None or gdal.VSIGetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        print(gdal.VSIGetLastErrorMsg())
        return 'fail'
    return 'success'
def virtualmem_4():
    # GetVirtualMemAutoArray(): write two bands through virtual-memory
    # arrays (both PIXEL and BAND interleaving), check that two write
    # mappings of the same band observe each other, then re-open read-only
    # and verify the pixel values.  Linux-only (relies on mmap auto arrays).
    if gdal.GetConfigOption('SKIP_VIRTUALMEM'):
        return 'skip'
    try:
        from osgeo import gdalnumeric
    except ImportError:
        return 'skip'
    if not sys.platform.startswith('linux'):
        return 'skip'
    tmpfile = 'tmp/virtualmem_4.tif'
    for option in ['INTERLEAVE=PIXEL', 'INTERLEAVE=BAND']:
        gdal.Unlink(tmpfile)
        ds = gdal.GetDriverByName('GTiff').Create(tmpfile,
                                                  400,
                                                  301,
                                                  2,
                                                  options=[option])
        # Probe once: mmap() may legitimately fail (e.g. restricted env).
        ar1 = ds.GetRasterBand(1).GetVirtualMemAutoArray(gdal.GF_Write)
        if gdal.GetLastErrorMsg().find('mmap() failed') >= 0:
            ar1 = None
            ds = None
            return 'skip'
        ar1 = None
        ar1 = ds.GetRasterBand(1).GetVirtualMemAutoArray(gdal.GF_Write)
        ar1_bis = ds.GetRasterBand(1).GetVirtualMemAutoArray(gdal.GF_Write)
        ar2 = ds.GetRasterBand(2).GetVirtualMemAutoArray(gdal.GF_Write)
        for y in range(ds.RasterYSize):
            ar1[y].fill(127)
            ar2[y].fill(255)
        # A second mapping of band 1 must see the values written via ar1.
        val = ar1_bis[0][0]
        # We need to destroy the array before dataset destruction
        ar1 = None
        ar1_bis = None
        ar2 = None
        ds = None
        if val != 127:
            gdaltest.post_reason('fail')
            print(val)
            return 'fail'
        # Re-open read-only and verify every row of both bands.
        ds = gdal.Open(tmpfile)
        ar1 = ds.GetRasterBand(1).GetVirtualMemAutoArray(gdal.GF_Read)
        ar2 = ds.GetRasterBand(2).GetVirtualMemAutoArray(gdal.GF_Read)
        ar_127 = gdalnumeric.empty(ds.RasterXSize)
        ar_127.fill(127)
        ar_255 = gdalnumeric.empty(ds.RasterXSize)
        ar_255.fill(255)
        for y in range(ds.RasterYSize):
            if not gdalnumeric.array_equal(ar1[y], ar_127):
                gdaltest.post_reason('fail')
                ar1 = None
                ar2 = None
                ds = None
                return 'fail'
            if not gdalnumeric.array_equal(ar2[y], ar_255):
                gdaltest.post_reason('fail')
                ar1 = None
                ar2 = None
                ds = None
                return 'fail'
        # We need to destroy the array before dataset destruction
        ar1 = None
        ar2 = None
        ds = None
        gdal.GetDriverByName('GTiff').Delete(tmpfile)
    return 'success'
def wms_8():
    # GDAL_WMS TMS cache test: populate ./tmp/gdalwmscache from an OSM tile
    # server, verify the expected hashed cache entries exist, re-read through
    # the cache, check the cache is actually used (fake tiles), test the
    # GDAL_DEFAULT_WMS_CACHE_PATH config option, and finally the <Expires>
    # cache-expiry tag.
    if gdaltest.wms_drv is None:
        return 'skip'
    # NOTE(review): a large commented-out variant of this test targeting
    # http://tilecache.osgeo.org/wms-c/Basic.py (with TileLevel 19 and
    # ovr_upper_level 18) was removed here for readability; see VCS history.
    server_url = 'http://tile.openstreetmap.org'
    wmstms_version = ''
    zero_tile = '/0/0/0.png'
    server_url_mask = server_url + '/${z}/${x}/${y}.png'
    ovr_upper_level = 16
    tms = """<GDAL_WMS> <Service name="TMS"> <ServerUrl>%s</ServerUrl> </Service> <DataWindow> <UpperLeftX>-20037508.34</UpperLeftX> <UpperLeftY>20037508.34</UpperLeftY> <LowerRightX>20037508.34</LowerRightX> <LowerRightY>-20037508.34</LowerRightY> <TileLevel>18</TileLevel> <TileCountX>1</TileCountX> <TileCountY>1</TileCountY> <YOrigin>top</YOrigin> </DataWindow> <Projection>EPSG:3857</Projection> <BlockSizeX>256</BlockSizeX> <BlockSizeY>256</BlockSizeY> <BandsCount>3</BandsCount> <Cache><Path>./tmp/gdalwmscache</Path></Cache> </GDAL_WMS>""" % server_url_mask
    tms_nocache = """<GDAL_WMS> <Service name="TMS"> <ServerUrl>%s</ServerUrl> </Service> <DataWindow> <UpperLeftX>-20037508.34</UpperLeftX> <UpperLeftY>20037508.34</UpperLeftY> <LowerRightX>20037508.34</LowerRightX> <LowerRightY>-20037508.34</LowerRightY> <TileLevel>18</TileLevel> <TileCountX>1</TileCountX> <TileCountY>1</TileCountY> <YOrigin>top</YOrigin> </DataWindow> <Projection>EPSG:3857</Projection> <BlockSizeX>256</BlockSizeX> <BlockSizeY>256</BlockSizeY> <BandsCount>3</BandsCount> <Cache/> <!-- this is needed for GDAL_DEFAULT_WMS_CACHE_PATH to be triggered --> </GDAL_WMS>""" % server_url_mask
    # Network availability gate.
    if gdaltest.gdalurlopen(server_url) is None:
        return 'skip'
    try:
        shutil.rmtree('tmp/gdalwmscache')
    except:
        pass
    ds = gdal.Open( tms )
    if ds is None:
        gdaltest.post_reason( 'open failed.' )
        return 'fail'
    # Check cache metadata item
    cache_path = ds.GetMetadataItem("CACHE_PATH")
    if len(cache_path) == 0:
        gdaltest.post_reason( 'did not get expected cache path metadata item' )
        return 'fail'
    # Cache layout: <cache>/<md5(url_mask)>/<c0>/<c1>/<md5(tile_url)>.
    cache_subfolder = hashlib.md5(server_url_mask.encode('utf-8')).hexdigest()
    gdal.ErrorReset()
    data = ds.GetRasterBand(1).GetOverview(ovr_upper_level).ReadRaster(0, 0, 512, 512)
    if gdal.GetLastErrorMsg() != '':
        # Distinguish a server outage from a real failure.
        if gdaltest.gdalurlopen(server_url + zero_tile) is None:
            return 'skip'
    ds = None
    file1 = hashlib.md5((server_url + wmstms_version + '/1/0/0.png').encode('utf-8')).hexdigest()
    file2 = hashlib.md5((server_url + wmstms_version + '/1/1/0.png').encode('utf-8')).hexdigest()
    file3 = hashlib.md5((server_url + wmstms_version + '/1/0/1.png').encode('utf-8')).hexdigest()
    file4 = hashlib.md5((server_url + wmstms_version + '/1/1/1.png').encode('utf-8')).hexdigest()
    expected_files = [
        'tmp/gdalwmscache/%s/%s/%s/%s' % (cache_subfolder, file1[0], file1[1], file1),
        'tmp/gdalwmscache/%s/%s/%s/%s' % (cache_subfolder, file2[0], file2[1], file2),
        'tmp/gdalwmscache/%s/%s/%s/%s' % (cache_subfolder, file3[0], file3[1], file3),
        'tmp/gdalwmscache/%s/%s/%s/%s' % (cache_subfolder, file4[0], file4[1], file4)]
    for expected_file in expected_files:
        try:
            os.stat(expected_file)
        except:
            gdaltest.post_reason( '%s should exist' % expected_file)
            return 'fail'
    # Now, we should read from the cache
    ds = gdal.Open( tms )
    cached_data = ds.GetRasterBand(1).GetOverview(ovr_upper_level).ReadRaster(0, 0, 512, 512)
    ds = None
    if data != cached_data:
        gdaltest.post_reason( 'data != cached_data' )
        return 'fail'
    # Replace the cache with fake data
    for expected_file in expected_files:
        ds = gdal.GetDriverByName('GTiff').Create(expected_file, 256, 256, 4)
        ds.GetRasterBand(1).Fill(0)
        ds.GetRasterBand(2).Fill(0)
        ds.GetRasterBand(3).Fill(0)
        ds.GetRasterBand(4).Fill(255)
        ds = None
    # Read again from the cache, and check that it is actually used
    ds = gdal.Open( tms )
    cs = ds.GetRasterBand(1).GetOverview(ovr_upper_level).Checksum()
    ds = None
    # All-zero fake tiles => checksum 0 proves the cache was read.
    if cs != 0:
        gdaltest.post_reason( 'cs != 0' )
        return 'fail'
    # Test with GDAL_DEFAULT_WMS_CACHE_PATH
    # Now, we should read from the cache
    gdal.SetConfigOption("GDAL_DEFAULT_WMS_CACHE_PATH", "./tmp/gdalwmscache")
    ds = gdal.Open( tms_nocache )
    cs = ds.GetRasterBand(1).GetOverview(ovr_upper_level).Checksum()
    ds = None
    gdal.SetConfigOption("GDAL_DEFAULT_WMS_CACHE_PATH", None)
    if cs != 0:
        gdaltest.post_reason( 'cs != 0' )
        return 'fail'
    # Check maxsize and expired tags
    tms_expires = """<GDAL_WMS> <Service name="TMS"> <ServerUrl>%s</ServerUrl> </Service> <DataWindow> <UpperLeftX>-20037508.34</UpperLeftX> <UpperLeftY>20037508.34</UpperLeftY> <LowerRightX>20037508.34</LowerRightX> <LowerRightY>-20037508.34</LowerRightY> <TileLevel>18</TileLevel> <TileCountX>1</TileCountX> <TileCountY>1</TileCountY> <YOrigin>top</YOrigin> </DataWindow> <Projection>EPSG:3857</Projection> <BlockSizeX>256</BlockSizeX> <BlockSizeY>256</BlockSizeY> <BandsCount>3</BandsCount> <Cache><Path>./tmp/gdalwmscache</Path><Expires>1</Expires></Cache> </GDAL_WMS>""" % server_url_mask
    # Record the newest mtime among the (fake) cached tiles.
    mod_time = 0
    for expected_file in expected_files:
        tm = os.path.getmtime(expected_file)
        if tm > mod_time:
            mod_time = tm
    ds = gdal.Open( tms_expires )
    # Sleep past the 1-second expiry so every cached tile is stale.
    sleep(1.05)
    data = ds.GetRasterBand(1).GetOverview(ovr_upper_level).ReadRaster(0, 0, 512, 512)
    # tiles should be overwritten by new ones
    for expected_file in expected_files:
        if os.path.getmtime(expected_file) <= mod_time:
            return 'fail'
    return 'success'
def __getmetadata__(self, f=None):
    '''Generate metadata for generic imagery.

    @type f: string
    @param f: a filepath to the dataset or a VRT XML string
    @return: None
    @todo: We force a NoData value. This is not ideal, but it makes for
        better overview images.

    Populates self.metadata (filetype, size, srs/epsg/units, corner
    coordinates, cell size, rotation, datatype, nodata, filesize,
    compression info) and self.extent from the opened GDAL dataset.
    '''
    if not f:
        f = self.fileinfo['filepath']
    # Record the working directory BEFORE entering the try so the finally
    # clause can always restore it (previously a failure on the very first
    # statement left `cwd` unbound and the finally raised NameError).
    cwd = os.path.abspath(os.curdir)
    try:
        if os.path.exists(f) and os.path.dirname(f):
            # chdir next to the dataset so relative sidecar files resolve.
            p = os.path.split(f)[0]
            os.chdir(p)
        if not self._gdaldataset:
            self._gdaldataset = geometry.OpenDataset(
                f)  # in case we're subclassed and there's already a dataset open
        if self._gdaldataset:
            driver = self._gdaldataset.GetDriver().ShortName
            if driver[0:3] == 'HDF':
                # raise-as-call syntax: valid in both Python 2 and 3
                # (the old "raise Exc, msg" form is Python-2 only).
                raise NotImplementedError(
                    'HDF files are not yet implemented except by custom formats')
            self.metadata['filetype'] = driver + '/' + self._gdaldataset.GetDriver().LongName
            self.metadata['cols'] = self._gdaldataset.RasterXSize
            self.metadata['rows'] = self._gdaldataset.RasterYSize
            self.metadata['nbands'] = self._gdaldataset.RasterCount
            self.metadata['srs'] = self._gdaldataset.GetProjection()
            if not self.metadata['srs'] and self._gdaldataset.GetGCPCount() > 0:
                self.metadata['srs'] = self._gdaldataset.GetGCPProjection()
            self.metadata['epsg'] = spatialreferences.IdentifyAusEPSG(
                self.metadata['srs'])
            self.metadata['units'] = spatialreferences.GetLinearUnitsName(
                self.metadata['srs'])
            geotransform = self._gdaldataset.GetGeoTransform()
            if geotransform == (0, 1, 0, 0, 0, 1):
                # Identity geotransform: fall back to GCP-derived georef.
                if self._gdaldataset.GetGCPCount() > 0:
                    gcps = self._gdaldataset.GetGCPs()
                    geotransform = gdal.GCPsToGeoTransform(gcps)
                    gcps = geometry.GeoTransformToGCPs(
                        geotransform, self.metadata['cols'],
                        self.metadata['rows'])  # Just get the 4 corner GCP's
                else:
                    raise NotImplementedError('Dataset is not georeferenced')
            else:
                gcps = geometry.GeoTransformToGCPs(geotransform,
                                                   self.metadata['cols'],
                                                   self.metadata['rows'])
            ext = [[gcp.GCPX, gcp.GCPY] for gcp in gcps]
            ext.append([gcps[0].GCPX, gcps[0].GCPY])  # Add the 1st point to close the polygon)
            # Reproject corners to lon,lat
            geom = geometry.GeomFromExtent(ext)
            src_srs = osr.SpatialReference()
            src_srs.ImportFromWkt(self.metadata['srs'])
            tgt_srs = osr.SpatialReference()
            tgt_srs.ImportFromEPSG(4326)
            geom = geometry.ReprojectGeom(geom, src_srs, tgt_srs)
            points = geom.GetGeometryRef(0)  # geom.GetBoundary()
            ext = [[points.GetX(i), points.GetY(i)]
                   for i in range(0, points.GetPointCount())]
            self.metadata['cellx'], self.metadata['celly'] = geometry.CellSize(geotransform)
            self.metadata['rotation'] = geometry.Rotation(geotransform)
            # Near-zero rotations are treated as map-oriented imagery.
            if abs(self.metadata['rotation']) < 1.0:
                self.metadata['orientation'] = 'Map oriented'
                self.metadata['rotation'] = 0.0
            else:
                self.metadata['orientation'] = 'Path oriented'
            self.metadata['UL'] = '%s,%s' % tuple(ext[0])
            self.metadata['LL'] = '%s,%s' % tuple(ext[1])
            self.metadata['LR'] = '%s,%s' % tuple(ext[2])
            self.metadata['UR'] = '%s,%s' % tuple(ext[3])
            rb = self._gdaldataset.GetRasterBand(1)
            if rb:
                self.metadata['datatype'] = gdal.GetDataTypeName(rb.DataType)
                self.metadata['nbits'] = gdal.GetDataTypeSize(rb.DataType)
                nodata = rb.GetNoDataValue()
                if nodata is not None:
                    self.metadata['nodata'] = str(nodata)
                else:
                    ct = rb.GetColorTable()  # Fix for Issue 31
                    if ct is None:
                        if self.metadata['datatype'][0:4] in ['Byte', 'UInt']:
                            nodata = 0  # Unsigned, assume 0
                        else:
                            nodata = -2**(self.metadata['nbits'] - 1)  # Signed, assume min value in data range
                        self.metadata['nodata'] = str(nodata)
                        # Fix for Issue 17
                        for i in range(1, self._gdaldataset.RasterCount + 1):
                            self._gdaldataset.GetRasterBand(i).SetNoDataValue(nodata)
            else:
                raise IOError('No valid rasterbands found.')
            metadata = self._gdaldataset.GetMetadata()
            self.metadata['metadata'] = '\n'.join(
                ['%s: %s' % (m, metadata[m]) for m in metadata])
            self.metadata['filesize'] = sum(
                [os.path.getsize(tmp) for tmp in self.filelist])
            if self.metadata['filesize'] > 0:
                self.metadata['compressionratio'] = int(
                    (self.metadata['nbands'] * self.metadata['cols'] *
                     self.metadata['rows'] * (self.metadata['nbits'] / 8.0)) /
                    self.metadata['filesize'])
                if self.metadata['compressionratio'] > 0:
                    # Best-effort compression-type detection; any failure
                    # simply reports 'Unknown'.
                    try:
                        if driver[0:3] == 'JP2':
                            self.metadata['compressiontype'] = "JPEG2000"
                        elif driver[0:3] == 'ECW':
                            self.metadata['compressiontype'] = "ECW"
                        else:
                            mdis = self._gdaldataset.GetMetadata('IMAGE_STRUCTURE')
                            #self.metadata['compressiontype']=mdis['IMAGE_STRUCTURE']
                            self.metadata['compressiontype'] = mdis['COMPRESSION']
                    except:
                        self.metadata['compressiontype'] = 'Unknown'
                else:
                    self.metadata['compressiontype'] = 'None'
            self.extent = ext
        else:
            errmsg = gdal.GetLastErrorMsg()
            raise IOError('Unable to open %s\n%s' % (f, errmsg.strip()))
    finally:  # Cleanup
        gdal.ErrorReset()
        os.chdir(cwd)
def main(argv=None):
    """Convert GDAL raster metadata into a sparse FGDC metadata XML file.

    Parses gdalinfo-style command line options, opens the raster named on the
    command line, loads an FGDC XML template (lxml), fills in projection,
    resolution, bounding box and raster-size elements, and writes the result
    to the destination XML path.

    Usage: <raster file> <template xml> <destination xml> [options]

    Returns 0 on success, 1 / Usage() result on bad arguments; exits via
    sys.exit(1) when the dataset or its coordinate system cannot be read.
    """
    bComputeMinMax = False
    bSample = False
    bShowGCPs = True
    bShowMetadata = True
    bShowRAT = False
    debug = False
    bStats = False
    bApproxStats = True
    bShowColorTable = True
    bComputeChecksum = False
    bReportHistograms = False
    pszFilename = None
    papszExtraMDDomains = []
    pszProjection = None
    hTransform = None
    bShowFileList = True
    dst_xml = None
    template_xml = None
    bands = 1
    iOverview = None

    if argv is None:
        argv = sys.argv
    argv = gdal.GeneralCmdLineProcessor(argv)
    if argv is None:
        return 1
    nArgc = len(argv)

    # --------------------------------------------------------------------
    # Parse arguments.
    # --------------------------------------------------------------------
    i = 1
    while i < nArgc:
        if EQUAL(argv[i], "--utility_version"):
            print("%s is running against GDAL %s" %
                  (argv[0], gdal.VersionInfo("RELEASE_NAME")))
            return 0
        elif EQUAL(argv[i], "-debug"):
            debug = True
        elif EQUAL(argv[i], "-mm"):
            bComputeMinMax = True
        elif EQUAL(argv[i], "-hist"):
            bReportHistograms = True
        elif EQUAL(argv[i], "-stats"):
            bStats = True
            bApproxStats = False
        elif EQUAL(argv[i], "-approx_stats"):
            bStats = True
            bApproxStats = True
        elif EQUAL(argv[i], "-sample"):
            bSample = True
        elif EQUAL(argv[i], "-checksum"):
            bComputeChecksum = True
        elif EQUAL(argv[i], "-nogcp"):
            bShowGCPs = False
        elif EQUAL(argv[i], "-nomd"):
            bShowMetadata = False
        elif EQUAL(argv[i], "-norat"):
            bShowRAT = False
        elif EQUAL(argv[i], "-noct"):
            bShowColorTable = False
        elif EQUAL(argv[i], "-mdd") and i < nArgc - 1:
            i = i + 1
            papszExtraMDDomains.append(argv[i])
        elif EQUAL(argv[i], "-nofl"):
            bShowFileList = False
        elif argv[i][0] == '-':
            return Usage(argv[0])
        elif pszFilename is None:
            pszFilename = argv[i]
        elif template_xml is None:
            template_xml = argv[i]
        elif dst_xml is None:
            dst_xml = argv[i]
        else:
            return Usage(argv[0])
        i = i + 1

    # All three positional arguments are mandatory.
    if pszFilename is None:
        return Usage(argv[0])
    if template_xml is None:
        return Usage(argv[0])
    if dst_xml is None:
        return Usage(argv[0])

    # --------------------------------------------------------------------
    # Open GDAL dataset.
    # --------------------------------------------------------------------
    hDataset = gdal.Open(pszFilename, gdal.GA_ReadOnly)
    if hDataset is None:
        print("gdalinfo failed - unable to open '%s'." % pszFilename)
        sys.exit(1)

    # --------------------------------------------------------------------
    # Load XML template file (generally fgdc-template.xml)
    # --------------------------------------------------------------------
    parser = etree.XMLParser(remove_blank_text=True)
    tree = etree.parse(template_xml, parser)
    for lworkcit in tree.getiterator('lworkcit'):
        for citeinfo in lworkcit.getiterator('citeinfo'):
            title = citeinfo.find('title')
            if title is None:
                title = etree.SubElement(citeinfo, 'title')
            title.text = pszFilename

    # --------------------------------------------------------------------
    # Report general info.
    # --------------------------------------------------------------------
    hDriver = hDataset.GetDriver()
    if debug:
        print( "Driver: %s/%s" % ( \
                hDriver.ShortName, \
                hDriver.LongName ))

    papszFileList = hDataset.GetFileList()
    if papszFileList is None or len(papszFileList) == 0:
        print("Files: none associated")
    else:
        if debug:
            print("Files: %s" % papszFileList[0])
            if bShowFileList:
                for i in range(1, len(papszFileList)):
                    print(" %s" % papszFileList[i])

    if debug:
        print("Size is %d, %d" % (hDataset.RasterXSize, hDataset.RasterYSize))

    # --------------------------------------------------------------------
    # Report projection.
    # --------------------------------------------------------------------
    pszProjection = hDataset.GetProjectionRef()
    if pszProjection is not None:
        hSRS = osr.SpatialReference()
        if hSRS.ImportFromWkt(pszProjection) == gdal.CE_None:
            pszPrettyWkt = hSRS.ExportToPrettyWkt(False)
            mapProjection = "None"
            # Extract projection information; "D_"/"_2000" trimming matches
            # planetary datum naming conventions.
            target = hSRS.GetAttrValue("DATUM", 0).replace("D_", "").replace("_2000", "")
            semiMajor = hSRS.GetSemiMajor()  # / 1000.0
            semiMinor = hSRS.GetSemiMinor()  # / 1000.0
            invFlat = hSRS.GetInvFlattening()
            if pszProjection[0:6] == "GEOGCS":
                mapProjection = "SIMPLE_CYLINDRICAL"
                centLat = 0
                centLon = 0
            if pszProjection[0:6] == "PROJCS":
                mapProjection = hSRS.GetAttrValue("PROJECTION", 0)

            for horizsys in tree.getiterator('horizsys'):
                # Rebuild the <horizsys> planar/mapproj section from scratch.
                horizsys.clear()
                planar = etree.SubElement(horizsys, 'planar')
                mapproj = etree.SubElement(planar, 'mapproj')
                mapprojn = etree.SubElement(mapproj, 'mapprojn')

                if EQUAL(mapProjection, "Equirectangular"):
                    mapprojn.text = "Equirectangular"
                    centLat = hSRS.GetProjParm('standard_parallel_1')
                    if centLat is None:
                        centLat = hSRS.GetProjParm('latitude_of_origin')
                    centLon = hSRS.GetProjParm('central_meridian')
                    equirect = etree.SubElement(mapproj, 'equirect')
                    stdparll = etree.SubElement(equirect, 'stdparll')
                    stdparll.text = str(centLat)
                    longcm = etree.SubElement(equirect, 'longcm')
                    longcm.text = str(centLon)
                    feast = etree.SubElement(equirect, 'feast')
                    feast.text = str(hSRS.GetProjParm('false_easting'))
                    fnorth = etree.SubElement(equirect, 'fnorth')
                    fnorth.text = str(hSRS.GetProjParm('false_northing'))

                # Change to building projection XML section instead of replace
                # Change to merc instead of transmer
                if EQUAL(mapProjection, "Mercator"):
                    for mapprojn in tree.getiterator('mapprojn'):
                        mapprojn.text = "Mercator"
                    centLat = hSRS.GetProjParm('latitude_of_origin')
                    if centLat is None:
                        centLat = hSRS.GetProjParm('standard_parallel_1')
                    centLon = hSRS.GetProjParm('central_meridian')
                    scale = hSRS.GetProjParm('scale_factor')
                    for merc in tree.getiterator('transmer'):
                        for stdparll in merc.getiterator('stdparll'):
                            stdparll.text = str(centLat)
                        for longcm in merc.getiterator('longcm'):
                            longcm.text = str(centLon)
                        for sfequat in merc.getiterator('sfequat'):
                            sfequat.text = str(scale)
                        for feast in merc.getiterator('feast'):
                            feast.text = str(hSRS.GetProjParm('false_easting'))
                        for fnorth in merc.getiterator('fnorth'):
                            fnorth.text = str(hSRS.GetProjParm('false_northing'))

                # FIX: original compared against "Orthographic " (trailing
                # space) and queried 'latitude_of_origin ' — neither can match.
                if EQUAL(mapProjection, "Orthographic"):
                    for mapprojn in tree.getiterator('mapprojn'):
                        mapprojn.text = "Orthographic"
                    centLat = hSRS.GetProjParm('latitude_of_origin')
                    centLon = hSRS.GetProjParm('central_meridian')
                    for orthogr in tree.getiterator('orthogr'):
                        for stdparll in orthogr.getiterator('stdparll'):
                            stdparll.text = str(centLat)
                        for longcm in orthogr.getiterator('longcm'):
                            longcm.text = str(centLon)
                        for feast in orthogr.getiterator('feast'):
                            feast.text = str(hSRS.GetProjParm('false_easting'))
                        for fnorth in orthogr.getiterator('fnorth'):
                            fnorth.text = str(hSRS.GetProjParm('false_northing'))

                if EQUAL(mapProjection, "Stereographic"):
                    for mapprojn in tree.getiterator('mapprojn'):
                        mapprojn.text = "Stereographic"
                    centLat = hSRS.GetProjParm('latitude_of_origin')
                    centLon = hSRS.GetProjParm('central_meridian')
                    for stereo in tree.getiterator('stereo'):
                        for latprjc in stereo.getiterator('latprjc'):
                            latprjc.text = str(centLat)
                        for longpc in stereo.getiterator('longpc'):
                            longpc.text = str(centLon)
                        for feast in stereo.getiterator('feast'):
                            feast.text = str(hSRS.GetProjParm('false_easting'))
                        for fnorth in stereo.getiterator('fnorth'):
                            fnorth.text = str(hSRS.GetProjParm('false_northing'))

                if EQUAL(mapProjection, "Sinusoidal"):
                    for mapprojn in tree.getiterator('mapprojn'):
                        mapprojn.text = "Sinusoidal"
                    centLon = hSRS.GetProjParm('longitude_of_center')
                    if centLon is None:
                        centLon = hSRS.GetProjParm('central_meridian')
                    for sinusoid in tree.getiterator('sinusoid'):
                        for longcm in sinusoid.getiterator('longcm'):
                            longcm.text = str(centLon)
                        for feast in sinusoid.getiterator('feast'):
                            feast.text = str(hSRS.GetProjParm('false_easting'))
                        for fnorth in sinusoid.getiterator('fnorth'):
                            fnorth.text = str(hSRS.GetProjParm('false_northing'))

                if EQUAL(mapProjection, "Robinson"):
                    for mapprojn in tree.getiterator('mapprojn'):
                        mapprojn.text = "Robinson"
                    centLon = hSRS.GetProjParm('longitude_of_center')
                    if centLon is None:
                        centLon = hSRS.GetProjParm('central_meridian')
                    for robinson in tree.getiterator('robinson'):
                        for longpc in robinson.getiterator('longpc'):
                            longpc.text = str(centLon)
                        for feast in robinson.getiterator('feast'):
                            feast.text = str(hSRS.GetProjParm('false_easting'))
                        for fnorth in robinson.getiterator('fnorth'):
                            fnorth.text = str(hSRS.GetProjParm('false_northing'))

                if (EQUAL(mapProjection, "Polar_Stereographic")
                        or EQUAL(mapProjection, "Stereographic_North_Pole")
                        or EQUAL(mapProjection, "Stereographic_South_Pole")):
                    for mapprojn in tree.getiterator('mapprojn'):
                        mapprojn.text = "Polar Stereographic"
                    centLat = hSRS.GetProjParm('latitude_of_origin')
                    centLon = hSRS.GetProjParm('central_meridian')
                    scale = hSRS.GetProjParm('scale_factor')
                    for polarst in tree.getiterator('polarst'):
                        for stdparll in polarst.getiterator('stdparll'):
                            stdparll.text = str(centLat)
                        for svlong in polarst.getiterator('svlong'):
                            svlong.text = str(centLon)
                        for sfprjorg in polarst.getiterator('sfprjorg'):
                            sfprjorg.text = str(scale)
                        for feast in polarst.getiterator('feast'):
                            feast.text = str(hSRS.GetProjParm('false_easting'))
                        for fnorth in polarst.getiterator('fnorth'):
                            fnorth.text = str(hSRS.GetProjParm('false_northing'))

                if EQUAL(mapProjection, "Transverse_Mercator"):
                    for mapprojn in tree.getiterator('mapprojn'):
                        mapprojn.text = "Transverse Mercator"
                    centLat = hSRS.GetProjParm('latitude_of_origin')
                    centLon = hSRS.GetProjParm('central_meridian')
                    scale = hSRS.GetProjParm('scale_factor')
                    for transmer in tree.getiterator('transmer'):
                        for latprjo in transmer.getiterator('latprjo'):
                            latprjo.text = str(centLat)
                        for longcm in transmer.getiterator('longcm'):
                            longcm.text = str(centLon)
                        for sfctrmer in transmer.getiterator('sfctrmer'):
                            sfctrmer.text = str(scale)
                        for feast in transmer.getiterator('feast'):
                            feast.text = str(hSRS.GetProjParm('false_easting'))
                        for fnorth in transmer.getiterator('fnorth'):
                            fnorth.text = str(hSRS.GetProjParm('false_northing'))

                # Create cellsize block for all projections
                planci = etree.SubElement(planar, 'planci')
                plance = etree.SubElement(planci, 'plance')
                plance.text = 'row and column'
                coordrep = etree.SubElement(planci, 'coordrep')
                absres = etree.SubElement(coordrep, 'absres')
                ordres = etree.SubElement(coordrep, 'ordres')
                plandu = etree.SubElement(planci, 'plandu')

            if debug:
                print("Coordinate System is:\n%s" % pszPrettyWkt)
        else:
            print("Warning - Can't parse this type of projection\n")
            print("Coordinate System is `%s'" % pszProjection)
            sys.exit(1)
    else:
        print("Warning - No Coordinate System defined:\n")
        sys.exit(1)

    # --------------------------------------------------------------------
    # Report Geotransform.
    # --------------------------------------------------------------------
    adfGeoTransform = hDataset.GetGeoTransform(can_return_null=True)
    if adfGeoTransform is not None:
        if adfGeoTransform[2] == 0.0 and adfGeoTransform[4] == 0.0:
            if debug:
                print( "Origin = (%.15f,%.15f)" % ( \
                        adfGeoTransform[0], adfGeoTransform[3] ))
                print( "Pixel Size = (%.15f,%.15f)" % ( \
                        adfGeoTransform[1], adfGeoTransform[5] ))
        else:
            if debug:
                print( "GeoTransform =\n" \
                        " %.16g, %.16g, %.16g\n" \
                        " %.16g, %.16g, %.16g" % ( \
                        adfGeoTransform[0], \
                        adfGeoTransform[1], \
                        adfGeoTransform[2], \
                        adfGeoTransform[3], \
                        adfGeoTransform[4], \
                        adfGeoTransform[5] ))

        if pszProjection[0:6] == "GEOGCS":
            # convert degrees/pixel to km/pixel
            mapres = 1 / adfGeoTransform[1]
            lonres = adfGeoTransform[1]
            latres = adfGeoTransform[5]
            kmres = adfGeoTransform[1] * (semiMajor * math.pi / 180.0) / 1000.0
        else:
            # convert m/pixel to pixel/degree
            mapres = 1 / (adfGeoTransform[1] / (semiMajor * math.pi / 180.0))
            lonres = adfGeoTransform[1] / (semiMajor * math.pi / 180.0)
            latres = adfGeoTransform[5] / (semiMajor * math.pi / 180.0)
            xres = adfGeoTransform[1]
            yres = adfGeoTransform[5]
            kmres = adfGeoTransform[1] / 1000.0

    # --------------------------------------------------------------------
    # Report GCPs.
    # --------------------------------------------------------------------
    if bShowGCPs and hDataset.GetGCPCount() > 0:
        pszProjection = hDataset.GetGCPProjection()
        if pszProjection is not None:
            hSRS = osr.SpatialReference()
            if hSRS.ImportFromWkt(pszProjection) == gdal.CE_None:
                pszPrettyWkt = hSRS.ExportToPrettyWkt(False)
                if debug:
                    print("GCP Projection = \n%s" % pszPrettyWkt)
            else:
                if debug:
                    print( "GCP Projection = %s" % \
                            pszProjection )

        gcps = hDataset.GetGCPs()
        i = 0
        for gcp in gcps:
            if debug:
                print( "GCP[%3d]: Id=%s, Info=%s\n" \
                        " (%.15g,%.15g) -> (%.15g,%.15g,%.15g)" % ( \
                        i, gcp.Id, gcp.Info, \
                        gcp.GCPPixel, gcp.GCPLine, \
                        gcp.GCPX, gcp.GCPY, gcp.GCPZ ))
            i = i + 1

    # --------------------------------------------------------------------
    # Report metadata (debug only).
    # --------------------------------------------------------------------
    if debug:
        if bShowMetadata:
            papszMetadata = hDataset.GetMetadata_List()
        else:
            papszMetadata = None
        if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
            print("Metadata:")
            for metadata in papszMetadata:
                print(" %s" % metadata)

        if bShowMetadata:
            for extra_domain in papszExtraMDDomains:
                papszMetadata = hDataset.GetMetadata_List(extra_domain)
                if papszMetadata is not None and len(papszMetadata) > 0:
                    print("Metadata (%s):" % extra_domain)
                    for metadata in papszMetadata:
                        print(" %s" % metadata)

        # Report "IMAGE_STRUCTURE" metadata.
        if bShowMetadata:
            papszMetadata = hDataset.GetMetadata_List("IMAGE_STRUCTURE")
        else:
            papszMetadata = None
        if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
            print("Image Structure Metadata:")
            for metadata in papszMetadata:
                print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report subdatasets.
    # --------------------------------------------------------------------
    papszMetadata = hDataset.GetMetadata_List("SUBDATASETS")
    if papszMetadata is not None and len(papszMetadata) > 0:
        print("Subdatasets:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report geolocation.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("GEOLOCATION")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("Geolocation:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report RPCs
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("RPC")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("RPC Metadata:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Setup projected to lat/long transform if appropriate.
    # --------------------------------------------------------------------
    if pszProjection is not None and len(pszProjection) > 0:
        hProj = osr.SpatialReference(pszProjection)
        if hProj is not None:
            hLatLong = hProj.CloneGeogCS()
            if hLatLong is not None:
                gdal.PushErrorHandler('CPLQuietErrorHandler')
                hTransform = osr.CoordinateTransformation(hProj, hLatLong)
                gdal.PopErrorHandler()
                if gdal.GetLastErrorMsg().find('Unable to load PROJ.4 library') != -1:
                    hTransform = None

    # --------------------------------------------------------------------
    # Report corners.
    # --------------------------------------------------------------------
    if debug:
        print("Corner Coordinates:")
        GDALInfoReportCorner( hDataset, hTransform, "Upper Left", \
                              0.0, 0.0 )
        GDALInfoReportCorner( hDataset, hTransform, "Lower Left", \
                              0.0, hDataset.RasterYSize)
        GDALInfoReportCorner( hDataset, hTransform, "Upper Right", \
                              hDataset.RasterXSize, 0.0 )
        GDALInfoReportCorner( hDataset, hTransform, "Lower Right", \
                              hDataset.RasterXSize, \
                              hDataset.RasterYSize )
        GDALInfoReportCorner( hDataset, hTransform, "Center", \
                              hDataset.RasterXSize/2.0, \
                              hDataset.RasterYSize/2.0 )

    # Get bounds (lon/lat of the upper-left and lower-right corners).
    ulx = GDALGetLon(hDataset, hTransform, 0.0, 0.0)
    uly = GDALGetLat(hDataset, hTransform, 0.0, 0.0)
    lrx = GDALGetLon( hDataset, hTransform, hDataset.RasterXSize, \
                      hDataset.RasterYSize )
    lry = GDALGetLat( hDataset, hTransform, hDataset.RasterXSize, \
                      hDataset.RasterYSize )

    # ====================================================================
    # Loop over bands (debug report only).
    # ====================================================================
    if debug:
        bands = hDataset.RasterCount
        for iBand in range(hDataset.RasterCount):
            hBand = hDataset.GetRasterBand(iBand + 1)
            (nBlockXSize, nBlockYSize) = hBand.GetBlockSize()
            print( "Band %d Block=%dx%d Type=%s, ColorInterp=%s" % ( iBand+1, \
                    nBlockXSize, nBlockYSize, \
                    gdal.GetDataTypeName(hBand.DataType), \
                    gdal.GetColorInterpretationName( \
                        hBand.GetRasterColorInterpretation()) ))

            if hBand.GetDescription() is not None \
                    and len(hBand.GetDescription()) > 0:
                print(" Description = %s" % hBand.GetDescription())

            dfMin = hBand.GetMinimum()
            dfMax = hBand.GetMaximum()
            if dfMin is not None or dfMax is not None or bComputeMinMax:
                line = " "
                if dfMin is not None:
                    line = line + ("Min=%.3f " % dfMin)
                if dfMax is not None:
                    line = line + ("Max=%.3f " % dfMax)
                if bComputeMinMax:
                    gdal.ErrorReset()
                    adfCMinMax = hBand.ComputeRasterMinMax(False)
                    if gdal.GetLastErrorType() == gdal.CE_None:
                        line = line + ( " Computed Min/Max=%.3f,%.3f" % ( \
                                adfCMinMax[0], adfCMinMax[1] ))
                print(line)

            stats = hBand.GetStatistics(bApproxStats, bStats)
            # Dirty hack to recognize if stats are valid. If invalid, the
            # returned stddev is negative.
            if stats[3] >= 0.0:
                print( " Minimum=%.3f, Maximum=%.3f, Mean=%.3f, StdDev=%.3f" % ( \
                        stats[0], stats[1], stats[2], stats[3] ))

            if bReportHistograms:
                hist = hBand.GetDefaultHistogram(force=True,
                                                 callback=gdal.TermProgress)
                if hist is not None:
                    dfMin = hist[0]
                    dfMax = hist[1]
                    nBucketCount = hist[2]
                    panHistogram = hist[3]
                    print( " %d buckets from %g to %g:" % ( \
                            nBucketCount, dfMin, dfMax ))
                    line = ' '
                    for bucket in panHistogram:
                        line = line + ("%d " % bucket)
                    print(line)

            if bComputeChecksum:
                print(" Checksum=%d" % hBand.Checksum())

            dfNoData = hBand.GetNoDataValue()
            if dfNoData is not None:
                # NaN is the only value that compares unequal to itself.
                if dfNoData != dfNoData:
                    print(" NoData Value=nan")
                else:
                    print(" NoData Value=%.18g" % dfNoData)

            if hBand.GetOverviewCount() > 0:
                line = " Overviews: "
                for iOverview in range(hBand.GetOverviewCount()):
                    if iOverview != 0:
                        line = line + ", "
                    hOverview = hBand.GetOverview(iOverview)
                    if hOverview is not None:
                        line = line + ("%dx%d" % (hOverview.XSize,
                                                  hOverview.YSize))
                        pszResampling = \
                            hOverview.GetMetadataItem( "RESAMPLING", "" )
                        if pszResampling is not None \
                                and len(pszResampling) >= 12 \
                                and EQUAL(pszResampling[0:12], "AVERAGE_BIT2"):
                            line = line + "*"
                    else:
                        line = line + "(null)"
                print(line)

                if bComputeChecksum:
                    line = " Overviews checksum: "
                    for iOverview in range(hBand.GetOverviewCount()):
                        if iOverview != 0:
                            line = line + ", "
                        hOverview = hBand.GetOverview(iOverview)
                        if hOverview is not None:
                            line = line + ("%d" % hOverview.Checksum())
                        else:
                            line = line + "(null)"
                    print(line)

            if hBand.HasArbitraryOverviews():
                print(" Overviews: arbitrary")

            nMaskFlags = hBand.GetMaskFlags()
            if (nMaskFlags & (gdal.GMF_NODATA | gdal.GMF_ALL_VALID)) == 0:
                hMaskBand = hBand.GetMaskBand()
                line = " Mask Flags: "
                if (nMaskFlags & gdal.GMF_PER_DATASET) != 0:
                    line = line + "PER_DATASET "
                if (nMaskFlags & gdal.GMF_ALPHA) != 0:
                    line = line + "ALPHA "
                if (nMaskFlags & gdal.GMF_NODATA) != 0:
                    line = line + "NODATA "
                if (nMaskFlags & gdal.GMF_ALL_VALID) != 0:
                    line = line + "ALL_VALID "
                print(line)

                if hMaskBand is not None and \
                        hMaskBand.GetOverviewCount() > 0:
                    line = " Overviews of mask band: "
                    for iOverview in range(hMaskBand.GetOverviewCount()):
                        if iOverview != 0:
                            line = line + ", "
                        hOverview = hMaskBand.GetOverview(iOverview)
                        if hOverview is not None:
                            line = line + ("%d" % hOverview.Checksum())
                        else:
                            line = line + "(null)"

            if len(hBand.GetUnitType()) > 0:
                print(" Unit Type: %s" % hBand.GetUnitType())

            papszCategories = hBand.GetRasterCategoryNames()
            if papszCategories is not None:
                print(" Categories:")
                i = 0
                for category in papszCategories:
                    print(" %3d: %s" % (i, category))
                    i = i + 1

            if hBand.GetScale() != 1.0 or hBand.GetOffset() != 0.0:
                print( " Offset: %.15g, Scale:%.15g" % \
                        ( hBand.GetOffset(), hBand.GetScale()))

            if bShowMetadata:
                papszMetadata = hBand.GetMetadata_List()
            else:
                papszMetadata = None
            if bShowMetadata and papszMetadata is not None and len(
                    papszMetadata) > 0:
                print(" Metadata:")
                for metadata in papszMetadata:
                    print(" %s" % metadata)

            if bShowMetadata:
                papszMetadata = hBand.GetMetadata_List("IMAGE_STRUCTURE")
            else:
                papszMetadata = None
            if bShowMetadata and papszMetadata is not None and len(
                    papszMetadata) > 0:
                print(" Image Structure Metadata:")
                for metadata in papszMetadata:
                    print(" %s" % metadata)

            hTable = hBand.GetRasterColorTable()
            if hBand.GetRasterColorInterpretation() == gdal.GCI_PaletteIndex \
                    and hTable is not None:
                print( " Color Table (%s with %d entries)" % (\
                        gdal.GetPaletteInterpretationName( \
                            hTable.GetPaletteInterpretation( )), \
                        hTable.GetCount() ))
                if bShowColorTable:
                    for i in range(hTable.GetCount()):
                        sEntry = hTable.GetColorEntry(i)
                        print( " %3d: %d,%d,%d,%d" % ( \
                                i, \
                                sEntry[0],\
                                sEntry[1],\
                                sEntry[2],\
                                sEntry[3] ))

            if bShowRAT:
                hRAT = hBand.GetDefaultRAT()
                #GDALRATDumpReadable( hRAT, None );
            # FIX: the original repeated the entire overview/mask/metadata/
            # color-table report a second time here (guarded by a stale
            # "if iOverview is not None"); that copy-paste duplicate was removed.

    #/************************************************************************/
    #/* WriteXML bits to FGDC template */
    #/************************************************************************/
    for rasttype in tree.getiterator('rasttype'):
        rasttype.text = "Pixel"
    hBand = hDataset.GetRasterBand(1)
    for ellips in tree.getiterator('ellips'):
        ellips.text = target
    for semiaxis in tree.getiterator('semiaxis'):
        semiaxis.text = str(semiMajor)
    for denflat in tree.getiterator('denflat'):
        denflat.text = str(invFlat)

    if pszProjection[0:6] == "GEOGCS":
        for latSize in tree.getiterator('latres'):
            latSize.text = str(latres)
            if debug:
                print('Lat resolution: %s' % (latSize.text))
        for lonSize in tree.getiterator('lonres'):
            lonSize.text = str(lonres)
        for geogunit in tree.getiterator('geogunit'):
            geogunit.text = "Decimal degrees"
    else:
        for absres in tree.getiterator('absres'):
            # in meters
            absres.text = str(xres)
            if debug:
                print('X resolution: %s' % (absres.text))
        for ordres in tree.getiterator('ordres'):
            ordres.text = str(abs(yres))
        for plandu in tree.getiterator('plandu'):
            plandu.text = "meters"

    for southbc in tree.getiterator('southbc'):
        southbc.text = str(lry)
    for northbc in tree.getiterator('northbc'):
        northbc.text = str(uly)
    for westbc in tree.getiterator('westbc'):
        westbc.text = str(ulx)
    for eastbc in tree.getiterator('eastbc'):
        eastbc.text = str(lrx)
    for rowcount in tree.getiterator('rowcount'):
        rowcount.text = str(hDataset.RasterYSize)
    for colcount in tree.getiterator('colcount'):
        # FIX: original wrote RasterYSize here; column count is the X size.
        colcount.text = str(hDataset.RasterXSize)
    for vrtcount in tree.getiterator('vrtcount'):
        vrtcount.text = str(hDataset.RasterCount)
    for metstdn in tree.getiterator('metstdn'):
        metstdn.text = "FGDC Content Standards for Digital Geospatial Metadata"
    for metstdv in tree.getiterator('metstdv'):
        metstdv.text = "FGDC-STD-001-1998"

    # ====================================================================
    # writeout sparse XML for merging
    # ====================================================================
    try:
        #tree.write(dst_xml, pretty_print=True, xml_declaration=True)
        #mp doesn't like declaration
        # FIX: the original caught only ImportError, which tree.write cannot
        # raise at this point; include I/O failures as well.
        tree.write(dst_xml, pretty_print=True)
    except (ImportError, IOError, OSError):
        print("Failed to write out XML document")

    return 0
def validate(ds, check_tiled=True, full_check=False):
    """Check if a file is a (Geo)TIFF with cloud optimized compatible structure.

    Args:
      ds: GDAL Dataset for the file to inspect, or a filename (str) which
          will be opened read-only.
      check_tiled: Set to False to ignore missing tiling.
      full_check: Set to True to check tile/strip leader/trailer bytes.
                  Might be slow on remote files.

    Returns:
      A (warnings, errors, details) tuple: warnings and errors are lists of
      message strings (empty if none), and details is a dictionary describing
      the structure of the GeoTIFF file (IFD and first-block offsets).

    Raises:
      ValidateCloudOptimizedGeoTIFFException: Unable to open the file or the
        file is not a Tiff.
    """
    # Requires GDAL >= 2.2 for the TIFF structural metadata items used below.
    if int(gdal.VersionInfo('VERSION_NUM')) < 2020000:
        raise ValidateCloudOptimizedGeoTIFFException(
            'GDAL 2.2 or above required')

    # Py2/Py3-portable way to obtain the text type; on Py3 this is just str.
    unicode_type = type(''.encode('utf-8').decode('utf-8'))
    if isinstance(ds, (str, unicode_type)):
        gdal.PushErrorHandler()
        ds = gdal.Open(ds)
        gdal.PopErrorHandler()
        if ds is None:
            raise ValidateCloudOptimizedGeoTIFFException(
                'Invalid file : %s' % gdal.GetLastErrorMsg())
        if ds.GetDriver().ShortName != 'GTiff':
            raise ValidateCloudOptimizedGeoTIFFException(
                'The file is not a GeoTIFF')

    details = {}
    errors = []
    warnings = []
    filename = ds.GetDescription()
    main_band = ds.GetRasterBand(1)
    ovr_count = main_band.GetOverviewCount()
    filelist = ds.GetFileList()
    # COG requires internal overviews; a sidecar .ovr defeats the purpose.
    if filelist is not None and filename + '.ovr' in filelist:
        errors += [
            'Overviews found in external .ovr file. They should be internal'
        ]

    if main_band.XSize > 512 or main_band.YSize > 512:
        if check_tiled:
            block_size = main_band.GetBlockSize()
            # A block as wide as the image with height 1 is a strip layout.
            if block_size[0] == main_band.XSize and block_size[0] > 1024:
                errors += [
                    'The file is greater than 512xH or Wx512, but is not tiled'
                ]
        if ovr_count == 0:
            warnings += [
                'The file is greater than 512xH or Wx512, it is recommended '
                'to include internal overviews'
            ]

    ifd_offset = int(main_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
    ifd_offsets = [ifd_offset]

    block_order_row_major = False
    block_leader_size_as_uint4 = False
    block_trailer_last_4_bytes_repeated = False
    mask_interleaved_with_imagery = False

    # 8 (classic TIFF) / 16 (BigTIFF) would mean the IFD directly follows the
    # header; anything else may indicate GDAL's hidden structural metadata.
    if ifd_offset not in (8, 16):
        # Check if there is GDAL hidden structural metadata
        f = gdal.VSIFOpenL(filename, 'rb')
        if not f:
            raise ValidateCloudOptimizedGeoTIFFException("Cannot open file")
        signature = struct.unpack('B' * 4, gdal.VSIFReadL(4, 1, f))
        bigtiff = signature in ((0x49, 0x49, 0x2B, 0x00),
                                (0x4D, 0x4D, 0x00, 0x2B))
        if bigtiff:
            expected_ifd_pos = 16
        else:
            expected_ifd_pos = 8
        gdal.VSIFSeekL(f, expected_ifd_pos, 0)
        pattern = "GDAL_STRUCTURAL_METADATA_SIZE=%06d bytes\n" % 0
        got = gdal.VSIFReadL(len(pattern), 1, f).decode('LATIN1')
        if len(got) == len(pattern) and got.startswith(
                'GDAL_STRUCTURAL_METADATA_SIZE='):
            size = int(got[len('GDAL_STRUCTURAL_METADATA_SIZE='):][0:6])
            extra_md = gdal.VSIFReadL(size, 1, f).decode('LATIN1')
            block_order_row_major = 'BLOCK_ORDER=ROW_MAJOR' in extra_md
            block_leader_size_as_uint4 = 'BLOCK_LEADER=SIZE_AS_UINT4' in extra_md
            block_trailer_last_4_bytes_repeated = 'BLOCK_TRAILER=LAST_4_BYTES_REPEATED' in extra_md
            mask_interleaved_with_imagery = 'MASK_INTERLEAVED_WITH_IMAGERY=YES' in extra_md
            if 'KNOWN_INCOMPATIBLE_EDITION=YES' in extra_md:
                errors += [
                    "KNOWN_INCOMPATIBLE_EDITION=YES is declared in the file"
                ]
            expected_ifd_pos += len(pattern) + size
            expected_ifd_pos += expected_ifd_pos % 2  # IFD offset starts on a 2-byte boundary
        gdal.VSIFCloseL(f)

        if expected_ifd_pos != ifd_offsets[0]:
            errors += [
                'The offset of the main IFD should be %d. It is %d instead' %
                (expected_ifd_pos, ifd_offsets[0])
            ]

    details['ifd_offsets'] = {}
    details['ifd_offsets']['main'] = ifd_offset

    for i in range(ovr_count):
        # Check that overviews are by descending sizes
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        if i == 0:
            if (ovr_band.XSize > main_band.XSize
                    or ovr_band.YSize > main_band.YSize):
                errors += [
                    'First overview has larger dimension than main band'
                ]
        else:
            prev_ovr_band = ds.GetRasterBand(1).GetOverview(i - 1)
            if (ovr_band.XSize > prev_ovr_band.XSize
                    or ovr_band.YSize > prev_ovr_band.YSize):
                errors += [
                    'Overview of index %d has larger dimension than '
                    'overview of index %d' % (i, i - 1)
                ]
        if check_tiled:
            block_size = ovr_band.GetBlockSize()
            if block_size[0] == ovr_band.XSize and block_size[0] > 1024:
                errors += ['Overview of index %d is not tiled' % i]

        # Check that the IFD of descending overviews are sorted by increasing
        # offsets
        ifd_offset = int(ovr_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
        ifd_offsets.append(ifd_offset)
        details['ifd_offsets']['overview_%d' % i] = ifd_offset
        if ifd_offsets[-1] < ifd_offsets[-2]:
            if i == 0:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of the main '
                    'image, which is at byte %d' %
                    (i, ifd_offsets[-1], ifd_offsets[-2])
                ]
            else:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of index %d, '
                    'which is at byte %d' %
                    (i, ifd_offsets[-1], i - 1, ifd_offsets[-2])
                ]

    # Check that the imagery starts by the smallest overview and ends with
    # the main resolution dataset
    def get_block_offset(band):
        # Returns the file offset of the first allocated block of `band`,
        # or 0 when no block has been written (e.g. fully sparse band).
        blockxsize, blockysize = band.GetBlockSize()
        for y in range(int((band.YSize + blockysize - 1) / blockysize)):
            for x in range(int((band.XSize + blockxsize - 1) / blockxsize)):
                block_offset = band.GetMetadataItem(
                    'BLOCK_OFFSET_%d_%d' % (x, y), 'TIFF')
                if block_offset:
                    return int(block_offset)
        return 0

    block_offset = get_block_offset(main_band)
    data_offsets = [block_offset]
    details['data_offsets'] = {}
    details['data_offsets']['main'] = block_offset
    for i in range(ovr_count):
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        block_offset = get_block_offset(ovr_band)
        data_offsets.append(block_offset)
        details['data_offsets']['overview_%d' % i] = block_offset

    if data_offsets[-1] != 0 and data_offsets[-1] < ifd_offsets[-1]:
        if ovr_count > 0:
            errors += [
                'The offset of the first block of the smallest overview '
                'should be after its IFD'
            ]
        else:
            errors += [
                'The offset of the first block of the image should '
                'be after its IFD'
            ]
    for i in range(len(data_offsets) - 2, 0, -1):
        if data_offsets[i] != 0 and data_offsets[i] < data_offsets[i + 1]:
            errors += [
                'The offset of the first block of overview of index %d should '
                'be after the one of the overview of index %d' % (i - 1, i)
            ]
    if len(data_offsets) >= 2 and data_offsets[0] != 0 and data_offsets[
            0] < data_offsets[1]:
        errors += [
            'The offset of the first block of the main resolution image '
            'should be after the one of the overview of index %d' %
            (ovr_count - 1)
        ]

    # Optional expensive pass: verify per-block leader/trailer bytes when the
    # structural metadata declares them.
    if full_check and (block_order_row_major or block_leader_size_as_uint4
                       or block_trailer_last_4_bytes_repeated
                       or mask_interleaved_with_imagery):
        f = gdal.VSIFOpenL(filename, 'rb')
        if not f:
            raise ValidateCloudOptimizedGeoTIFFException("Cannot open file")

        full_check_band(f, 'Main resolution image', main_band, errors,
                        block_order_row_major, block_leader_size_as_uint4,
                        block_trailer_last_4_bytes_repeated,
                        mask_interleaved_with_imagery)
        if main_band.GetMaskFlags() == gdal.GMF_PER_DATASET and \
                (filename + '.msk') not in ds.GetFileList():
            full_check_band(f, 'Mask band of main resolution image',
                            main_band.GetMaskBand(), errors,
                            block_order_row_major, block_leader_size_as_uint4,
                            block_trailer_last_4_bytes_repeated, False)
        for i in range(ovr_count):
            ovr_band = ds.GetRasterBand(1).GetOverview(i)
            full_check_band(f, 'Overview %d' % i, ovr_band, errors,
                            block_order_row_major, block_leader_size_as_uint4,
                            block_trailer_last_4_bytes_repeated,
                            mask_interleaved_with_imagery)
            if ovr_band.GetMaskFlags() == gdal.GMF_PER_DATASET and \
                    (filename + '.msk') not in ds.GetFileList():
                full_check_band(f, 'Mask band of overview %d' % i,
                                ovr_band.GetMaskBand(), errors,
                                block_order_row_major,
                                block_leader_size_as_uint4,
                                block_trailer_last_4_bytes_repeated, False)
        gdal.VSIFCloseL(f)

    return warnings, errors, details
def ogr_rfc41_6():
    """RFC 41 test: SQL support on geometry fields.

    Builds an in-memory layer with one polygon geometry field plus an
    integer and a WKT string attribute, then exercises: implicit/explicit
    geometry columns in SELECT, CAST to/from GEOMETRY (with type and SRID
    constraints), COUNT() on geometry, rejected SQL constructs, geometry
    predicates in WHERE, spatial filters by geometry-field index, and
    layers with several geometry fields.

    Returns 'success' or 'fail' (gdaltest convention).
    """
    # Fixture: layer 'poly' with geometry field 'geomfield' (POLYGON),
    # feature 0 = intfield=1/POLYGON EMPTY, feature 1 = all fields unset.
    ds = ogr.GetDriverByName('memory').CreateDataSource('')
    sr = osr.SpatialReference()
    lyr = ds.CreateLayer('poly', geom_type=ogr.wkbPolygon, srs=sr)
    lyr.GetLayerDefn().GetGeomFieldDefn(0).SetName('geomfield')
    lyr.CreateField(ogr.FieldDefn('intfield', ogr.OFTInteger))
    lyr.CreateField(ogr.FieldDefn('wkt', ogr.OFTString))
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('intfield', 1)
    feat.SetField('wkt', 'POINT (0 0)')
    feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POLYGON EMPTY'))
    lyr.CreateFeature(feat)
    feat = ogr.Feature(lyr.GetLayerDefn())
    lyr.CreateFeature(feat)
    feat = None

    # Test implicit geometry column (since poly has one single geometry column)
    # then explicit geometry column
    for sql in [
            'SELECT intfield FROM poly', 'SELECT * FROM poly',
            'SELECT intfield, geomfield FROM poly',
            'SELECT geomfield, intfield FROM poly'
    ]:
        sql_lyr = ds.ExecuteSQL(sql)
        # Result layer must expose the polygon geometry field with its SRS.
        if sql_lyr.GetLayerDefn().GetGeomFieldDefn(
                0).GetType() != ogr.wkbPolygon:
            gdaltest.post_reason('fail')
            return 'fail'
        if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef() is None:
            gdaltest.post_reason('fail')
            return 'fail'
        feat = sql_lyr.GetNextFeature()
        if feat.GetField('intfield') != 1:
            gdaltest.post_reason('fail')
            return 'fail'
        if feat.GetGeomFieldRef('geomfield') is None:
            gdaltest.post_reason('fail')
            return 'fail'
        # Second feature was created without any geometry.
        feat = sql_lyr.GetNextFeature()
        if feat.GetGeomFieldRef('geomfield') is not None:
            gdaltest.post_reason('fail')
            return 'fail'
        feat = None
        ds.ReleaseResultSet(sql_lyr)

    # Test CAST(geometry_field AS GEOMETRY)
    sql_lyr = ds.ExecuteSQL(
        'SELECT CAST(geomfield AS GEOMETRY) AS mygeom FROM poly WHERE CAST(geomfield AS GEOMETRY) IS NOT NULL'
    )
    # Plain GEOMETRY cast erases both the concrete type and the SRS.
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbUnknown:
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef() is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test CAST(xxx AS GEOMETRY(POLYGON))
    sql_lyr = ds.ExecuteSQL(
        'SELECT CAST(geomfield AS GEOMETRY(POLYGON)) AS mygeom FROM poly WHERE CAST(geomfield AS GEOMETRY(POLYGON)) IS NOT NULL'
    )
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbPolygon:
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef() is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test CAST(xxx AS GEOMETRY(POLYGON,4326))
    sql_lyr = ds.ExecuteSQL(
        'SELECT CAST(geomfield AS GEOMETRY(POLYGON,4326)) AS mygeom FROM poly WHERE CAST(geomfield AS GEOMETRY(POLYGON,4326)) IS NOT NULL'
    )
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbPolygon:
        gdaltest.post_reason('fail')
        return 'fail'
    # The SRID in the cast must propagate to the result field's SRS.
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(
            0).GetSpatialRef().ExportToWkt().find('4326') < 0:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test CAST(a_multipolygon AS GEOMETRY(POLYGON))
    sql_lyr = ds.ExecuteSQL(
        "SELECT CAST('MULTIPOLYGON (((0 0,0 1,1 1,1 0,0 0)))' AS GEOMETRY(POLYGON)) AS mygeom FROM poly"
    )
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef(
            'mygeom').ExportToWkt() != 'POLYGON ((0 0,0 1,1 1,1 0,0 0))':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test CAST(a_polygon AS GEOMETRY(MULTIPOLYGON))
    sql_lyr = ds.ExecuteSQL(
        "SELECT CAST('POLYGON ((0 0,0 1,1 1,1 0,0 0))' AS GEOMETRY(MULTIPOLYGON)) AS mygeom FROM poly"
    )
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom').ExportToWkt(
    ) != 'MULTIPOLYGON (((0 0,0 1,1 1,1 0,0 0)))':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test CAST(a_multilinestring AS GEOMETRY(LINESTRING))
    sql_lyr = ds.ExecuteSQL(
        "SELECT CAST('MULTILINESTRING ((0 0,0 1,1 1,1 0,0 0))' AS GEOMETRY(LINESTRING)) AS mygeom FROM poly"
    )
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef(
            'mygeom').ExportToWkt() != 'LINESTRING (0 0,0 1,1 1,1 0,0 0)':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test CAST(a_linestring AS GEOMETRY(MULTILINESTRING))
    sql_lyr = ds.ExecuteSQL(
        "SELECT CAST('LINESTRING (0 0,0 1,1 1,1 0,0 0)' AS GEOMETRY(MULTILINESTRING)) AS mygeom FROM poly"
    )
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom').ExportToWkt(
    ) != 'MULTILINESTRING ((0 0,0 1,1 1,1 0,0 0))':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test expression with cast CHARACTER <--> GEOMETRY
    sql_lyr = ds.ExecuteSQL(
        'SELECT CAST(CAST(geomfield AS CHARACTER) AS GEOMETRY) AS mygeom, intfield FROM poly'
    )
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbUnknown:
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef() is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat.GetField('intfield') != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetGeomFieldRef('mygeom') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test CAST(NULL AS GEOMETRY)
    sql_lyr = ds.ExecuteSQL('SELECT CAST(NULL AS GEOMETRY) FROM poly')
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbUnknown:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    # The result geometry field is unnamed and must carry a NULL geometry.
    if feat.GetGeomFieldRef('') is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test CAST(stringfield AS GEOMETRY)
    sql_lyr = ds.ExecuteSQL('SELECT CAST(wkt AS GEOMETRY) FROM poly')
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbUnknown:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    # The 'wkt' string field of feature 0 holds 'POINT (0 0)'.
    if feat.GetGeomFieldRef('wkt').ExportToWkt() != 'POINT (0 0)':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test COUNT(geometry)
    sql_lyr = ds.ExecuteSQL('SELECT COUNT(geomfield) FROM poly')
    feat = sql_lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Only one of the two features has a non-NULL geometry.
    if feat.GetField(0) != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Statements that must be rejected, paired with the expected error
    # message prefix emitted by the SQL engine.
    wrong_sql_list = [
        ('SELECT DISTINCT geomfield FROM poly',
         'SELECT DISTINCT on a geometry not supported'),
        ('SELECT COUNT(DISTINCT geomfield) FROM poly',
         'SELECT COUNT DISTINCT on a geometry not supported'),
        ('SELECT MAX(geomfield) FROM poly',
         'Use of field function MAX() on geometry field'),
        ('SELECT CAST(5 AS GEOMETRY) FROM poly',
         'Cannot cast integer to geometry'),
        ('SELECT CAST(geomfield AS integer) FROM poly',
         'Cannot cast geometry to integer'),
        ('SELECT CAST(geomfield AS GEOMETRY(2)) FROM poly',
         'First argument of CAST operator should be a geometry type identifier'
         ),
        ('SELECT CAST(geomfield AS GEOMETRY(UNSUPPORTED_TYPE)) FROM poly',
         'SQL Expression Parsing Error: syntax error'),
        ('SELECT CAST(geomfield AS GEOMETRY(UNSUPPORTED_TYPE,5)) FROM poly',
         'SQL Expression Parsing Error: syntax error'),
    ]
    for (sql, error_msg) in wrong_sql_list:
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        sql_lyr = ds.ExecuteSQL(sql)
        gdal.PopErrorHandler()
        # find(...) == 0 means the error message starts with error_msg.
        if gdal.GetLastErrorMsg().find(error_msg) != 0:
            gdaltest.post_reason('fail')
            print('For %s, expected error %s, got %s' %
                  (sql, error_msg, gdal.GetLastErrorMsg()))
            return 'fail'
        if sql_lyr is not None:
            gdaltest.post_reason('fail')
            return 'fail'

    # Test invalid expressions with geometry
    for sql in [
            "SELECT geomfield + 'a' FROM poly",
            "SELECT geomfield * 'a' FROM poly",
            "SELECT geomfield + 'a' FROM poly",
            "SELECT geomfield - 'a' FROM poly",
            "SELECT geomfield % 'a' FROM poly",
            "SELECT CONCAT(geomfield, 'a') FROM poly",
            "SELECT SUBSTR(geomfield, 0, 1) FROM poly",
            "SELECT * FROM poly WHERE geomfield = CAST('POINT EMPTY' AS GEOMETRY)",
            "SELECT * FROM poly WHERE geomfield LIKE 'a'",
            "SELECT * FROM poly WHERE geomfield IN( 'a' )"
    ]:
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        sql_lyr = ds.ExecuteSQL(sql)
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find(
                'Cannot use geometry field in this operation') != 0:
            gdaltest.post_reason('fail')
            print(gdal.GetLastErrorMsg())
            return 'fail'
        if sql_lyr is not None:
            gdaltest.post_reason('fail')
            return 'fail'

    # Test expression with geometry in WHERE
    sql_lyr = ds.ExecuteSQL('SELECT * FROM poly WHERE geomfield IS NOT NULL')
    feat = sql_lyr.GetNextFeature()
    if feat.GetField('intfield') != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    sql_lyr = ds.ExecuteSQL('SELECT * FROM poly WHERE geomfield IS NULL')
    feat = sql_lyr.GetNextFeature()
    if feat.IsFieldSet(0):
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    sql_lyr = ds.ExecuteSQL(
        "SELECT * FROM poly WHERE CAST(geomfield AS CHARACTER) = 'POLYGON EMPTY'"
    )
    feat = sql_lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    sql_lyr = ds.ExecuteSQL(
        'SELECT count(*) FROM poly WHERE geomfield IS NULL')
    feat = sql_lyr.GetNextFeature()
    if feat.GetField(0) != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    sql_lyr = ds.ExecuteSQL(
        'SELECT count(*) FROM poly WHERE geomfield IS NOT NULL')
    feat = sql_lyr.GetNextFeature()
    if feat.GetField(0) != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test spatial filter
    # Re-point feature 0 at POINT(1 2) and delete the geometry-less feature
    # so spatial filtering has a single, known target.
    feat = lyr.GetFeature(0)
    feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(1 2)'))
    lyr.SetFeature(feat)
    feat = None
    lyr.DeleteFeature(1)

    sql_lyr = ds.ExecuteSQL("SELECT * FROM poly")
    # Filter rect away from the point: no feature expected.
    sql_lyr.SetSpatialFilterRect(0, 0, 0, 0)
    feat = sql_lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None

    # Filter rect (on geometry field 0) containing the point.
    sql_lyr.SetSpatialFilterRect(0, 1, 2, 1, 2)
    feat = sql_lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None

    # Test invalid spatial filter index
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    sql_lyr.SetSpatialFilterRect(2, 0, 0, 0, 0)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    # Test invalid geometry field index
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    sql_lyr.GetExtent(geom_field=2)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    ds.ReleaseResultSet(sql_lyr)

    # Test querying several geometry fields
    sql_lyr = ds.ExecuteSQL(
        'SELECT geomfield as geom1, geomfield as geom2 FROM poly')
    feat = sql_lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetGeomFieldRef('geom1') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetGeomFieldRef('geom2') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)

    # Test querying a layer with several geometry fields
    lyr.CreateGeomField(ogr.GeomFieldDefn('secondarygeom', ogr.wkbPoint))
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    feat.SetGeomField('secondarygeom',
                      ogr.CreateGeometryFromWkt('POINT (10 100)'))
    lyr.SetFeature(feat)
    feat = None

    for sql in [
            'SELECT * FROM poly',
            'SELECT geomfield, secondarygeom FROM poly',
            'SELECT secondarygeom, geomfield FROM poly'
    ]:
        sql_lyr = ds.ExecuteSQL(sql)
        feat = sql_lyr.GetNextFeature()
        if feat.GetGeomFieldRef('geomfield').ExportToWkt() != 'POINT (1 2)':
            gdaltest.post_reason('fail')
            return 'fail'
        if feat.GetGeomFieldRef(
                'secondarygeom').ExportToWkt() != 'POINT (10 100)':
            gdaltest.post_reason('fail')
            return 'fail'
        feat = None
        ds.ReleaseResultSet(sql_lyr)

    # Check that we don't get an implicit geometry field
    # (only applies when the source layer has more than one geometry field).
    sql_lyr = ds.ExecuteSQL('SELECT intfield FROM poly')
    if sql_lyr.GetLayerDefn().GetGeomFieldCount() != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    ds.ReleaseResultSet(sql_lyr)

    # Check GetExtent() and SetSpatialFilter()
    sql_lyr = ds.ExecuteSQL('SELECT * FROM poly')
    if sql_lyr.GetExtent(geom_field=0) != (1.0, 1.0, 2.0, 2.0):
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr.GetExtent(geom_field=1) != (10.0, 10.0, 100.0, 100.0):
        gdaltest.post_reason('fail')
        return 'fail'
    sql_lyr.SetSpatialFilterRect(0, 0.5, 1.5, 1.5, 2.5)
    if sql_lyr.GetFeatureCount() != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    sql_lyr.SetSpatialFilterRect(0, 0, 0, 0.5, 0.5)
    if sql_lyr.GetFeatureCount() != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    sql_lyr.SetSpatialFilterRect(1, 9, 99, 11, 101)
    if sql_lyr.GetFeatureCount() != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    sql_lyr.SetSpatialFilterRect(1, 0, 0, 0.5, 0.5)
    if sql_lyr.GetFeatureCount() != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    ds.ReleaseResultSet(sql_lyr)

    ds = None

    return 'success'
def testProj(self):
    """Run one coordinate transformation test case.

    Transforms self.src_xyz from self.src_srs to self.dst_srs and checks the
    result against self.dst_xyz within self.dst_error, then transforms back
    and checks against the starting point within self.src_error.

    Returns 'success', 'fail' or 'skip' (gdaltest convention).
    """
    # osr_ct_1() initializes gdaltest.have_proj4, so it must run before
    # the availability check below.
    import osr_ct
    osr_ct.osr_ct_1()
    if gdaltest.have_proj4 == 0:
        return 'skip'

    # Grid-based tests require the grid file to be present under PROJ_LIB.
    if self.requirements is not None and self.requirements[:5] == 'GRID:':
        # Was a bare "except:" around os.environ[...] — use .get() instead.
        proj_lib = os.environ.get('PROJ_LIB')
        if proj_lib is None:
            # print('PROJ_LIB unset, skipping test.')
            return 'skip'
        try:
            # Probe for the grid file; the original leaked the file handle.
            with open(proj_lib + '/' + self.requirements[5:]):
                pass
        except IOError:
            # print('Did not find GRID:%s' % self.requirements[5:])
            return 'skip'

    src = osr.SpatialReference()
    if src.SetFromUserInput(self.src_srs) != 0:
        gdaltest.post_reason('SetFromUserInput(%s) failed.' % self.src_srs)
        return 'fail'

    dst = osr.SpatialReference()
    if dst.SetFromUserInput(self.dst_srs) != 0:
        gdaltest.post_reason('SetFromUserInput(%s) failed.' % self.dst_srs)
        return 'fail'

    # Hint appended to failure messages when a minimum proj version is
    # required by the test case.
    if self.requirements is not None and self.requirements[0] != 'G':
        additional_error_str = ' Check that proj version is >= %s ' % self.requirements
    else:
        additional_error_str = ''

    try:
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        ct = osr.CoordinateTransformation(src, dst)
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find('Unable to load PROJ.4') != -1:
            gdaltest.post_reason('PROJ.4 missing, transforms not available.')
            return 'skip'
    except ValueError:
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find('Unable to load PROJ.4') != -1:
            gdaltest.post_reason('PROJ.4 missing, transforms not available.')
            return 'skip'
        gdaltest.post_reason(
            'failed to create coordinate transformation. %s' %
            gdal.GetLastErrorMsg())
        return 'fail'
    except Exception:
        # Was a bare "except:"; Exception keeps the broad catch but no
        # longer swallows KeyboardInterrupt/SystemExit.
        gdal.PopErrorHandler()
        gdaltest.post_reason(
            'failed to create coordinate transformation. %s' %
            gdal.GetLastErrorMsg())
        return 'fail'

    # ------------------------------------------------------------------
    # Transform source point to destination SRS.
    # ------------------------------------------------------------------
    result = ct.TransformPoint(self.src_xyz[0], self.src_xyz[1],
                               self.src_xyz[2])
    error = abs(result[0] - self.dst_xyz[0]) \
        + abs(result[1] - self.dst_xyz[1]) \
        + abs(result[2] - self.dst_xyz[2])
    if error > self.dst_error:
        gdaltest.post_reason(
            'Dest error is %g, got (%.15g,%.15g,%.15g)%s' %
            (error, result[0], result[1], result[2], additional_error_str))
        return 'fail'

    # ------------------------------------------------------------------
    # Now transform back.
    # ------------------------------------------------------------------
    ct = osr.CoordinateTransformation(dst, src)
    result = ct.TransformPoint(result[0], result[1], result[2])
    error = abs(result[0] - self.src_xyz[0]) \
        + abs(result[1] - self.src_xyz[1]) \
        + abs(result[2] - self.src_xyz[2])
    if error > self.src_error:
        gdaltest.post_reason('Back to source error is %g.%s' %
                             (error, additional_error_str))
        return 'fail'

    return 'success'
def main(argv=None):
    """Python port of the gdalinfo utility.

    Prints a report for a GDAL raster dataset: driver, file list, size,
    coordinate system, geotransform, GCPs, metadata domains, corner
    coordinates and per-band details (statistics, histogram, checksum,
    nodata, overviews, mask, categories, color table).

    argv: command-line arguments (defaults to sys.argv).
    Returns 0 on success, 1 if the dataset cannot be opened, otherwise the
    result of Usage() on bad arguments.
    """
    bComputeMinMax = False
    bShowGCPs = True
    bShowMetadata = True
    bShowRAT = True
    bStats = False
    bApproxStats = True
    bShowColorTable = True
    bComputeChecksum = False
    bReportHistograms = False
    pszFilename = None
    papszExtraMDDomains = []
    pszProjection = None
    hTransform = None
    bShowFileList = True

    # NOTE: the C version processes GDAL_SKIP/--config before registration;
    # that pre-processing is not ported here.

    if argv is None:
        argv = sys.argv

    argv = gdal.GeneralCmdLineProcessor(argv)
    if argv is None:
        return 1

    nArgc = len(argv)

    # --------------------------------------------------------------------
    # Parse arguments.
    # --------------------------------------------------------------------
    i = 1
    while i < nArgc:
        if EQUAL(argv[i], "--utility_version"):
            print("%s is running against GDAL %s" %
                  (argv[0], gdal.VersionInfo("RELEASE_NAME")))
            return 0
        elif EQUAL(argv[i], "-mm"):
            bComputeMinMax = True
        elif EQUAL(argv[i], "-hist"):
            bReportHistograms = True
        elif EQUAL(argv[i], "-stats"):
            bStats = True
            bApproxStats = False
        elif EQUAL(argv[i], "-approx_stats"):
            bStats = True
            bApproxStats = True
        elif EQUAL(argv[i], "-checksum"):
            bComputeChecksum = True
        elif EQUAL(argv[i], "-nogcp"):
            bShowGCPs = False
        elif EQUAL(argv[i], "-nomd"):
            bShowMetadata = False
        elif EQUAL(argv[i], "-norat"):
            bShowRAT = False
        elif EQUAL(argv[i], "-noct"):
            bShowColorTable = False
        elif EQUAL(argv[i], "-mdd") and i < nArgc - 1:
            # -mdd consumes the following argument as an extra metadata domain.
            i = i + 1
            papszExtraMDDomains.append(argv[i])
        elif EQUAL(argv[i], "-nofl"):
            bShowFileList = False
        elif argv[i][0] == '-':
            return Usage()
        elif pszFilename is None:
            pszFilename = argv[i]
        else:
            return Usage()
        i = i + 1

    if pszFilename is None:
        return Usage()

    # --------------------------------------------------------------------
    # Open dataset.
    # --------------------------------------------------------------------
    hDataset = gdal.Open(pszFilename, gdal.GA_ReadOnly)
    if hDataset is None:
        print("gdalinfo failed - unable to open '%s'." % pszFilename)
        return 1

    # --------------------------------------------------------------------
    # Report general info.
    # --------------------------------------------------------------------
    hDriver = hDataset.GetDriver()
    print("Driver: %s/%s" % (hDriver.ShortName, hDriver.LongName))

    papszFileList = hDataset.GetFileList()
    if papszFileList is None or len(papszFileList) == 0:
        print("Files: none associated")
    else:
        print("Files: %s" % papszFileList[0])
        if bShowFileList:
            for i in range(1, len(papszFileList)):
                print(" %s" % papszFileList[i])

    print("Size is %d, %d" % (hDataset.RasterXSize, hDataset.RasterYSize))

    # --------------------------------------------------------------------
    # Report projection.
    # --------------------------------------------------------------------
    pszProjection = hDataset.GetProjectionRef()
    if pszProjection is not None:
        hSRS = osr.SpatialReference()
        if hSRS.ImportFromWkt(pszProjection) == gdal.CE_None:
            pszPrettyWkt = hSRS.ExportToPrettyWkt(False)
            print("Coordinate System is:\n%s" % pszPrettyWkt)
        else:
            print("Coordinate System is `%s'" % pszProjection)

    # --------------------------------------------------------------------
    # Report Geotransform.
    # --------------------------------------------------------------------
    adfGeoTransform = hDataset.GetGeoTransform(can_return_null=True)
    if adfGeoTransform is not None:
        if adfGeoTransform[2] == 0.0 and adfGeoTransform[4] == 0.0:
            # Axis-aligned transform: use the compact origin/pixel-size form.
            print("Origin = (%.15f,%.15f)" %
                  (adfGeoTransform[0], adfGeoTransform[3]))
            print("Pixel Size = (%.15f,%.15f)" %
                  (adfGeoTransform[1], adfGeoTransform[5]))
        else:
            print("GeoTransform =\n"
                  " %.16g, %.16g, %.16g\n"
                  " %.16g, %.16g, %.16g" %
                  (adfGeoTransform[0], adfGeoTransform[1],
                   adfGeoTransform[2], adfGeoTransform[3],
                   adfGeoTransform[4], adfGeoTransform[5]))

    # --------------------------------------------------------------------
    # Report GCPs.
    # --------------------------------------------------------------------
    if bShowGCPs and hDataset.GetGCPCount() > 0:
        pszProjection = hDataset.GetGCPProjection()
        if pszProjection is not None:
            hSRS = osr.SpatialReference()
            if hSRS.ImportFromWkt(pszProjection) == gdal.CE_None:
                pszPrettyWkt = hSRS.ExportToPrettyWkt(False)
                print("GCP Projection = \n%s" % pszPrettyWkt)
            else:
                print("GCP Projection = %s" % pszProjection)

        gcps = hDataset.GetGCPs()
        i = 0
        for gcp in gcps:
            print("GCP[%3d]: Id=%s, Info=%s\n"
                  " (%.15g,%.15g) -> (%.15g,%.15g,%.15g)" %
                  (i, gcp.Id, gcp.Info, gcp.GCPPixel, gcp.GCPLine, gcp.GCPX,
                   gcp.GCPY, gcp.GCPZ))
            i = i + 1

    # --------------------------------------------------------------------
    # Report metadata.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List()
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("Metadata:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    if bShowMetadata:
        for extra_domain in papszExtraMDDomains:
            papszMetadata = hDataset.GetMetadata_List(extra_domain)
            if papszMetadata is not None and len(papszMetadata) > 0:
                print("Metadata (%s):" % extra_domain)
                for metadata in papszMetadata:
                    print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report "IMAGE_STRUCTURE" metadata.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("IMAGE_STRUCTURE")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("Image Structure Metadata:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report subdatasets.
    # --------------------------------------------------------------------
    papszMetadata = hDataset.GetMetadata_List("SUBDATASETS")
    if papszMetadata is not None and len(papszMetadata) > 0:
        print("Subdatasets:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report geolocation.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("GEOLOCATION")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("Geolocation:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Report RPCs
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("RPC")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0:
        print("RPC Metadata:")
        for metadata in papszMetadata:
            print(" %s" % metadata)

    # --------------------------------------------------------------------
    # Setup projected to lat/long transform if appropriate.
    # --------------------------------------------------------------------
    if pszProjection is not None and len(pszProjection) > 0:
        hProj = osr.SpatialReference(pszProjection)
        if hProj is not None:
            hLatLong = hProj.CloneGeogCS()
            if hLatLong is not None:
                # Errors silenced: a missing PROJ just disables reprojection
                # of the corner coordinates.
                gdal.PushErrorHandler('CPLQuietErrorHandler')
                hTransform = osr.CoordinateTransformation(hProj, hLatLong)
                gdal.PopErrorHandler()
                if gdal.GetLastErrorMsg().find(
                        'Unable to load PROJ.4 library') != -1:
                    hTransform = None

    # --------------------------------------------------------------------
    # Report corners.
    # --------------------------------------------------------------------
    print("Corner Coordinates:")
    GDALInfoReportCorner(hDataset, hTransform, "Upper Left", 0.0, 0.0)
    GDALInfoReportCorner(hDataset, hTransform, "Lower Left", 0.0,
                         hDataset.RasterYSize)
    GDALInfoReportCorner(hDataset, hTransform, "Upper Right",
                         hDataset.RasterXSize, 0.0)
    GDALInfoReportCorner(hDataset, hTransform, "Lower Right",
                         hDataset.RasterXSize, hDataset.RasterYSize)
    GDALInfoReportCorner(hDataset, hTransform, "Center",
                         hDataset.RasterXSize / 2.0,
                         hDataset.RasterYSize / 2.0)

    # ====================================================================
    # Loop over bands.
    # ====================================================================
    for iBand in range(hDataset.RasterCount):
        hBand = hDataset.GetRasterBand(iBand + 1)

        (nBlockXSize, nBlockYSize) = hBand.GetBlockSize()
        print("Band %d Block=%dx%d Type=%s, ColorInterp=%s" %
              (iBand + 1, nBlockXSize, nBlockYSize,
               gdal.GetDataTypeName(hBand.DataType),
               gdal.GetColorInterpretationName(
                   hBand.GetRasterColorInterpretation())))

        if hBand.GetDescription() is not None \
                and len(hBand.GetDescription()) > 0:
            print(" Description = %s" % hBand.GetDescription())

        dfMin = hBand.GetMinimum()
        dfMax = hBand.GetMaximum()
        if dfMin is not None or dfMax is not None or bComputeMinMax:
            line = " "
            if dfMin is not None:
                line = line + ("Min=%.3f " % dfMin)
            if dfMax is not None:
                line = line + ("Max=%.3f " % dfMax)
            if bComputeMinMax:
                gdal.ErrorReset()
                adfCMinMax = hBand.ComputeRasterMinMax(False)
                if gdal.GetLastErrorType() == gdal.CE_None:
                    line = line + (" Computed Min/Max=%.3f,%.3f" %
                                   (adfCMinMax[0], adfCMinMax[1]))
            print(line)

        stats = hBand.GetStatistics(bApproxStats, bStats)
        # Dirty hack to recognize if stats are valid. If invalid, the
        # returned stddev is negative.
        if stats[3] >= 0.0:
            print(" Minimum=%.3f, Maximum=%.3f, Mean=%.3f, StdDev=%.3f" %
                  (stats[0], stats[1], stats[2], stats[3]))

        if bReportHistograms:
            hist = hBand.GetDefaultHistogram(force=True,
                                             callback=gdal.TermProgress)
            if hist is not None:
                dfMin = hist[0]
                dfMax = hist[1]
                nBucketCount = hist[2]
                panHistogram = hist[3]
                print(" %d buckets from %g to %g:" %
                      (nBucketCount, dfMin, dfMax))
                line = ' '
                for bucket in panHistogram:
                    line = line + ("%d " % bucket)
                print(line)

        if bComputeChecksum:
            print(" Checksum=%d" % hBand.Checksum())

        dfNoData = hBand.GetNoDataValue()
        if dfNoData is not None:
            if dfNoData != dfNoData:
                # NaN is the only value that compares unequal to itself.
                print(" NoData Value=nan")
            else:
                print(" NoData Value=%.18g" % dfNoData)

        if hBand.GetOverviewCount() > 0:
            line = " Overviews: "
            for iOverview in range(hBand.GetOverviewCount()):
                if iOverview != 0:
                    line = line + ", "
                hOverview = hBand.GetOverview(iOverview)
                if hOverview is not None:
                    line = line + ("%dx%d" %
                                   (hOverview.XSize, hOverview.YSize))
                    pszResampling = hOverview.GetMetadataItem(
                        "RESAMPLING", "")
                    # '*' marks AVERAGE_BIT2GRAYSCALE-resampled overviews.
                    if pszResampling is not None \
                            and len(pszResampling) >= 12 \
                            and EQUAL(pszResampling[0:12], "AVERAGE_BIT2"):
                        line = line + "*"
                else:
                    line = line + "(null)"
            print(line)

            if bComputeChecksum:
                line = " Overviews checksum: "
                for iOverview in range(hBand.GetOverviewCount()):
                    if iOverview != 0:
                        line = line + ", "
                    hOverview = hBand.GetOverview(iOverview)
                    if hOverview is not None:
                        line = line + ("%d" % hOverview.Checksum())
                    else:
                        line = line + "(null)"
                print(line)

        if hBand.HasArbitraryOverviews():
            print(" Overviews: arbitrary")

        nMaskFlags = hBand.GetMaskFlags()
        if (nMaskFlags & (gdal.GMF_NODATA | gdal.GMF_ALL_VALID)) == 0:
            hMaskBand = hBand.GetMaskBand()

            line = " Mask Flags: "
            if (nMaskFlags & gdal.GMF_PER_DATASET) != 0:
                line = line + "PER_DATASET "
            if (nMaskFlags & gdal.GMF_ALPHA) != 0:
                line = line + "ALPHA "
            if (nMaskFlags & gdal.GMF_NODATA) != 0:
                line = line + "NODATA "
            if (nMaskFlags & gdal.GMF_ALL_VALID) != 0:
                line = line + "ALL_VALID "
            print(line)

            if hMaskBand is not None and \
                    hMaskBand.GetOverviewCount() > 0:
                line = " Overviews of mask band: "
                for iOverview in range(hMaskBand.GetOverviewCount()):
                    if iOverview != 0:
                        line = line + ", "
                    hOverview = hMaskBand.GetOverview(iOverview)
                    if hOverview is not None:
                        line = line + ("%d" % hOverview.Checksum())
                    else:
                        line = line + "(null)"
                # BUG FIX: this summary line was built but never printed
                # (the two overview loops above both end with print(line)).
                print(line)

        if len(hBand.GetUnitType()) > 0:
            print(" Unit Type: %s" % hBand.GetUnitType())

        papszCategories = hBand.GetRasterCategoryNames()
        if papszCategories is not None:
            print(" Categories:")
            i = 0
            for category in papszCategories:
                print(" %3d: %s" % (i, category))
                i = i + 1

        if hBand.GetScale() != 1.0 or hBand.GetOffset() != 0.0:
            print(" Offset: %.15g, Scale:%.15g" %
                  (hBand.GetOffset(), hBand.GetScale()))

        if bShowMetadata:
            papszMetadata = hBand.GetMetadata_List()
        else:
            papszMetadata = None
        if bShowMetadata and papszMetadata is not None \
                and len(papszMetadata) > 0:
            print(" Metadata:")
            for metadata in papszMetadata:
                print(" %s" % metadata)

        if bShowMetadata:
            papszMetadata = hBand.GetMetadata_List("IMAGE_STRUCTURE")
        else:
            papszMetadata = None
        if bShowMetadata and papszMetadata is not None \
                and len(papszMetadata) > 0:
            print(" Image Structure Metadata:")
            for metadata in papszMetadata:
                print(" %s" % metadata)

        hTable = hBand.GetRasterColorTable()
        if hBand.GetRasterColorInterpretation() == gdal.GCI_PaletteIndex \
                and hTable is not None:
            print(" Color Table (%s with %d entries)" %
                  (gdal.GetPaletteInterpretationName(
                      hTable.GetPaletteInterpretation()), hTable.GetCount()))

            if bShowColorTable:
                for i in range(hTable.GetCount()):
                    sEntry = hTable.GetColorEntry(i)
                    print(" %3d: %d,%d,%d,%d" %
                          (i, sEntry[0], sEntry[1], sEntry[2], sEntry[3]))

        if bShowRAT:
            # Raster attribute table dumping is not ported from the C version.
            pass

    return 0
def _copy_selected_fields(src_layer_defn, dst_layer_defn, src_feat, dst_feat,
                          papszSelFields):
    """Copy only the fields named in papszSelFields from src_feat to dst_feat.

    The accessor (double / integer / string) is chosen from the *source*
    field's declared type.
    """
    for fieldname in papszSelFields:
        fld_src_idx = src_layer_defn.GetFieldIndex(fieldname)
        fld_dst_idx = dst_layer_defn.GetFieldIndex(fieldname)
        # BUGFIX: the source field type must be looked up with the *source*
        # index. The previous code indexed the source layer definition with
        # the destination index, which picks the wrong field whenever the two
        # layers declare their fields in a different order.
        fld_type = src_layer_defn.GetFieldDefn(fld_src_idx).GetType()
        if fld_type == ogr.OFTReal:
            dst_feat.SetField(fld_dst_idx,
                              src_feat.GetFieldAsDouble(fld_src_idx))
        elif fld_type == ogr.OFTInteger:
            dst_feat.SetField(fld_dst_idx,
                              src_feat.GetFieldAsInteger(fld_src_idx))
        else:
            dst_feat.SetField(fld_dst_idx,
                              src_feat.GetFieldAsString(fld_src_idx))


def _insert_feature(dst_layer, dst_layer_defn, src_feat, src_fid,
                    preserve_fid, dry_run):
    """Insert a copy of src_feat into dst_layer.

    Returns the OGR error code (0 on success). In dry_run mode nothing is
    written and 0 is returned.
    """
    dst_feat = ogr.Feature(dst_layer_defn)
    dst_feat.SetFrom(src_feat)
    if preserve_fid:
        dst_feat.SetFID(src_fid)
    if dry_run:
        return 0
    return dst_layer.CreateFeature(dst_feat)


def _update_feature(dst_layer, src_layer_defn, dst_layer_defn, src_feat,
                    dst_feat, papszSelFields, dry_run):
    """Overwrite dst_feat from src_feat (all fields, or only papszSelFields),
    keeping dst_feat's original FID.

    Returns the OGR error code (0 on success). In dry_run mode nothing is
    written and 0 is returned.
    """
    dst_fid = dst_feat.GetFID()
    if papszSelFields is not None:
        _copy_selected_fields(src_layer_defn, dst_layer_defn, src_feat,
                              dst_feat, papszSelFields)
    else:
        dst_feat.SetFrom(src_feat)  # SetFrom() resets the FID...
    dst_feat.SetFID(dst_fid)        # ...so restore it before writing back.
    if dry_run:
        return 0
    return dst_layer.SetFeature(dst_feat)


def ogrupdate_process(src_layer, dst_layer, matchfieldname=None,
                      update_mode=DEFAULT, preserve_fid=False,
                      compare_before_update=False, papszSelFields=None,
                      dry_run=False, skip_failures=False,
                      updated_count_out=None, updated_failed_out=None,
                      inserted_count_out=None, inserted_failed_out=None,
                      progress=None, progress_arg=None):
    """Merge the features of src_layer into dst_layer.

    Features are matched either on their FID (when matchfieldname is None)
    or on the value of the field *matchfieldname*. Matched features are
    updated in place (skipped when update_mode == APPEND_ONLY); unmatched
    source features are inserted (skipped when update_mode == UPDATE_ONLY).

    Parameters:
        src_layer, dst_layer: OGR layers to read from / write to.
        matchfieldname: name of the field used for matching, or None to
            match on FID. Must exist (with any type) in both layers.
        update_mode: DEFAULT, UPDATE_ONLY or APPEND_ONLY (module constants).
        preserve_fid: propagate the source FID to newly inserted features.
        compare_before_update: skip the write when AreFeaturesEqual() says
            the destination feature is already identical.
        papszSelFields: optional list of field names to copy on update
            (all fields are copied when None). Every name must exist in
            both layers.
        dry_run: count work without writing anything.
        skip_failures: keep going after a failed insert/update.
        *_count_out / *_failed_out: optional single-element lists that
            receive the respective counters on return.
        progress / progress_arg: GDAL-style progress callback; a return
            value != 1 aborts processing with return code 1.

    Returns 0 on success, non-zero on failure.
    """
    src_layer_defn = src_layer.GetLayerDefn()
    dst_layer_defn = dst_layer.GetLayerDefn()

    src_idx = None
    src_type = None
    dst_type = None
    if matchfieldname is not None:
        src_idx = src_layer_defn.GetFieldIndex(matchfieldname)
        if src_idx < 0:
            print('Cannot find field to match in source layer')
            return 1
        src_type = src_layer_defn.GetFieldDefn(src_idx).GetType()
        dst_idx = dst_layer_defn.GetFieldIndex(matchfieldname)
        if dst_idx < 0:
            print('Cannot find field to match in destination layer')
            return 1
        dst_type = dst_layer_defn.GetFieldDefn(dst_idx).GetType()

    # Validate the field selection against both layers before touching data.
    if papszSelFields is not None:
        for layer_defn in [src_layer_defn, dst_layer_defn]:
            for fieldname in papszSelFields:
                idx = layer_defn.GetFieldIndex(fieldname)
                if idx < 0:
                    if layer_defn == src_layer_defn:
                        print("Cannot find field '%s' in source layer" %
                              fieldname)
                    else:
                        print("Cannot find field '%s' in destination layer" %
                              fieldname)
                    return 1

    if progress is not None:
        src_featurecount = src_layer.GetFeatureCount()

    updated_count = 0
    inserted_count = 0
    updated_failed = 0
    inserted_failed = 0
    ret = 0
    iter_src_feature = 0

    while True:
        src_feat = src_layer.GetNextFeature()
        if src_feat is None:
            break
        src_fid = src_feat.GetFID()
        iter_src_feature = iter_src_feature + 1
        if progress is not None:
            if progress(iter_src_feature * 1.0 / src_featurecount, "",
                        progress_arg) != 1:
                return 1

        # Locate the matching destination feature: by FID, or by the value
        # of the match field.
        if matchfieldname is None:
            dst_feat = dst_layer.GetFeature(src_fid)
        else:
            dst_layer.ResetReading()
            if src_type == dst_type and src_type == ogr.OFTReal:
                val = src_feat.GetFieldAsDouble(src_idx)
                dst_layer.SetAttributeFilter(
                    "%s = %.18g" % (matchfieldname, val))
            elif src_type == dst_type and src_type == ogr.OFTInteger:
                val = src_feat.GetFieldAsInteger(src_idx)
                dst_layer.SetAttributeFilter(
                    "%s = %d" % (matchfieldname, val))
            else:
                val = src_feat.GetFieldAsString(src_idx)
                # BUGFIX: double embedded single quotes so the OGR SQL
                # filter stays valid for values containing a quote.
                dst_layer.SetAttributeFilter(
                    "%s = '%s'" % (matchfieldname, val.replace("'", "''")))
            dst_feat = dst_layer.GetNextFeature()

        if dst_feat is None:
            if update_mode == UPDATE_ONLY:
                continue
            ret = _insert_feature(dst_layer, dst_layer_defn, src_feat,
                                  src_fid, preserve_fid, dry_run)
            if ret == 0:
                inserted_count = inserted_count + 1
            else:
                inserted_failed = inserted_failed + 1
        elif update_mode == APPEND_ONLY:
            continue
        else:
            if matchfieldname is None:
                # FID lookup must return the feature with that very FID.
                assert dst_feat.GetFID() == src_fid
            if compare_before_update and AreFeaturesEqual(src_feat, dst_feat):
                continue
            ret = _update_feature(dst_layer, src_layer_defn, dst_layer_defn,
                                  src_feat, dst_feat, papszSelFields, dry_run)
            if ret == 0:
                updated_count = updated_count + 1
            else:
                updated_failed = updated_failed + 1

        if ret != 0:
            if not skip_failures:
                # Only print our own message if GDAL did not already report.
                if gdal.GetLastErrorMsg() == '':
                    print('An error occurred during feature insertion/update. '
                          'Interrupting processing.')
                ret = 1
                break
            ret = 0

    # Report counters through the optional single-element list out-params.
    if updated_count_out is not None and len(updated_count_out) == 1:
        updated_count_out[0] = updated_count
    if updated_failed_out is not None and len(updated_failed_out) == 1:
        updated_failed_out[0] = updated_failed
    if inserted_count_out is not None and len(inserted_count_out) == 1:
        inserted_count_out[0] = inserted_count
    if inserted_failed_out is not None and len(inserted_failed_out) == 1:
        inserted_failed_out[0] = inserted_failed
    return ret