def getSupportedRasters():
    """Return a dict mapping GDAL raster driver short names to their file
    extensions, caching the result on the GdalUtils class.

    Also populates ``GdalUtils.supportedOutputRasters`` with the subset of
    drivers that can create datasets (DCAP_CREATE or DCAP_CREATECOPY).
    Returns an empty dict when GDAL is not available.
    """
    if not gdalAvailable:
        return {}

    # Serve the cached mapping when it was already built.
    if GdalUtils.supportedRasters is not None:
        return GdalUtils.supportedRasters

    # Lazily register drivers if nothing is registered yet.
    if gdal.GetDriverCount() == 0:
        gdal.AllRegister()

    GdalUtils.supportedRasters = {}
    GdalUtils.supportedOutputRasters = {}
    # GTiff is seeded explicitly so it is always present.
    GdalUtils.supportedRasters['GTiff'] = ['tif']
    GdalUtils.supportedOutputRasters['GTiff'] = ['tif']

    for i in range(gdal.GetDriverCount()):
        driver = gdal.GetDriver(i)
        if driver is None:
            continue
        shortName = driver.ShortName
        metadata = driver.GetMetadata()
        # Skip non-raster drivers (e.g. vector-only ones).
        if gdal.DCAP_RASTER not in metadata \
                or metadata[gdal.DCAP_RASTER] != 'YES':
            continue
        if gdal.DMD_EXTENSION in metadata:
            # DMD_EXTENSION may list several extensions separated by '/'.
            extensions = metadata[gdal.DMD_EXTENSION].split('/')
            if extensions:
                GdalUtils.supportedRasters[shortName] = extensions
                # Only creatable rasters can be referenced in output rasters
                if ((gdal.DCAP_CREATE in metadata
                        and metadata[gdal.DCAP_CREATE] == 'YES')
                        or (gdal.DCAP_CREATECOPY in metadata
                            and metadata[gdal.DCAP_CREATECOPY] == 'YES')):
                    GdalUtils.supportedOutputRasters[shortName] = extensions
    return GdalUtils.supportedRasters
def raster_extensions(self):
    """Get the list of supported raster types from the GDAL drivers.

    Populates ``self.driver_list`` with each driver's DMD_EXTENSION value
    and ``self.driver_list_description`` with the matching DMD_LONGNAME,
    then returns the extension list.
    """
    self.driver_list = list()
    self.driver_list_description = list()
    # iterate through the GDAL drivers and get the supported extension list
    for d in range(0, gdal.GetDriverCount()):
        driver = gdal.GetDriver(d)
        metadata = driver.GetMetadata()
        # Fix: dict.has_key() was removed in Python 3 — use the `in` operator.
        if 'DMD_EXTENSION' in metadata:
            self.driver_list.append(metadata['DMD_EXTENSION'])
            self.driver_list_description.append(metadata['DMD_LONGNAME'])
    # NOTE(review): sorting only the description list de-synchronises it from
    # driver_list (they were built as parallel lists) — confirm this is the
    # intended behaviour with callers.
    self.driver_list_description.sort()
    return self.driver_list
def listFormats(self, short=False):
    """Return the names of all GDAL drivers that can create datasets.

    Args:
        short: when True return only driver descriptions (short names);
            otherwise return "LONGNAME (description)" strings.

    Returns:
        list of str, one entry per creation-capable driver.
    """
    wdr = []
    # Fix: xrange() was removed in Python 3; range() is lazy there anyway.
    for x in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(x)
        # Hoisted: the original called drv.GetMetadata() up to four times
        # per driver.
        md = drv.GetMetadata()
        if (('DCAP_CREATE' in md and md['DCAP_CREATE'] == "YES")
                or ('DCAP_CREATECOPY' in md and md['DCAP_CREATECOPY'] == "YES")):
            if short:
                wdr.append(drv.GetDescription())
            else:
                wdr.append(md['DMD_LONGNAME'] + " (" + drv.GetDescription() + ")")
    return wdr
def get_allowed_drivers(url: str) -> list:
    """Return allowed drivers for OpenEx given the file type of ``url``."""
    # Start from every registered GDAL driver description.
    drivers = [
        gdal.GetDriver(idx).GetDescription()
        for idx in range(gdal.GetDriverCount())
    ]
    extension = os.path.splitext(os.path.basename(url))[1]
    if extension == ".csv":
        # CSV input must not be picked up by any JSON-family driver.
        drivers = [name for name in drivers if "JSON" not in name]
    return drivers
def GetOutputDriversFor(filename):
    """Return the short names of creatable vector drivers matching *filename*.

    A driver qualifies when it advertises DCAP_CREATE or DCAP_CREATECOPY plus
    DCAP_VECTOR, and either handles the filename's extension or owns a
    connection prefix the filename starts with.
    """
    ext = GetExtension(filename)
    matches = []
    for idx in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(idx)
        creatable = (drv.GetMetadataItem(gdal.DCAP_CREATE) is not None
                     or drv.GetMetadataItem(gdal.DCAP_CREATECOPY) is not None)
        if not creatable or drv.GetMetadataItem(gdal.DCAP_VECTOR) is None:
            continue
        if ext and DoesDriverHandleExtension(drv, ext):
            matches.append(drv.ShortName)
            continue
        prefix = drv.GetMetadataItem(gdal.DMD_CONNECTION_PREFIX)
        if prefix is not None and filename.lower().startswith(prefix.lower()):
            matches.append(drv.ShortName)
    return matches
def test_misc_5():
    # Sweep every registered, creation-capable raster driver and exercise
    # Create() with 0-5 bands (GDT_Byte) and with 1/3 bands across all other
    # data types, delegating the actual per-driver work to _misc_5_internal().
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    try:
        shutil.rmtree('tmp/tmp')
    except OSError:
        pass
    try:
        os.mkdir('tmp/tmp')
    except OSError:
        try:
            os.stat('tmp/tmp')
            # Hum the directory already exists... Not expected, but let's try to go on
        except OSError:
            pytest.fail('Cannot create tmp/tmp')

    # This is to speed-up the runtime of tests on EXT4 filesystems
    # Do not use this for production environment if you care about data safety
    # w.r.t system/OS crashes, unless you know what you are doing.
    gdal.SetConfigOption('OGR_SQLITE_SYNCHRONOUS', 'OFF')

    # Test Create() with various band numbers, including 0
    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        md = drv.GetMetadata()
        if drv.ShortName == 'PDF':
            # PDF Create() is vector-only
            continue
        if drv.ShortName == 'MBTiles':
            # MBTiles only support some precise resolutions
            continue
        if 'DCAP_CREATE' in md and 'DCAP_RASTER' in md:
            datatype = gdal.GDT_Byte
            for nBands in range(6):
                _misc_5_internal(drv, datatype, nBands)
            for nBands in [1, 3]:
                for datatype in (gdal.GDT_UInt16, gdal.GDT_Int16,
                                 gdal.GDT_UInt32, gdal.GDT_Int32,
                                 gdal.GDT_Float32, gdal.GDT_Float64,
                                 gdal.GDT_CInt16, gdal.GDT_CInt32,
                                 gdal.GDT_CFloat32, gdal.GDT_CFloat64):
                    _misc_5_internal(drv, datatype, nBands)
    gdal.PopErrorHandler()
def gdal_import_formats():
    """Build a ';;'-separated, sorted file-dialog filter string of all GDAL
    raster drivers that can be opened, e.g. "GeoTIFF (*.tif *.tiff);;...".

    Drivers without DMD_EXTENSIONS metadata contribute a "*.*" pattern.
    Returns an empty string when no driver qualifies.
    """
    formats = ""
    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        if drv.GetMetadataItem(gdal.DCAP_RASTER) and drv.GetMetadataItem(gdal.DCAP_OPEN):
            # Long name up to the first parenthesis, e.g. "GeoTIFF".
            typ = drv.GetMetadataItem(gdal.DMD_LONGNAME).split("(")[0].strip()
            exts = drv.GetMetadataItem(gdal.DMD_EXTENSIONS)
            extensions = ""
            if exts is None:
                extensions = "*"
            else:
                for ex in exts.split(" "):
                    if ex == "":
                        ex = "*"
                    extensions += "*." + ex + " "
            formats += typ + " (" + extensions[:-1] + ");;"
    # Fix: the original tested formats[-1] unconditionally, raising
    # IndexError when no driver matched (empty string).
    if formats.endswith(";;"):
        formats = formats[:-2]
    formats = ";;".join(sorted(formats.split(";;")))
    return formats
def grib_online_1():
    # Download a GRIB2 file and verify its checksum and per-band GRIB metadata.
    # Requires a JP2* driver (the payload is JPEG2000-compressed); returns
    # 'skip' when no JP2 driver or no network cache is available.
    jp2drv_found = False
    for i in range(gdal.GetDriverCount()):
        if gdal.GetDriver(i).ShortName.startswith('JP2'):
            jp2drv_found = True
            break
    if not jp2drv_found:
        return 'skip'

    filename = 'CMC_hrdps_continental_PRATE_SFC_0_ps2.5km_2017111712_P001-00.grib2'
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/grib/' + filename):
        return 'skip'

    ds = gdal.Open('tmp/cache/' + filename)
    cs = ds.GetRasterBand(1).Checksum()
    # A zero checksum means the band could not actually be decoded.
    if cs == 0:
        gdaltest.post_reason('Could not open file')
        print(cs)
        return 'fail'
    md = ds.GetRasterBand(1).GetMetadata()
    expected_md = {
        'GRIB_REF_TIME': ' 1510920000 sec UTC',
        'GRIB_VALID_TIME': ' 1510923600 sec UTC',
        'GRIB_FORECAST_SECONDS': '3600 sec',
        'GRIB_UNIT': '[kg/(m^2 s)]',
        'GRIB_PDS_TEMPLATE_NUMBERS': '1 7 2 50 50 0 0 0 0 0 0 0 60 1 0 0 0 0 0 255 255 255 255 255 255',
        'GRIB_PDS_PDTN': '0',
        'GRIB_COMMENT': 'Precipitation rate [kg/(m^2 s)]',
        'GRIB_SHORT_NAME': '0-SFC',
        'GRIB_ELEMENT': 'PRATE'
    }
    # Every expected key must be present with exactly the expected value.
    for k in expected_md:
        if k not in md or md[k] != expected_md[k]:
            gdaltest.post_reason('Did not get expected metadata')
            print(md)
            return 'fail'

    return 'success'
def _execute(self):
    """Overwrite this function in subclasses

    - Create the resource directory
    - Initialize and create the temporal database and mapset
    - Export the raster layer
    - Cleanup
    """
    # Set up user credentials/logger, then the resource directory.
    self._setup()
    self.storage_interface.setup()

    # Check mapset access and build the temporary GRASS environment.
    self.required_mapsets.append(self.mapset_name)
    self._create_temporary_grass_environment(
        source_mapset_name="PERMANENT")

    # COG bug in GDAL, see https://github.com/OSGeo/gdal/issues/2946 will
    # be fixed in GDAL 3.1.4; use r.out.gdal -c to avoid the bug.
    # Fall back to GTiff when this GDAL build lacks the COG driver.
    from osgeo import gdal
    registered = [gdal.GetDriver(idx).ShortName
                  for idx in range(gdal.GetDriverCount())]
    export_format = "COG" if "COG" in registered else "GTiff"

    self.resource_export_list.append({
        "name": self.raster_name + "@" + self.mapset_name,
        "export": {
            "format": export_format,
            "type": "raster"
        }
    })
    self._export_resources(self.use_raster_region)
def print_gdal_file_formats():
    """ List all output file formats """
    print("Supported format by GDAL")
    dcount = gdal.GetDriverCount()
    for didx in range(dcount):
        driver = gdal.GetDriver(didx)
        if driver:
            metadata = driver.GetMetadata()
            # Fix: was `gdal.DCAP_CREATE in (driver and metadata)` — a typo
            # that only worked because a truthy `driver` makes the expression
            # evaluate to `metadata`. Test membership in `metadata` directly.
            if (gdal.DCAP_CREATE in metadata
                    and metadata[gdal.DCAP_CREATE] == 'YES'
                    and gdal.DCAP_RASTER in metadata
                    and metadata[gdal.DCAP_RASTER] == 'YES'):
                # Creation-capable raster drivers get their long name shown.
                name = driver.GetDescription()
                if "DMD_LONGNAME" in metadata:
                    name += ": " + metadata["DMD_LONGNAME"]
            else:
                name = driver.GetDescription()
            if "DMD_EXTENSIONS" in metadata:
                name += " (" + metadata["DMD_EXTENSIONS"] + ")"
            print(name)
def GetOutputDriversFor(filename):
    """Return the short names of creatable raster drivers matching *filename*.

    A driver qualifies when it advertises DCAP_CREATE or DCAP_CREATECOPY plus
    DCAP_RASTER, and either handles the filename's extension or owns a
    connection prefix the filename starts with.
    """
    drv_list = []
    ext = GetExtension(filename)
    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        if (drv.GetMetadataItem(gdal.DCAP_CREATE) is not None
                or drv.GetMetadataItem(gdal.DCAP_CREATECOPY) is not None) and \
                drv.GetMetadataItem(gdal.DCAP_RASTER) is not None:
            if ext and DoesDriverHandleExtension(drv, ext):
                drv_list.append(drv.ShortName)
            else:
                prefix = drv.GetMetadataItem(gdal.DMD_CONNECTION_PREFIX)
                if prefix is not None and filename.lower().startswith(prefix.lower()):
                    drv_list.append(drv.ShortName)
    # GMT is registered before netCDF for opening reasons, but we want
    # netCDF to be used by default for output.
    # Fix: the condition used `not drv_list` before indexing drv_list[0],
    # which either short-circuited the swap into unreachability or raised
    # IndexError on an empty list; require two entries instead (this matches
    # the upstream GDAL fix).
    if ext.lower() == 'nc' and len(drv_list) >= 2 and \
            drv_list[0].upper() == 'GMT' and drv_list[1].upper() == 'NETCDF':
        drv_list = ['NETCDF', 'GMT']
    return drv_list
def driverList(drivertype='raster'):
    '''Return the list of available GDAL/OGR drivers.

    :param drivertype: 'raster', 'vector', a sequence of those, or a falsy
        value meaning "all driver types".
    :raises ValueError: if an unknown type name is supplied.
    '''
    if not drivertype:
        # Fix: this branch used to set types = ['gdal'], which always failed
        # the {'raster', 'vector'} validity check below and raised ValueError;
        # a falsy drivertype now means "all types".
        types = ['raster', 'vector']
    elif isinstance(drivertype, string_types):
        types = [drivertype]
    else:
        types = drivertype

    if not set(('raster', 'vector')).issuperset(types):
        raise ValueError('invalid type list: "%s"' % types)

    drivers = []
    if 'raster' in types:
        drivers.extend(
            gdal.GetDriver(index) for index in range(gdal.GetDriverCount()))
    if 'vector' in types:
        # @TODO: check
        from osgeo import ogr
        drivers.extend(
            ogr.GetDriver(index) for index in range(ogr.GetDriverCount()))
    return drivers
def misc_12():
    # For every creation-capable raster driver, check that CreateCopy() into a
    # non-existing directory fails WITH an error message, and (when
    # gdal_translate is available) run the same conversion externally to look
    # for memory leaks via DEBUG_VSIMALLOC_STATS output.
    if int(gdal.VersionInfo('VERSION_NUM')) < 1900:
        gdaltest.post_reason('would crash')
        return 'skip'

    import test_cli_utilities
    gdal_translate_path = test_cli_utilities.get_gdal_translate_path()

    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        md = drv.GetMetadata()
        if ('DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md) and 'DCAP_RASTER' in md:
            # Some drivers need specific band counts / data types to succeed.
            nbands = 1
            if drv.ShortName == 'WEBP' or drv.ShortName == 'ADRG':
                nbands = 3
            datatype = gdal.GDT_Byte
            if drv.ShortName == 'BT' or drv.ShortName == 'BLX':
                datatype = gdal.GDT_Int16
            elif drv.ShortName == 'GTX' or drv.ShortName == 'NTv2' or drv.ShortName == 'Leveller':
                datatype = gdal.GDT_Float32
            size = 1201
            if drv.ShortName == 'BLX':
                size = 128
            src_ds = gdal.GetDriverByName('GTiff').Create(
                '/vsimem/misc_12_src.tif', size, size, nbands, datatype)
            set_gt = (2, 1.0 / size, 0, 49, 0, -1.0 / size)
            src_ds.SetGeoTransform(set_gt)
            src_ds.SetProjection(
                'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.01745329251994328]]'
            )
            # Test to detect crashes
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            ds = drv.CreateCopy('/nonexistingpath' + get_filename(drv, ''), src_ds)
            gdal.PopErrorHandler()
            # A failed CreateCopy must leave an error message behind.
            if ds is None and gdal.GetLastErrorMsg() == '':
                gdaltest.post_reason('failure')
                print(
                    'CreateCopy() into non existing dir fails without error message for driver %s'
                    % drv.ShortName)
                gdal.Unlink('/vsimem/misc_12_src.tif')
                return 'fail'
            ds = None

            if gdal_translate_path is not None:
                # Test to detect memleaks
                ds = gdal.GetDriverByName('VRT').CreateCopy(
                    'tmp/misc_12.vrt', src_ds)
                (out, _) = gdaltest.runexternal_out_and_err(
                    gdal_translate_path + ' -of ' + drv.ShortName +
                    ' tmp/misc_12.vrt /nonexistingpath/' + get_filename(drv, ''),
                    check_memleak=False)
                del ds
                gdal.Unlink('tmp/misc_12.vrt')
                # If DEBUG_VSIMALLOC_STATS is defined, this is an easy way
                # to catch some memory leaks
                if out.find('VSIMalloc + VSICalloc - VSIFree') != -1 and \
                        out.find('VSIMalloc + VSICalloc - VSIFree : 0') == -1:
                    # Rasterlite is known to leak exactly one allocation.
                    if drv.ShortName == 'Rasterlite' and out.find(
                            'VSIMalloc + VSICalloc - VSIFree : 1') != -1:
                        pass
                    else:
                        print('memleak detected for driver %s' % drv.ShortName)
            src_ds = None

    gdal.Unlink('/vsimem/misc_12_src.tif')
    return 'success'
def misc_6_internal(datatype, nBands, setDriversDone):
    # Exercise CreateCopy() of a MEM dataset against every creation-capable
    # raster driver for the given band count / data type:
    #   1. plain CreateCopy into a scratch directory,
    #   2. truncated-write detection via /vsimem/ maxlength files,
    #   3. progress-callback interruption handling.
    ds = gdal.GetDriverByName('MEM').Create('', 10, 10, nBands, datatype)
    if nBands > 0:
        ds.GetRasterBand(1).Fill(255)
    ds.SetGeoTransform([2, 1.0 / 10, 0, 49, 0, -1.0 / 10])
    ds.SetProjection(
        'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.01745329251994328]]'
    )
    ds.SetMetadata(['a'])

    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        md = drv.GetMetadata()
        if ('DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md) and 'DCAP_RASTER' in md:
            # print ('drv = %s, nBands = %d, datatype = %s' % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype)))
            skip = False
            # FIXME: A few cases that crashes and should be investigated
            if drv.ShortName == 'JPEG2000':
                if (nBands == 2 or nBands >= 5) or \
                        not (datatype == gdal.GDT_Byte
                             or datatype == gdal.GDT_Int16
                             or datatype == gdal.GDT_UInt16):
                    skip = True
            if skip is False:
                dirname = 'tmp/tmp/tmp_%s_%d_%s' % (
                    drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                try:
                    os.mkdir(dirname)
                except OSError:
                    try:
                        os.stat(dirname)
                        # Hum the directory already exists... Not expected, but let's try to go on
                    except OSError:
                        reason = 'Cannot create %s before drv = %s, nBands = %d, datatype = %s' % (
                            dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                        gdaltest.post_reason(reason)
                        return 'fail'

                filename = get_filename(drv, dirname)
                dst_ds = drv.CreateCopy(filename, ds)
                has_succeeded = dst_ds is not None
                dst_ds = None

                # Remember the output size for the truncation test below.
                size = 0
                stat = gdal.VSIStatL(filename)
                if stat is not None:
                    size = stat.size
                try:
                    shutil.rmtree(dirname)
                except OSError:
                    reason = 'Cannot remove %s after drv = %s, nBands = %d, datatype = %s' % (
                        dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                    gdaltest.post_reason(reason)
                    return 'fail'

                if has_succeeded and drv.ShortName not in setDriversDone and nBands > 0:
                    setDriversDone.add(drv.ShortName)
                    # The first list of drivers fail to detect short writing
                    # The second one is because they are verbose in stderr
                    if 'DCAP_VIRTUALIO' in md and size != 0 and \
                            drv.ShortName not in ['JPEG2000', 'KMLSUPEROVERLAY', 'HF2', 'ZMap', 'DDS'] and \
                            drv.ShortName not in ['GIF', 'JP2ECW', 'JP2Lura']:
                        # Write into /vsimem/ files capped at 0%..90% of the
                        # full size and make sure each short write is reported.
                        for j in range(10):
                            truncated_size = (size * j) / 10
                            vsimem_filename = (
                                '/vsimem/test_truncate/||maxlength=%d||' % truncated_size
                            ) + get_filename(drv, '')[1:]
                            # print('drv = %s, nBands = %d, datatype = %s, truncated_size = %d' % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype), truncated_size))
                            dst_ds = drv.CreateCopy(vsimem_filename, ds)
                            error_detected = False
                            if dst_ds is None:
                                error_detected = True
                            else:
                                gdal.ErrorReset()
                                dst_ds = None
                                if gdal.GetLastErrorMsg() != '':
                                    error_detected = True
                            if not error_detected:
                                msg = 'write error not decteded with with drv = %s, nBands = %d, datatype = %s, truncated_size = %d' % (
                                    drv.ShortName, nBands, gdal.GetDataTypeName(datatype), truncated_size)
                                print(msg)
                                gdaltest.post_reason(msg)

                            # Clean the scratch /vsimem/ area between rounds.
                            fl = gdal.ReadDirRecursive('/vsimem/test_truncate')
                            if fl is not None:
                                for myf in fl:
                                    gdal.Unlink('/vsimem/test_truncate/' + myf)
                                fl = gdal.ReadDirRecursive('/vsimem/test_truncate')
                                if fl is not None:
                                    print(fl)

                if drv.ShortName not in [
                        'ECW', 'JP2ECW', 'VRT', 'XPM', 'JPEG2000', 'FIT',
                        'RST', 'INGR', 'USGSDEM', 'KMLSUPEROVERLAY', 'GMT'
                ]:
                    # The interrupt callback must abort the copy; a non-None
                    # result means the driver ignored the interruption.
                    dst_ds = drv.CreateCopy(
                        filename, ds, callback=misc_6_interrupt_callback_class().cbk)
                    if dst_ds is not None:
                        gdaltest.post_reason(
                            'interruption did not work with drv = %s, nBands = %d, datatype = %s'
                            % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype)))
                        dst_ds = None
                        try:
                            shutil.rmtree(dirname)
                        except OSError:
                            pass
                        return 'fail'
                    dst_ds = None
                    try:
                        shutil.rmtree(dirname)
                    except OSError:
                        pass
                    try:
                        os.mkdir(dirname)
                    except OSError:
                        reason = 'Cannot create %s before drv = %s, nBands = %d, datatype = %s' % (
                            dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                        gdaltest.post_reason(reason)
                        return 'fail'
    ds = None
    return 'success'
.update({'GoogleV3': 'api_key', # since July 2018 Google requires each request to have an API key 'Bing': 'api_key', 'GeoNames': 'username', 'Yandex': 'api_key', 'MapQuest': 'key', 'Nominatim': None, # using Nominatim with the default geopy is strongly discouraged 'OpenMapQuest': 'api_key'} if _is_geopy_installed else {}) \ or __CODERS # at the end, CODERS will be equal to __CODERS after its updates # default geocoder... but this can be reset when declaring a subclass DEF_CODER = {'Nominatim': None} # {'Bing' : None} DEF_AGENT = PACKNAME DRIVERS = {gdal.GetDriver(i).ShortName: gdal.GetDriver(i).LongName for i in range(gdal.GetDriverCount())} DEF_DRIVER = "GeoJSON" DEF_PROJ = 'WGS84' DEF_PROJ4LL = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' DEF_PROJ4SM = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs' # LATLON = ['lat', 'lon'] # 'coord' # 'latlon' # ORDER = 'lL' # first lat, second Lon DEF_PLACE = ['street', 'number', 'postcode', 'city', 'country'] """Fields used to defined a toponomy (location/place). """
def misc_6_internal(datatype, nBands):
    """Exercise CreateCopy() of a small dataset against every creation-capable
    raster driver for the given band count / data type, including
    progress-callback interruption handling.

    Returns 'success' or 'fail' (gdaltest-style test result strings).
    """
    if nBands == 0:
        # MEM cannot create band-less datasets; ILWIS can.
        ds = gdal.GetDriverByName('ILWIS').Create('tmp/tmp.mpl', 100, 100,
                                                  nBands, datatype)
    else:
        ds = gdal.GetDriverByName('MEM').Create('', 10, 10, nBands, datatype)
        ds.GetRasterBand(1).Fill(255)
    ds.SetGeoTransform([2, 1.0 / 10, 0, 49, 0, -1.0 / 10])
    ds.SetProjection(
        'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.01745329251994328]]'
    )
    ds.SetMetadata(['a'])
    if ds is not None:
        for i in range(gdal.GetDriverCount()):
            drv = gdal.GetDriver(i)
            md = drv.GetMetadata()
            # Fix: `and` binds tighter than `or`, so the original condition
            # `'DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md and 'DCAP_RASTER' in md`
            # skipped the DCAP_RASTER check for CreateCopy-only drivers
            # (compare the parenthesised successor version of this function).
            if ('DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md) and 'DCAP_RASTER' in md:
                # print ('drv = %s, nBands = %d, datatype = %s' % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype)))
                skip = False
                # FIXME: A few cases that crashes and should be investigated
                if drv.ShortName == 'JPEG2000':
                    if (nBands == 2 or nBands >= 5) or \
                            not (datatype == gdal.GDT_Byte
                                 or datatype == gdal.GDT_Int16
                                 or datatype == gdal.GDT_UInt16):
                        skip = True
                if drv.ShortName == 'JP2ECW' and datatype == gdal.GDT_Float64:
                    skip = True
                if skip is False:
                    dirname = 'tmp/tmp/tmp_%s_%d_%s' % (
                        drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                    try:
                        os.mkdir(dirname)
                    except OSError:  # narrowed from a bare except
                        try:
                            os.stat(dirname)
                            # Hum the directory already exists... Not expected, but let's try to go on
                        except OSError:
                            reason = 'Cannot create %s before drv = %s, nBands = %d, datatype = %s' % (
                                dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                            gdaltest.post_reason(reason)
                            return 'fail'

                    # Some drivers insist on specific extensions / filenames.
                    filename = '%s/foo' % dirname
                    if drv.ShortName == 'GTX':
                        filename = filename + '.gtx'
                    elif drv.ShortName == 'RST':
                        filename = filename + '.rst'
                    elif drv.ShortName == 'SAGA':
                        filename = filename + '.sdat'
                    elif drv.ShortName == 'ADRG':
                        filename = '%s/ABCDEF01.GEN' % dirname
                    elif drv.ShortName == 'SRTMHGT':
                        filename = '%s/N48E002.HGT' % dirname
                    elif drv.ShortName == 'ECW':
                        filename = filename + '.ecw'
                    elif drv.ShortName == 'KMLSUPEROVERLAY':
                        filename = filename + '.kmz'

                    dst_ds = drv.CreateCopy(filename, ds)
                    has_succeeded = dst_ds is not None
                    dst_ds = None
                    try:
                        shutil.rmtree(dirname)
                    except OSError:
                        reason = 'Cannot remove %s after drv = %s, nBands = %d, datatype = %s' % (
                            dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                        gdaltest.post_reason(reason)
                        return 'fail'

                    if has_succeeded and drv.ShortName not in [
                            'ECW', 'JP2ECW', 'VRT', 'XPM', 'JPEG2000', 'FIT',
                            'RST', 'INGR', 'USGSDEM', 'KMLSUPEROVERLAY', 'GMT'
                    ]:
                        # The interrupt callback must abort the copy; a
                        # non-None result means the driver ignored it.
                        dst_ds = drv.CreateCopy(
                            filename, ds,
                            callback=misc_6_interrupt_callback_class().cbk)
                        if dst_ds is not None:
                            gdaltest.post_reason(
                                'interruption did not work with drv = %s, nBands = %d, datatype = %s'
                                % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype)))
                            dst_ds = None
                            try:
                                shutil.rmtree(dirname)
                            except OSError:
                                pass
                            return 'fail'
                        dst_ds = None
                        try:
                            shutil.rmtree(dirname)
                        except OSError:
                            pass
                        try:
                            os.mkdir(dirname)
                        except OSError:
                            reason = 'Cannot create %s before drv = %s, nBands = %d, datatype = %s' % (
                                dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                            gdaltest.post_reason(reason)
                            return 'fail'
    ds = None
    if nBands == 0:
        gdal.GetDriverByName('ILWIS').Delete('tmp/tmp.mpl')
    return 'success'
import numpy from osgeo import gdal from osgeo import ogr from osgeo import osr from .. import geoprocessing from ..geoprocessing_core import DEFAULT_GTIFF_CREATION_TUPLE_OPTIONS LOGGER = logging.getLogger('pygeoprocessing.testing.sampledata') ReferenceData = collections.namedtuple('ReferenceData', 'projection origin pixel_size') gdal.AllRegister() GDAL_DRIVERS = sorted([ gdal.GetDriver(i).GetDescription() for i in range(1, gdal.GetDriverCount()) ]) OGR_DRIVERS = sorted( [ogr.GetDriver(i).GetName() for i in range(ogr.GetDriverCount())]) # Mappings of numpy -> GDAL types and GDAL -> numpy types. NUMPY_GDAL_DTYPES = { numpy.byte: gdal.GDT_Byte, numpy.ubyte: gdal.GDT_Byte, numpy.uint16: gdal.GDT_UInt16, numpy.uint32: gdal.GDT_UInt32, numpy.int16: gdal.GDT_Int16, numpy.int32: gdal.GDT_Int32, numpy.int64: gdal.GDT_Int32, # GDAL doesn't have an int6. numpy.float32: gdal.GDT_Float32, numpy.float64: gdal.GDT_Float64,
import warnings import numpy from osgeo import gdal from osgeo import ogr from osgeo import osr LOGGER = logging.getLogger('pygeoprocessing.testing.sampledata') ReferenceData = collections.namedtuple('ReferenceData', 'projection origin pixel_size') gdal.AllRegister() try: GDAL_DRIVERS = sorted([gdal.GetDriver(i).GetDescription() for i in range(1, gdal.GetDriverCount())]) OGR_DRIVERS = sorted([ogr.GetDriver(i).GetName() for i in range(ogr.GetDriverCount())]) except TypeError: # When building InVEST sphinx documentation, gdal.GetDriverCount() doesn't # return an int. GDAL_DRIVERS = OGR_DRIVERS = [] # Higher index in list represents more information is stored by the datatype # Helpful for debug messages, used when creating sample rasters. DTYPES = [ (numpy.byte, gdal.GDT_Byte), (numpy.uint16, gdal.GDT_UInt16), (numpy.uint32, gdal.GDT_UInt32), (numpy.int16, gdal.GDT_Int16),
def getSupportedRasters(self):
    """Return a dict keyed by driver long name describing the rasters QGIS-style
    code can open: {'EXTENSIONS': glob lists, 'LONGNAME', 'SHORTNAME',
    'DESCRIPTION'}. The result is cached on self.supportedRasters.
    """
    if self.supportedRasters is not None:
        return self.supportedRasters

    # first get the GDAL driver manager
    if gdal.GetDriverCount() == 0:
        gdal.AllRegister()

    self.supportedRasters = dict()
    jp2Driver = None

    # for each loaded GDAL driver
    for i in range(gdal.GetDriverCount()):
        driver = gdal.GetDriver(i)
        if driver is None:
            QgsLogger.warning("unable to get driver " + str(i))
            continue

        # now we need to see if the driver is for something currently
        # supported; if not, we give it a miss for the next driver
        # Fix: use str methods instead of the deprecated string-module
        # function forms (string.strip/split/replace were removed with the
        # module's function API in Python 3); behaviour is identical.
        longName = re.sub(r'\(.*$', '', driver.LongName).strip()
        shortName = re.sub(r'\(.*$', '', driver.ShortName).strip()
        extensions = ''
        description = driver.GetDescription()
        glob = []

        metadata = driver.GetMetadata()
        # Fix: dict.has_key() was removed in Python 3.
        if gdal.DMD_EXTENSION in metadata:
            extensions = str(metadata[gdal.DMD_EXTENSION])

        if longName != '':
            if extensions != '':
                # XXX add check for SDTS; in that case we want (*CATD.DDF)
                glob.append(("*." + extensions.replace("/", " *.")).split(" "))
                # Add only the first JP2 driver found to the filter list (it's the one GDAL uses)
                if description == "JPEG2000" or description.startswith(
                        "JP2"):  # JP2ECW, JP2KAK, JP2MrSID
                    if jp2Driver is not None:
                        continue  # skip if already found a JP2 driver
                    jp2Driver = driver  # first JP2 driver found
                    glob.append("*.j2k")  # add alternate extension
                elif description == "GTiff":
                    glob.append("*.tiff")
                elif description == "JPEG":
                    glob.append("*.jpeg")
            else:
                # USGS DEMs use "*.dem"
                if description.startswith("USGSDEM"):
                    glob.append("*.dem")
                elif description.startswith("DTED"):
                    # DTED use "*.dt0"
                    glob.append("*.dt0")
                elif description.startswith("MrSID"):
                    # MrSID use "*.sid"
                    glob.append("*.sid")
                else:
                    continue

            self.supportedRasters[longName] = {
                'EXTENSIONS': glob,
                'LONGNAME': longName,
                'SHORTNAME': shortName,
                'DESCRIPTION': description
            }

    return self.supportedRasters
def check_config(args):
    """
    Check config and set some defaults if necessary.

    Mutates and returns the ``args`` mapping: fills missing top-level keys,
    applies GDAL config options, validates each model entry (name format,
    driver availability, inputs, tuple-valued options) and exits the process
    when any validation error was logged.
    """
    log(f"# Checking configuration")
    errors = False

    # Set some defaults
    defaults = [
        ("quiet", False),
        ("product", "NBAR"),
        ("obstmp", "/tmp/obs.tif"),
        ("clipshpfn", "/tmp/clip.json"),
        ("tmpdir", "/tmp"),
        ("nocleanup", False),
        ("gdalconfig", {}),
        ("models", []),
    ]
    for arg, value in defaults:
        if not arg in args:
            log(f"'{arg}' not set, setting to default: {arg} = {value}")
            args[arg] = value

    # Set GDAL config (booleans become the YES/NO strings GDAL expects)
    for k, v in args["gdalconfig"].items():
        if v == True:
            v = "YES"
        if v == False:
            v = "NO"
        gdal.SetConfigOption(k, v)
        log(f"GDAL option {k} = {v}")

    ## Check that models exists
    if not isinstance(args["models"], list):
        log("'models' must be a list of models")
        errors = True

    # Set some model defaults
    avail = [gdal.GetDriver(i).ShortName for i in range(gdal.GetDriverCount())]

    # Check the models
    models = args["models"]
    for m in models:
        name = m["name"]
        # A model name may be a file:// or s3:// URL carrying a checksum
        # suffix; only the format is validated here.
        if name[:7] == "file://":
            try:
                path = name[7:]
                path, checksum = path.split(":")
            except ValueError:
                log(f"Incorrect model name format, it should be file://filename:sha256checksum"
                    )
                errors = True
        if name[:5] == "s3://":
            try:
                path = name[5:]
                path, checksum = path.split(":")
            except ValueError:
                log(f"Incorrect model name format, it should be s3://bucket/key:sha256checksum"
                    )
                errors = True
        # Prefer the COG driver when this GDAL build provides it.
        if "driver" not in m:
            if gdal.GetDriverByName("COG"):
                m["driver"] = "COG"
            else:
                m["driver"] = "GTiff"
        if m["driver"] not in avail:
            log(f"'driver' for model '{name}' is not available")
            log(f"available drivers: {avail}")
            errors = True
        if "inputs" not in m:
            m["inputs"] = []
        # Normalise the urls
        for ip in m["inputs"]:
            fn = normalise_url(ip["filename"])
            ip["filename"] = fn
        # Parse tuples as tuples of numbers for models
        for k, v in m.items():
            if isinstance(v, dict):
                for kk, vv in v.items():
                    if isinstance(
                            vv, str) and vv.startswith("(") and vv.endswith(")"):
                        v[kk] = astuple(vv)
        defaults = [
            ("output", "result.tif"),
        ]
        for arg, value in defaults:
            if not arg in m.keys():
                log(f"Model {name} '{arg}' not set, setting to default: {arg} = {value}"
                    )
                m[arg] = value
    args["models"] = models

    if errors:
        sys.exit(1)

    # TODO: Check GPU resources if needed?
    return args
def _export_raster(self,
                   raster_name,
                   format="COG",
                   additional_options=[],
                   use_raster_region=False):
    """Export a specific raster layer with r.out.gdal as GeoTiff.

    The result is stored in a temporary directory
    that is located in the temporary grass database.

    The region of the raster layer can be used for export. In this case a
    temporary region will be used for export, so that the original region
    of the mapset is not modified. COG-Driver:
    https://gdal.org/drivers/raster/cog.html

    Args:
        raster_name (str): The name of the raster layer
        format (str): COG (default; requires GDAL >= 3.1 on server), GTiff
        additional_options (list): Unused
        use_raster_region (bool): Use the region of the raster layer for export

    Returns:
        tuple: A tuple (file_name, output_path)

    Raises:
        AsyncProcessError: If a GRASS module return status is not 0
    """
    # Export the layer
    suffix = ".tif"
    # Remove a potential mapset
    file_name = raster_name.split("@")[0] + suffix

    if use_raster_region is True:
        # Align the computational region to the raster before exporting.
        p = Process(exec_type="grass",
                    executable="g.region",
                    executable_params=["raster=%s" % raster_name, "-g"],
                    id=f"exporter_region_{raster_name}",
                    stdin_source=None)
        self._update_num_of_steps(1)
        self._run_module(p)

    if format == 'COG':
        # check if GDAL has COG driver
        from osgeo import gdal
        driver_list = [
            gdal.GetDriver(i).ShortName
            for i in range(gdal.GetDriverCount())
        ]
        if 'COG' not in driver_list:
            format = 'GTiff'
            self.message_logger.info(
                "COG driver not available, using GTiff driver")

    # Save the file in the temporary directory of the temporary gisdb
    output_path = os.path.join(self.temp_file_path, file_name)

    module_name = "r.out.gdal"
    args = [
        "-fmt",
        "input=%s" % raster_name,
        "format=%s" % format,
        "output=%s" % output_path
    ]
    create_opts = "createopt=BIGTIFF=YES,COMPRESS=LZW"

    if format == "GTiff":
        # generate overviews with compression:
        os.environ['COMPRESS_OVERVIEW'] = "LZW"
        args.append("overviews=5")
        create_opts += ",TILED=YES"
    args.append(create_opts)

    # current workaround due to color table export
    # COG bug in GDAL, see https://github.com/OSGeo/gdal/issues/2946
    # TODO: DELETE AND TEST ONCE GDAL 3.1.4 HAS BEEN RELEASED
    if format == "COG":
        args.append("-c")

    if additional_options:
        args.extend(additional_options)

    p = Process(exec_type="grass",
                executable=module_name,
                executable_params=args,
                id=f"exporter_raster_{raster_name}",
                stdin_source=None)
    self._update_num_of_steps(1)
    self._run_module(p)

    return file_name, output_path
# -*- coding: utf-8 -*- # @Time : 2020/7/6 13:31 # @Author : ding # @File : 1zhang.py import os from osgeo import ogr, gdal, osr from osgeo import gdalconst import shapely import numpy as np # gdal.AllRegister() 一次性注册所有驱动,只能读不能写。 driver_gdal = gdal.GetDriverByName('Gtiff') driver_gdal.Register() driver_all = gdal.GetDriverCount() print(driver_all) changsha_tif = gdal.Open( r'E:\gistest\data\430000HN\430000HN_L5_TM_1990\430000HN_L5_TM_1990_R1C1.TIF' ) print(dir(changsha_tif)) # 查看可操作 print(changsha_tif.GetDescription()) # 返回路径信息 print(changsha_tif.RasterCount) # 数据集上的波段数,每一个波段都是一个数据集。 print(changsha_tif.RasterXSize) print(changsha_tif.RasterYSize) # y方向上的像元个数 print(changsha_tif.GetProjection()) # 投影 print(changsha_tif.GetGeoTransform()) # 六参数 # 读取栅格数据的元数据 print(changsha_tif.GetMetadata())
def detect_ogr_driver( ogr_ds: str, name_only: bool = False) -> typing.Tuple[gdal.Driver, str]: """ Autodetect the appropriate driver for an OGR datasource. Parameters ---------- ogr_ds : OGR datasource Path to OGR datasource. name_only : bool True to return the name of the driver, else the ogr.Driver object will be return Returns ------- OGR driver OR OGR driver, layer name """ # Driver names FileGDB = 'FileGDB' OpenFileGDB = 'OpenFileGDB' # Suffixes GPKG = '.gpkg' SHP = '.shp' GEOJSON = '.geojson' GDB = '.gdb' supported_drivers = [ gdal.GetDriver(i).GetDescription() for i in range(gdal.GetDriverCount()) ] if FileGDB in supported_drivers: gdb_driver = FileGDB else: gdb_driver = OpenFileGDB # OGR driver lookup table driver_lut = { GEOJSON: 'GeoJSON', SHP: 'ESRI Shapefile', GPKG: 'GPKG', GDB: gdb_driver } layer = None # Check if in-memory datasource if isinstance(ogr_ds, PurePath): ogr_ds = str(ogr_ds) if isinstance(ogr_ds, ogr.DataSource): driver = 'Memory' elif 'vsimem' in ogr_ds: driver = 'ESRI Shapefile' else: # Check if extension in look up table if GPKG in ogr_ds: drv_sfx = GPKG layer = Path(ogr_ds).stem elif GDB in ogr_ds: drv_sfx = GDB layer = Path(ogr_ds).stem else: drv_sfx = Path(ogr_ds).suffix if drv_sfx in driver_lut.keys(): driver = driver_lut[drv_sfx] else: logger.warning("""Unsupported driver extension {} Defaulting to 'ESRI Shapefile'""".format(drv_sfx)) driver = driver_lut[SHP] logger.debug('Driver autodetected: {}'.format(driver)) if not name_only: try: driver = ogr.GetDriverByName(driver) except ValueError as e: logger.error('ValueError with driver_name: {}'.format(driver)) logger.error('OGR DS: {}'.format(ogr_ds)) raise e return driver, layer
def has_jp2kdrv():
    """Return True when a JPEG2000-capable driver (short name 'JP2*') is registered."""
    return any(
        gdal.GetDriver(idx).ShortName.startswith('JP2')
        for idx in range(gdal.GetDriverCount())
    )
def export(world,
           export_filetype='GTiff',
           export_datatype='float32',
           path='seed_output'):
    """Export the world's elevation layer to a GDAL raster file.

    Args:
        world: world object exposing ``layers['elevation'].data`` (a numpy
            array), ``width``, ``height`` and ``sea_level()``.
        export_filetype: GDAL driver name (looked up in ``gdal_mapper`` for
            the output file suffix).
        export_datatype: target sample type name, e.g. 'uint16', 'float32'.
        path: output file prefix; the file is named ``<path>-<bpp>.<suffix>``.

    Raises:
        TypeError: when ``export_datatype`` is not a supported type name.
    """
    try:
        gdal
    except NameError:
        print("Cannot export: please install pygdal.")
        sys.exit(1)

    gdal.AllRegister()
    # NOTE(review): this starts at index 1, silently skipping driver 0 —
    # confirm whether that is intentional.
    for i in range(1, gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        sTmp = drv.GetDescription()
        print(sTmp)

    final_driver = gdal.GetDriverByName(export_filetype)
    # Fix: the None check must run BEFORE touching the driver; the original
    # called final_driver.GetMetadata() first and would raise AttributeError
    # for an unregistered driver instead of printing this message.
    if final_driver is None:
        print("%s driver not registered." % export_filetype)
        sys.exit(1)

    # Fix: dict.has_key() was removed in Python 3.
    metadata = final_driver.GetMetadata()
    if metadata.get(gdal.DCAP_CREATE) == 'YES':
        print('Driver %s supports Create() method.' % final_driver)
    if metadata.get(gdal.DCAP_CREATECOPY) == 'YES':
        print('Driver %s supports CreateCopy() method.' % final_driver)

    # try to find the proper file-suffix
    export_filetype = export_filetype.lower()
    if export_filetype in gdal_mapper:
        export_filetype = gdal_mapper[export_filetype]
    # Note: GDAL will throw informative errors on its own whenever file type
    # and data type cannot be matched.

    # translate export_datatype; http://www.gdal.org/gdal_8h.html#a22e22ce0a55036a96f652765793fb7a4
    export_datatype = export_datatype.lower()
    if export_datatype in ['gdt_byte', 'uint8', 'int8', 'byte', 'char']:
        # GDAL does not support int8
        bpp, signed, normalize = (8, False, True)
        numpy_type = numpy.uint8
        gdal_type = gdal.GDT_Byte
    elif export_datatype in ['gdt_uint16', 'uint16']:
        bpp, signed, normalize = (16, False, True)
        numpy_type = numpy.uint16
        gdal_type = gdal.GDT_UInt16
    elif export_datatype in ['gdt_uint32', 'uint32']:
        bpp, signed, normalize = (32, False, True)
        numpy_type = numpy.uint32
        gdal_type = gdal.GDT_UInt32
    elif export_datatype in ['gdt_int16', 'int16']:
        bpp, signed, normalize = (16, True, True)
        numpy_type = numpy.int16
        gdal_type = gdal.GDT_Int16
    elif export_datatype in ['gdt_int32', 'int32', 'int']:  # fallback for 'int'
        bpp, signed, normalize = (32, True, True)
        numpy_type = numpy.int32
        gdal_type = gdal.GDT_Int32
    elif export_datatype in ['gdt_float32', 'float32', 'float']:  # fallback for 'float'
        bpp, signed, normalize = (32, True, False)
        numpy_type = numpy.float32
        gdal_type = gdal.GDT_Float32
    elif export_datatype in ['gdt_float64', 'float64']:
        bpp, signed, normalize = (64, True, False)
        numpy_type = numpy.float64
        gdal_type = gdal.GDT_Float64
    else:
        raise TypeError(
            "Type of data not recognized or not supported by GDAL: %s"
            % export_datatype)

    # massage data to scale between the absolute min and max
    elevation = numpy.copy(world.layers['elevation'].data)

    # shift data according to minimum possible value
    if signed:
        elevation = elevation - world.sea_level()  # elevation 0.0 now refers to sea-level
    else:
        elevation -= elevation.min()  # lowest point at 0.0

    # rescale data (currently integer-types only)
    if normalize:
        # elevation maps usually have a range of 0 to 10, maybe 15 -
        # rescaling for integers is essential
        if signed:
            elevation *= (2**(bpp - 1) - 1) / max(abs(elevation.min()),
                                                  abs(elevation.max()))
        else:
            elevation *= (2**bpp - 1) / abs(elevation.max())

    # round data (integer-types only)
    if numpy_type != numpy.float32 and numpy_type != numpy.float64:
        elevation = elevation.round()

    # switch to final data type; no rounding performed
    elevation = elevation.astype(numpy_type)

    # take elevation data and push it into an intermediate GTiff format
    # (some formats don't support being written by Create())
    inter_driver = gdal.GetDriverByName("GTiff")
    fh_inter_file, inter_file = tempfile.mkstemp()  # (file-handle, absolute path)
    initial_ds = inter_driver.Create(inter_file, world.width, world.height, 1,
                                     gdal_type)
    band = initial_ds.GetRasterBand(1)
    band.WriteArray(elevation)
    band = None  # dereference band
    initial_ds = None  # save/flush and close

    # take the intermediate GTiff format and convert to final format
    initial_ds = gdal.Open(inter_file)
    final_driver.CreateCopy('%s-%d.%s' % (path, bpp, export_filetype),
                            initial_ds)
    initial_ds = None
    os.close(fh_inter_file)
    os.remove(inter_file)
def UsageFormat():
    """Print the short name of every registered GDAL driver, one per line."""
    print('Valid formats:')
    for idx in range(gdal.GetDriverCount()):
        print(gdal.GetDriver(idx).ShortName)
"The default is not to copy the original selected 10m bands into the output file in addition " "to the super-resolved bands. If this flag is used, the output file may be used as a 10m " "version of the original Sentinel-2 file.") parser.add_argument( "--save_prefix", default="", help= "If set, speficies the name of a prefix for all output files. Use a trailing / to save into a " "directory. The default of no prefix will save into the current directory. " "Example: --save_prefix result/") args = parser.parse_args() globals().update(args.__dict__) if list_output_file_formats: dcount = gdal.GetDriverCount() for didx in range(dcount): driver = gdal.GetDriver(didx) if driver: metadata = driver.GetMetadata() if (gdal.DCAP_CREATE in (driver and metadata) and metadata[gdal.DCAP_CREATE] == 'YES' and gdal.DCAP_RASTER in metadata and metadata[gdal.DCAP_RASTER] == 'YES'): name = driver.GetDescription() if "DMD_LONGNAME" in metadata: name += ": " + metadata["DMD_LONGNAME"] else: name = driver.GetDescription() if "DMD_EXTENSIONS" in metadata: name += " (" + metadata["DMD_EXTENSIONS"] + ")"
def getSupportedRasters(self):
    """Return a dict describing the raster formats supported by GDAL.

    Keyed by the driver's long name; each value is a dict with keys
    'EXTENSIONS' (QStringList of file globs), 'LONGNAME', 'SHORTNAME'
    and 'DESCRIPTION'.  The result is built once and cached on
    ``self.supportedRasters``.

    Fixes over the previous version:
    - ``is (not) None`` instead of ``== None`` / ``!= None``;
    - ``key in metadata`` instead of ``metadata.has_key(key)`` (removed
      in Python 3, works identically in Python 2);
    - raw strings for the regex patterns (``'\\('`` is an invalid escape
      sequence in Python 3).
    """
    # Return the cached mapping if it has been built already.
    if self.supportedRasters is not None:
        return self.supportedRasters

    # Make sure the GDAL driver manager has been populated.
    if gdal.GetDriverCount() == 0:
        gdal.AllRegister()

    self.supportedRasters = dict()
    jp2Driver = None  # only the first JP2 driver found is kept (it's the one GDAL uses)

    # For each loaded GDAL driver...
    for i in range(gdal.GetDriverCount()):
        driver = gdal.GetDriver(i)
        if driver is None:
            QgsLogger.warning("unable to get driver " + QString.number(i))
            continue

        # Now we need to see if the driver is for something currently
        # supported; if not, we give it a miss for the next driver.
        # Strip any trailing "(...)" annotation from the driver names.
        longName = QString(driver.LongName).remove(
            QRegExp(r'\(.*$')).trimmed()
        shortName = QString(driver.ShortName).remove(
            QRegExp(r'\(.*$')).trimmed()
        extensions = QString()
        description = QString(driver.GetDescription())
        glob = QStringList()
        metadata = driver.GetMetadata()
        if gdal.DMD_EXTENSION in metadata:
            extensions = QString(metadata[gdal.DMD_EXTENSION])

        if not longName.isEmpty():
            if not extensions.isEmpty():
                # XXX add check for SDTS; in that case we want (*CATD.DDF)
                # The metadata lists alternatives separated by "/".
                glob << QString("*." + extensions.replace("/", " *.")).split(" ")

                # Add only the first JP2 driver found to the filter list
                # (it's the one GDAL uses).
                if description == "JPEG2000" or description.startsWith(
                        "JP2"):  # JP2ECW, JP2KAK, JP2MrSID
                    if jp2Driver is not None:
                        continue  # skip if already found a JP2 driver
                    jp2Driver = driver  # first JP2 driver found
                    glob << "*.j2k"  # add alternate extension
                elif description == "GTiff":
                    glob << "*.tiff"
                elif description == "JPEG":
                    glob << "*.jpeg"
            else:
                # Drivers without a DMD_EXTENSION entry: only a few
                # well-known formats are accepted, by their usual globs.
                if description.startsWith("USGSDEM"):
                    # USGS DEMs use "*.dem"
                    glob << "*.dem"
                elif description.startsWith("DTED"):
                    # DTED use "*.dt0"
                    glob << "*.dt0"
                elif description.startsWith("MrSID"):
                    # MrSID use "*.sid"
                    glob << "*.sid"
                else:
                    continue

            self.supportedRasters[longName] = {
                'EXTENSIONS': glob,
                'LONGNAME': longName,
                'SHORTNAME': shortName,
                'DESCRIPTION': description
            }

    return self.supportedRasters
def test_pythondrivers_unsupported_api_version():
    """A python driver declaring an unsupported API version must not register."""
    n_before = gdal.GetDriverCount()
    with gdaltest.config_option('GDAL_PYTHON_DRIVER_PATH',
                                'data/pydrivers/unsupportedapiversion'):
        gdal.AllRegister()
    # The driver count must be unchanged: registration was refused.
    assert gdal.GetDriverCount() == n_before
def misc_12():
    """Exercise CreateCopy() to a non-existing path for every creatable raster
    driver, to detect crashes, and (when gdal_translate is available and
    DEBUG_VSIMALLOC_STATS is defined) memory leaks.

    Returns 'skip' on GDAL < 1.9 (would crash there), 'success' otherwise.
    """
    if int(gdal.VersionInfo('VERSION_NUM')) < 1900:
        gdaltest.post_reason('would crash')
        return 'skip'

    import test_cli_utilities
    gdal_translate_path = test_cli_utilities.get_gdal_translate_path()

    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        md = drv.GetMetadata()
        # BUGFIX: 'or' binds looser than 'and', so the previous condition
        # ('DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md and 'DCAP_RASTER' in md)
        # also selected non-raster drivers that only advertise DCAP_CREATECOPY.
        # Group the creation capabilities before requiring DCAP_RASTER.
        if ('DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md) and 'DCAP_RASTER' in md:
            # Some drivers insist on a particular extension / filename layout.
            ext = ''
            if drv.ShortName == 'GTX':
                ext = '.gtx'
            elif drv.ShortName == 'RST':
                ext = '.rst'
            elif drv.ShortName == 'SAGA':
                ext = '.sdat'
            elif drv.ShortName == 'ECW':
                ext = '.ecw'
            elif drv.ShortName == 'KMLSUPEROVERLAY':
                ext = '.kmz'
            elif drv.ShortName == 'ADRG':
                ext = '/ABCDEF01.GEN'
            elif drv.ShortName == 'SRTMHGT':
                ext = '/N48E002.HGT'

            # Per-driver band count / data type / raster size constraints.
            nbands = 1
            if drv.ShortName == 'WEBP' or drv.ShortName == 'ADRG':
                nbands = 3

            datatype = gdal.GDT_Byte
            if drv.ShortName == 'BT' or drv.ShortName == 'BLX':
                datatype = gdal.GDT_Int16
            elif drv.ShortName == 'GTX' or drv.ShortName == 'NTv2' or drv.ShortName == 'Leveller':
                datatype = gdal.GDT_Float32

            size = 1201
            if drv.ShortName == 'BLX':
                size = 128

            # Build an in-memory georeferenced source dataset.
            src_ds = gdal.GetDriverByName('GTiff').Create(
                '/vsimem/misc_12_src.tif', size, size, nbands, datatype)
            set_gt = (2, 1.0 / size, 0, 49, 0, -1.0 / size)
            src_ds.SetGeoTransform(set_gt)
            src_ds.SetProjection(
                'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.01745329251994328]]'
            )

            # Test to detect crashes: the copy must fail cleanly, not abort.
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            ds = drv.CreateCopy('/nonexistingpath/nonexistingfile' + ext, src_ds)
            gdal.PopErrorHandler()
            ds = None

            if gdal_translate_path is not None:
                # Test to detect memleaks: run gdal_translate out of process
                # on a VRT wrapper of the source dataset.
                ds = gdal.GetDriverByName('VRT').CreateCopy(
                    'tmp/misc_12.vrt', src_ds)
                (out, _err) = gdaltest.runexternal_out_and_err(
                    gdal_translate_path + ' -of ' + drv.ShortName +
                    ' tmp/misc_12.vrt /nonexistingpath/nonexistingfile' + ext,
                    check_memleak=False)
                ds = None
                gdal.Unlink('tmp/misc_12.vrt')

                # If DEBUG_VSIMALLOC_STATS is defined, this is an easy way
                # to catch some memory leaks.
                if out.find('VSIMalloc + VSICalloc - VSIFree') != -1 and \
                        out.find('VSIMalloc + VSICalloc - VSIFree : 0') == -1:
                    # Rasterlite is known to keep exactly one allocation alive.
                    if drv.ShortName == 'Rasterlite' and out.find(
                            'VSIMalloc + VSICalloc - VSIFree : 1') != -1:
                        pass
                    else:
                        print('memleak detected for driver %s' % drv.ShortName)

            src_ds = None

    gdal.Unlink('/vsimem/misc_12_src.tif')

    return 'success'