def raster_size(ds, aux_bands=0):
    # Multiple types not supported, so get first band type
    data_type = ds.GetRasterBand(1).DataType
    data_type_bytes = gdal.GetDataTypeSize(data_type) // 8
    size = ds.RasterXSize * ds.RasterYSize * data_type_bytes * (
        ds.RasterCount + aux_bands)
    return size
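# A minimal usage sketch for raster_size(); 'example.tif' is a hypothetical
# input file and the function above is assumed to be in scope:
from osgeo import gdal

ds = gdal.Open('example.tif')
print(raster_size(ds))               # bytes needed to hold all bands in memory
print(raster_size(ds, aux_bands=1))  # allow room for one extra working band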
def __init__(self, filename):

    # Attempt to open the dataset
    self._dataset = openDataset(filename)

    # Retrieve the image dimensions
    self._width = self._dataset.RasterXSize
    self._height = self._dataset.RasterYSize
    self._channels = self._dataset.RasterCount

    # Create our `shape` attribute
    if self._channels > 1:
        self.shape = (self._height, self._width, self._channels)
    else:
        self.shape = (self._height, self._width)

    # Determine the number of bytes required to store the raster data
    # (gdal.GetDataTypeSize() returns bits per pixel, so divide by 8)
    dtype = self._dataset.GetRasterBand(1).DataType
    requiredBytes = (self._width * self._height * self._channels *
                     gdal.GetDataTypeSize(dtype) // 8)

    # Determine if there is sufficient available memory to store the raster data
    vmem = psutil.virtual_memory()
    safetyBuffer = 100 * 1024 * 1024
    if (vmem.available - safetyBuffer) > requiredBytes:
        self._raster = rasterFromDataset(self._dataset)
    else:
        self._raster = None
def CreateBILRawRasterVRT(filename, nbands, cols, rows, datatype, nbits,
                          nodata=None, headeroffset=0, byteorder=None,
                          relativeToVRT=0):
    '''Create RawRaster VRT from a BIL

       BIL = Band-Interleaved-by-Line or Row-Interleaved

       For further info on VRT's, see the U{GDAL VRT Tutorial<http://www.gdal.org/gdal_vrttut.html>}

       @type filename:      C{str}
       @param filename:     File name
       @type nbands:        C{int}
       @param nbands:       The number of bands in the output VRT (<= nbands in input file)
       @type cols:          C{int}
       @param cols:         The number of columns in the output VRT
       @type rows:          C{int}
       @param rows:         The number of rows in the output VRT
       @type datatype:      C{str}
       @param datatype:     GDAL datatype name. Eg. Byte, Int32, UInt16
       @type nodata:        C{float}
       @param nodata:       No data/Null value
       @type headeroffset:  C{int}
       @param headeroffset: Number of bytes to skip at the start of the file
       @type byteorder:     C{str}
       @param byteorder:    Byte order of the file (MSB or LSB)
       @rtype:              C{xml}
       @return:             VRT XML string
    '''
    try:
        vrt = []
        nbits = gdal.GetDataTypeSize(gdal.GetDataTypeByName(datatype))
        for i in range(nbands):
            vrt.append(' <VRTRasterBand dataType="%s" band="%s" subClass="VRTRawRasterBand">' % (datatype, i + 1))
            vrt.append(' <SourceFilename relativeToVRT="%s">%s</SourceFilename>' % (relativeToVRT, filename))
            if nodata is not None:
                vrt.append(' <NoDataValue>%s</NoDataValue>' % (nodata)) #Fix for Issue 17
            vrt.append(' <ImageOffset>%s</ImageOffset>' % (headeroffset + nbits // 8 * i * cols))
            vrt.append(' <PixelOffset>%s</PixelOffset>' % (nbits // 8))
            vrt.append(' <LineOffset>%s</LineOffset>' % (nbits // 8 * cols))
            if byteorder:
                vrt.append(' <ByteOrder>%s</ByteOrder>' % (byteorder))
            vrt.append(' </VRTRasterBand>')
        return CreateCustomVRT('\n'.join(vrt), cols, rows)
    except:
        return None
def bytes_per_pixel(self, band=0):
    """
    Returns
    -------
    int:
        the number of bytes per pixel
    """
    self.__asert_open()
    return gdal.GetDataTypeSize(self._gdal_type(band)) // 8
def gettiledata(self, tile_num):
    #print('Reading tile %d' % tile_num)
    tile_y = tile_num // rast.tile_x_count
    tile_x = tile_num - tile_y * rast.tile_x_count
    xoff = tile_x * rast.tile_width
    yoff = tile_y * rast.tile_height
    xsize = rast.tile_width
    ysize = rast.tile_height
    if xoff + xsize > rast.width:
        xsize = rast.width - xoff
    if yoff + ysize > rast.height:
        ysize = rast.height - yoff
    dt = rast.datatype
    dtsize = gdal.GetDataTypeSize(dt) // 8
    buf_pixel_space = dtsize * rast.num_bands
    buf_line_space = buf_pixel_space * rast.tile_width
    buf_band_space = dtsize if rast.num_bands > 1 else None
    if xsize < rast.tile_width or ysize < rast.tile_height:
        if gdal_3_3:
            buf_obj = bytearray(b'\x00' * self.tilesize())
            buf_obj = rast.ds.ReadRaster(xoff, yoff, xsize, ysize,
                                         buf_obj=buf_obj,
                                         buf_xsize=xsize,
                                         buf_ysize=ysize,
                                         buf_pixel_space=buf_pixel_space,
                                         buf_line_space=buf_line_space,
                                         buf_band_space=buf_band_space)
        else:
            buf_obj = rast.ds.ReadRaster(xoff, yoff, xsize, ysize)
            tmp_ds = gdal.GetDriverByName('MEM').Create(
                '', rast.tile_width, rast.tile_height, rast.num_bands, dt)
            tmp_ds.WriteRaster(0, 0, xsize, ysize, buf_obj)
            buf_obj = tmp_ds.ReadRaster(0, 0, rast.tile_width, rast.tile_height,
                                        buf_pixel_space=buf_pixel_space,
                                        buf_line_space=buf_line_space,
                                        buf_band_space=buf_band_space)
    else:
        buf_obj = rast.ds.ReadRaster(xoff, yoff, xsize, ysize,
                                     buf_pixel_space=buf_pixel_space,
                                     buf_line_space=buf_line_space,
                                     buf_band_space=buf_band_space)
    return buf_obj
def CreateRawRasterVRT(bands, cols, rows, datatype, headeroffset=0,
                       byteorder=None, relativeToVRT=0, nodata=None):
    '''Create RawRaster VRT from one or more _single_ band files

       For further info on VRT's, see the U{GDAL VRT Tutorial<http://www.gdal.org/gdal_vrttut.html>}

       @type bands:         C{[str,...,str]}
       @param bands:        List of files. The first file becomes the first band and so forth.
       @type cols:          C{int}
       @param cols:         The number of columns in the output VRT
       @type rows:          C{int}
       @param rows:         The number of rows in the output VRT
       @type datatype:      C{str}
       @param datatype:     GDAL datatype name. Eg. Byte, Int32, UInt16
       @type headeroffset:  C{int}
       @param headeroffset: Number of bytes to skip at the start of the file
       @type byteorder:     C{str}
       @param byteorder:    Byte order of the file (MSB or LSB)
       @rtype:              C{xml}
       @return:             VRT XML string
    '''
    try:
        vrt = []
        nbits = gdal.GetDataTypeSize(gdal.GetDataTypeByName(datatype))
        for i, band in enumerate(bands):
            vrt.append(' <VRTRasterBand dataType="%s" band="%s" subClass="VRTRawRasterBand">' % (datatype, i + 1))
            vrt.append(' <SourceFilename relativeToVRT="%s">%s</SourceFilename>' % (relativeToVRT, band))
            vrt.append(' <ImageOffset>%s</ImageOffset>' % (headeroffset))
            vrt.append(' <PixelOffset>%s</PixelOffset>' % (nbits // 8))
            vrt.append(' <LineOffset>%s</LineOffset>' % (nbits // 8 * cols))
            if nodata is not None:
                vrt.append(' <NoDataValue>%s</NoDataValue>' % (nodata)) #Fix for Issue 17
            if byteorder:
                vrt.append(' <ByteOrder>%s</ByteOrder>' % (byteorder))
            vrt.append(' </VRTRasterBand>')
        return CreateCustomVRT('\n'.join(vrt), cols, rows)
    except:
        return None
def get_chunks_from_raster_envelope(rds, re, max_size=250000000):
    """
    A generator which returns all 'chunks' for a RasterDataset within a
    raster envelope using whole rows.  The largest group of rows that is
    under max_size is returned

    Parameters
    ----------
    rds : RasterDataset
        A raster band from a valid gdal.Dataset

    re : RasterEnvelope
        A raster envelope which should be a subset of the band window

    Keywords
    --------
    max_size : int
        The maximum size (in bytes) of the chunk to be returned

    Yields
    ------
    out : RasterBlock instance
        The current raster chunk within the band
    """

    # Get the offsets from the rds envelope to the re envelope
    rd_env = rds.env
    rd_x_off = int((re.x_min - rd_env.x_min) / rd_env.cell_size)
    rd_y_off = int((rd_env.y_max - re.y_max) / rd_env.cell_size)

    # Calculate the size of a row's worth of data.  All chunks should be
    # complete rows.  GetDataTypeSize() returns bits, hence the division by 8.
    x_chunk_size = re.n_cols
    x_offset = rd_x_off
    num_bytes_row = x_chunk_size * gdal.GetDataTypeSize(rds.data_type) // 8

    # Now calculate how many rows to retrieve per call
    y_chunk_size = int(math.floor(float(max_size) / num_bytes_row))

    # Figure out how many batches this represents
    y_grid_size = re.n_rows
    y_chunks = (y_grid_size + (y_chunk_size - 1)) / y_chunk_size

    for j in xrange(y_chunks):
        if (j + 1) * y_chunk_size <= y_grid_size:
            y_size = y_chunk_size
        else:
            y_size = y_grid_size - (j * y_chunk_size)
        y_offset = rd_y_off + (j * y_chunk_size)
        yield RasterBlock(rds.rb, x_chunk_size, y_size, x_offset, y_offset)
def __init__(self, ds):
    self.ds = ds
    self.width = self.ds.RasterXSize
    self.height = self.ds.RasterYSize
    self.datatype = self.ds.GetRasterBand(1).DataType
    self.bitspersample = gdal.GetDataTypeSize(self.datatype)
    self.num_bands = self.ds.RasterCount
    self.tile_width = 512
    self.tile_height = 512
    self.tile_x_count = (self.width + self.tile_width - 1) // self.tile_width
    self.tile_y_count = (self.height + self.tile_height - 1) // self.tile_height
    self.tile_count = self.tile_x_count * self.tile_y_count
def _calculate_scale_offset(nodata, band):
    """
    This method comes from the old ULA codebase.
    """
    nbits = gdal.GetDataTypeSize(band.DataType)
    df_scale_dst_min, df_scale_dst_max = 0.0, 255.0
    if nbits == 16:
        count = 32767 + nodata
        histogram = band.GetHistogram(-32767, 32767, 65536)
    else:
        count = 0
        histogram = band.GetHistogram()

    df_scale_src_min = count
    total = 0

    cliplower = int(0.01 * (sum(histogram) - histogram[count]))
    clipupper = int(0.99 * (sum(histogram) - histogram[count]))

    while total < cliplower and count < len(histogram) - 1:
        count += 1
        total += int(histogram[count])
        df_scale_src_min = count

    if nbits == 16:
        count = 32767 + nodata
    else:
        count = 0

    total = 0
    df_scale_src_max = count

    while total < clipupper and count < len(histogram) - 1:
        count += 1
        total += int(histogram[count])
        df_scale_src_max = count

    if nbits == 16:
        df_scale_src_min -= 32768
        df_scale_src_max -= 32768

    # Determine gain and offset
    diff_ = df_scale_src_max - df_scale_src_min

    # From the old Jobmanager codebase: avoid divide by zero caused by some stats.
    if diff_ == 0:
        _LOG.warning("dfScaleSrc Min and Max are equal! Applying correction")
        diff_ = 1

    df_scale = (df_scale_dst_max - df_scale_dst_min) / diff_
    df_offset = -1 * df_scale_src_min * df_scale + df_scale_dst_min

    return df_scale, df_offset
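# A minimal sketch of how the returned (scale, offset) pair might be applied to
# pixel data; the numpy-based apply_scale_offset() helper and the clip to the
# Byte range are assumptions for illustration, not part of the function above:
import numpy as np

def apply_scale_offset(arr, scale, offset):
    # out = in * scale + offset, clamped to the 0-255 destination range
    return np.clip(arr * scale + offset, 0, 255).astype(np.uint8)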
def GetDataTypeRange(datatype):
    '''
    Calculate data type range

    @type datatype:  C{int}
    @param datatype: gdal data type constant
    @rtype:          C{[int,int]}
    @return:         Min and max value for data type
    '''
    nbits = gdal.GetDataTypeSize(datatype)
    datatype = gdal.GetDataTypeName(datatype)
    if datatype[0:4] in ['Byte', 'UInt']:
        dfScaleSrcMin = 0               #Unsigned
        dfScaleSrcMax = 2**(nbits) - 1
    else:
        dfScaleSrcMin = -2**(nbits - 1) #Signed
        dfScaleSrcMax = 2**(nbits - 1) - 1
    return (dfScaleSrcMin, dfScaleSrcMax)
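# A minimal usage sketch for GetDataTypeRange() with a few GDAL type constants:
from osgeo import gdal

print(GetDataTypeRange(gdal.GDT_Byte))    # (0, 255)
print(GetDataTypeRange(gdal.GDT_UInt16))  # (0, 65535)
print(GetDataTypeRange(gdal.GDT_Int16))   # (-32768, 32767)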
def __opendataset__(self):
    bandlookup = {
        'pan': ['pan'],
        'blu': ['blu'],
        'grn': ['grn'],
        'red': ['red'],
        'nir': ['nir'],
        'bgrn': ['blu', 'grn', 'red', 'nir'],
        'bgr': ['blu', 'grn', 'red'],
        'rgb': ['red', 'grn', 'blu']
    }
    bandfiles = {}
    bandnames = []
    for d in self._datafiles:
        band = d.split('_')[2]
        bands = bandlookup.get(band, band)
        for band in bands:
            bandfiles[band] = os.path.join(
                os.path.dirname(self.fileinfo['filepath']), d)
            bandnames += [band]
    try:
        f = bandfiles['red']
        rgb = True
    except:
        f = bandfiles['pan']
        rgb = False
    ds = geometry.OpenDataset(f)
    rb = ds.GetRasterBand(1)
    cols = ds.RasterXSize
    rows = ds.RasterYSize
    datatype = gdal.GetDataTypeName(rb.DataType)
    nbits = gdal.GetDataTypeSize(rb.DataType)
    nbands = len(bandnames)
    bandnames = ','.join(bandnames)
    if rgb and bandfiles['red'] != bandfiles['blu']:
        ds = geometry.OpenDataset(
            geometry.CreateSimpleVRT(
                [bandfiles['red'], bandfiles['grn'], bandfiles['blu']],
                cols, rows, datatype))
    return ds, nbands, bandnames, f
def _make_tmp_vrt(self, filename, data=None, relativeToVRT=False):
    dirname = os.path.dirname(filename)
    os.makedirs(dirname, exist_ok=True)
    old_dirname = os.getcwd()
    if relativeToVRT:
        filename = os.path.basename(filename)
        os.chdir(dirname)
    try:
        driver = gdal.GetDriverByName('VRT')
        ds = driver.Create(filename, self.XSIZE, self.YSIZE, 0)
        if relativeToVRT:
            srcpath = os.path.basename(filename) + '.raw'
        else:
            srcpath = filename + '.raw'
        if data is not None:
            gdtype = NumericTypeCodeToGDALTypeCode(data.dtype)
        else:
            gdtype = gdal.GDT_Byte
        pixel_offset = gdal.GetDataTypeSize(gdtype) // 8
        options = {
            'subClass': 'VRTRawRasterBand',
            'SourceFilename': srcpath,
            'ImageOffset': 0,
            'PixelOffset': pixel_offset,
            'LineOffset': pixel_offset * self.XSIZE,
            'relativeToVRT': int(relativeToVRT),
        }
        options = ['{}={}'.format(k, v) for k, v in options.items()]
        ds.AddBand(gdtype, options)
        if data is not None:
            b = ds.GetRasterBand(1)
            b.WriteArray(data)
        ds.FlushCache()
    finally:
        os.chdir(old_dirname)
def get_chunks(band, max_size=250000000):
    """
    A generator which returns all 'chunks' for a gdal.Band using whole rows.
    The largest group of rows that is under max_size is returned

    Parameters
    ----------
    band : gdal.Band
        A raster band from a valid gdal.Dataset

    Keywords
    --------
    max_size : int
        The maximum size (in bytes) of the chunk to be returned

    Yields
    ------
    out : RasterBlock instance
        The current raster chunk within the band
    """

    # Calculate the size of a row's worth of data.  All chunks should be
    # complete rows.  GetDataTypeSize() returns bits, hence the division by 8.
    x_chunk_size = band.XSize
    x_offset = 0
    num_bytes_row = x_chunk_size * gdal.GetDataTypeSize(band.DataType) // 8

    # Now calculate how many rows to retrieve per call
    y_chunk_size = int(math.floor(float(max_size) / num_bytes_row))

    # Figure out how many batches this represents
    y_grid_size = band.YSize
    y_chunks = (y_grid_size + (y_chunk_size - 1)) / y_chunk_size

    for j in xrange(y_chunks):
        if (j + 1) * y_chunk_size <= y_grid_size:
            y_size = y_chunk_size
        else:
            y_size = y_grid_size - (j * y_chunk_size)
        y_offset = j * y_chunk_size
        yield RasterBlock(band, x_chunk_size, y_size, x_offset, y_offset)
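# A minimal usage sketch for get_chunks(); 'elevation.tif' is a hypothetical
# input file, and a RasterBlock class is assumed to be defined elsewhere:
from osgeo import gdal

ds = gdal.Open('elevation.tif')
band = ds.GetRasterBand(1)
for block in get_chunks(band, max_size=64 * 1024 * 1024):
    # each block covers complete rows of the band; process it here
    pass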
def my_pyDerivedPixelFunc(papoSources, nSources, pData, nBufXSize, nBufYSize,
                          eSrcType, eBufType, nPixelSpace, nLineSpace):
    if nSources != 1:
        print(nSources)
        print('did not get expected nSources')
        return 1

    srcctype = GDALTypeToCTypes(eSrcType)
    if srcctype is None:
        print(eSrcType)
        print('did not get expected eSrcType')
        return 1

    dstctype = GDALTypeToCTypes(eBufType)
    if dstctype is None:
        print(eBufType)
        print('did not get expected eBufType')
        return 1

    if nPixelSpace != gdal.GetDataTypeSize(eBufType) / 8:
        print(nPixelSpace)
        print('did not get expected nPixelSpace')
        return 1

    if (nLineSpace % nPixelSpace) != 0:
        print(nLineSpace)
        print('did not get expected nLineSpace')
        return 1

    nLineStride = (int)(nLineSpace / nPixelSpace)

    srcValues = ctypes.cast(papoSources[0], ctypes.POINTER(srcctype))
    dstValues = ctypes.cast(pData, ctypes.POINTER(dstctype))

    for j in range(nBufYSize):
        for i in range(nBufXSize):
            dstValues[j * nLineStride + i] = srcValues[j * nBufXSize + i]

    return 0
def main_loop():
    server_ds = None
    server_bands = []
    gdal.SetConfigOption('GDAL_API_PROXY', 'NO')

    while 1:
        sys.stdout.flush()

        instr = read_int()
        if VERBOSE:
            sys.stderr.write('instr=%d\n' % instr)

        band = None
        if instr >= INSTR_Band_First and instr <= INSTR_Band_End:
            srv_band = read_int()
            band = server_bands[srv_band]

        if instr == INSTR_GetGDALVersion:
            if sys.version_info >= (3, 0, 0):
                lsb = struct.unpack('B', sys.stdin.read(1).encode('latin1'))[0]
            else:
                lsb = struct.unpack('B', sys.stdin.read(1))[0]
            ver = read_str()
            vmajor = read_int()
            vminor = read_int()
            protovmajor = read_int()
            protovminor = read_int()
            extra_bytes = read_int()
            if VERBOSE:
                sys.stderr.write('lsb=%d\n' % lsb)
                sys.stderr.write('ver=%s\n' % ver)
                sys.stderr.write('vmajor=%d\n' % vmajor)
                sys.stderr.write('vminor=%d\n' % vminor)
                sys.stderr.write('protovmajor=%d\n' % protovmajor)
                sys.stderr.write('protovminor=%d\n' % protovminor)
                sys.stderr.write('extra_bytes=%d\n' % extra_bytes)

            write_str('2.1dev')
            write_int(2)  # vmajor
            write_int(1)  # vminor
            write_int(3)  # protovmajor
            write_int(0)  # protovminor
            write_int(0)  # extra bytes
            continue

        elif instr == INSTR_EXIT:
            server_ds = None
            server_bands = []
            write_marker()
            write_int(1)
            sys.exit(0)
        elif instr == INSTR_EXIT_FAIL:
            server_ds = None
            server_bands = []
            write_marker()
            write_int(1)
            sys.exit(1)
        elif instr == INSTR_SetConfigOption:
            key = read_str()
            val = read_str()
            gdal.SetConfigOption(key, val)
            if VERBOSE:
                sys.stderr.write('key=%s\n' % key)
                sys.stderr.write('val=%s\n' % val)
            continue
        elif instr == INSTR_Reset:
            # if server_ds is not None:
            #     sys.stderr.write('Reset(%s)\n' % server_ds.GetDescription())
            server_ds = None
            server_bands = []
            write_marker()
            write_int(1)
        elif instr == INSTR_Open:
            access = read_int()
            filename = read_str()
            cwd = read_str()
            open_options = read_strlist()
            if cwd is not None:
                os.chdir(cwd)
            if VERBOSE:
                sys.stderr.write('access=%d\n' % access)
                sys.stderr.write('filename=%s\n' % filename)
                sys.stderr.write('cwd=%s\n' % cwd)
                sys.stderr.write('open_options=%s\n' % str(open_options))
            # sys.stderr.write('Open(%s)\n' % filename)
            try:
                server_ds = GDALPythonServerDataset(filename, access, open_options)
            except:
                server_ds = None
            write_marker()
            if server_ds is None:
                write_int(0)  # Failure
            else:
                write_int(1)   # Success
                write_int(16)  # caps length
                caps = [0 for i in range(16)]
                for cap in caps_list:
                    caps[int(cap / 8)] = caps[int(cap / 8)] | (1 << (cap % 8))
                for i in range(16):
                    sys.stdout.write(struct.pack('B', caps[i]))  # caps
                write_str(server_ds.GetDescription())
                drv = server_ds.GetDriver()
                if drv is not None:
                    write_str(drv.GetDescription())
                    write_int(0)  # End of driver metadata
                else:
                    write_str(None)
                write_int(server_ds.RasterXSize)  # X
                write_int(server_ds.RasterYSize)  # Y
                write_int(server_ds.RasterCount)  # Band count
                write_int(1)  # All bands are identical
                if server_ds.RasterCount > 0:
                    write_band(server_ds.GetRasterBand(1), len(server_bands))
                    for i in range(server_ds.RasterCount):
                        server_bands.append(server_ds.GetRasterBand(i + 1))
        elif instr == INSTR_Identify:
            filename = read_str()
            cwd = read_str()
            dr = gdal.IdentifyDriver(filename)
            write_marker()
            if dr is None:
                write_int(0)
            else:
                write_int(1)
        elif instr == INSTR_Create:
            filename = read_str()
            cwd = read_str()
            read_int()      # xsize =
            read_int()      # ysize =
            read_int()      # bands =
            read_int()      # datatype =
            read_strlist()  # options =
            write_marker()
            # FIXME
            write_int(0)
        elif instr == INSTR_CreateCopy:
            filename = read_str()
            read_str()      # src_description =
            cwd = read_str()
            read_int()      # strict =
            read_strlist()  # options =
            # FIXME
            write_int(0)
        elif instr == INSTR_QuietDelete:
            filename = read_str()
            cwd = read_str()
            write_marker()
            # FIXME
        elif instr == INSTR_GetGeoTransform:
            gt = server_ds.GetGeoTransform()
            write_marker()
            if gt is not None:
                write_int(CE_None)
                write_int(6 * 8)
                for i in range(6):
                    write_double(gt[i])
            else:
                write_int(CE_Failure)
        elif instr == INSTR_GetProjectionRef:
            write_marker()
            write_str(server_ds.GetProjectionRef())
        elif instr == INSTR_GetGCPCount:
            write_marker()
            write_int(server_ds.GetGCPCount())
        elif instr == INSTR_GetFileList:
            write_marker()
            fl = server_ds.GetFileList()
            write_int(len(fl))
            for f in fl:
                write_str(f)
        elif instr == INSTR_GetMetadata:
            domain = read_str()
            md = server_ds.GetMetadata(domain)
            write_marker()
            write_int(len(md))
            for key in md:
                write_str('%s=%s' % (key, md[key]))
        elif instr == INSTR_GetMetadataItem:
            key = read_str()
            domain = read_str()
            val = server_ds.GetMetadataItem(key, domain)
            write_marker()
            write_str(val)
        elif instr == INSTR_IRasterIO_Read:
            nXOff = read_int()
            nYOff = read_int()
            nXSize = read_int()
            nYSize = read_int()
            nBufXSize = read_int()
            nBufYSize = read_int()
            nBufType = read_int()
            nBandCount = read_int()
            panBandMap = []
            read_int()  # size =
            for i in range(nBandCount):
                panBandMap.append(read_int())
            nPixelSpace = read_bigint()
            nLineSpace = read_bigint()
            nBandSpace = read_bigint()
            val = server_ds.IRasterIO_Read(nXOff, nYOff, nXSize, nYSize,
                                           nBufXSize, nBufYSize, nBufType,
                                           panBandMap, nPixelSpace, nLineSpace,
                                           nBandSpace)
            write_marker()
            if val is None:
                write_int(CE_Failure)
                write_int(0)
            else:
                write_int(CE_None)
                write_int(len(val))
                sys.stdout.write(val)
        elif instr == INSTR_FlushCache:
            if server_ds is not None:
                server_ds.FlushCache()
            write_marker()
        elif instr == INSTR_Band_FlushCache:
            val = band.FlushCache()
            write_marker()
            write_int(val)
        elif instr == INSTR_Band_GetCategoryNames:
            write_marker()
            # FIXME
            write_int(-1)
        elif instr == INSTR_Band_GetMetadata:
            domain = read_str()
            md = band.GetMetadata(domain)
            write_marker()
            write_int(len(md))
            for key in md:
                write_str('%s=%s' % (key, md[key]))
        elif instr == INSTR_Band_GetMetadataItem:
            key = read_str()
            domain = read_str()
            val = band.GetMetadataItem(key, domain)
            write_marker()
            write_str(val)
        elif instr == INSTR_Band_GetColorInterpretation:
            val = band.GetColorInterpretation()
            write_marker()
            write_int(val)
        elif instr == INSTR_Band_GetNoDataValue:
            val = band.GetNoDataValue()
            write_marker()
            if val is None:
                write_int(0)
                write_double(0)
            else:
                write_int(1)
                write_double(val)
        elif instr == INSTR_Band_GetMinimum:
            val = band.GetMinimum()
            write_marker()
            if val is None:
                write_int(0)
                write_double(0)
            else:
                write_int(1)
                write_double(val)
        elif instr == INSTR_Band_GetMaximum:
            val = band.GetMaximum()
            write_marker()
            if val is None:
                write_int(0)
                write_double(0)
            else:
                write_int(1)
                write_double(val)
        elif instr == INSTR_Band_GetOffset:
            val = band.GetOffset()
            write_marker()
            if val is None:
                write_int(0)
                write_double(0)
            else:
                write_int(1)
                write_double(val)
        elif instr == INSTR_Band_GetScale:
            val = band.GetScale()
            write_marker()
            if val is None:
                write_int(0)
                write_double(1)  # default value is 1
            else:
                write_int(1)
                write_double(val)
        elif instr == INSTR_Band_IReadBlock:
            nXBlockOff = read_int()
            nYBlockOff = read_int()
            val = band.IReadBlock(nXBlockOff, nYBlockOff)
            write_marker()
            if val is None:
                write_int(CE_Failure)
                length = band.BlockXSize * band.BlockYSize * (gdal.GetDataTypeSize(band.DataType) / 8)
                write_int(length)
                sys.stdout.write(''.join('\0' for i in range(length)))
            else:
                write_int(CE_None)
                write_int(len(val))
                sys.stdout.write(val)
        elif instr == INSTR_Band_IRasterIO_Read:
            nXOff = read_int()
            nYOff = read_int()
            nXSize = read_int()
            nYSize = read_int()
            nBufXSize = read_int()
            nBufYSize = read_int()
            nBufType = read_int()
            val = band.IRasterIO_Read(nXOff, nYOff, nXSize, nYSize,
                                      nBufXSize, nBufYSize, nBufType)
            write_marker()
            if val is None:
                write_int(CE_Failure)
                write_int(0)
            else:
                write_int(CE_None)
                write_int(len(val))
                sys.stdout.write(val)
        elif instr == INSTR_Band_GetStatistics:
            approx_ok = read_int()
            force = read_int()
            val = band.GetStatistics(approx_ok, force)
            write_marker()
            if val is None or val[3] < 0:
                write_int(CE_Failure)
            else:
                write_int(CE_None)
                write_double(val[0])
                write_double(val[1])
                write_double(val[2])
                write_double(val[3])
        elif instr == INSTR_Band_ComputeRasterMinMax:
            approx_ok = read_int()
            val = band.ComputeRasterMinMax(approx_ok)
            write_marker()
            if val is None:
                write_int(CE_Failure)
            else:
                write_int(CE_None)
                write_double(val[0])
                write_double(val[1])
        elif instr == INSTR_Band_GetHistogram:
            dfMin = read_double()
            dfMax = read_double()
            nBuckets = read_int()
            bIncludeOutOfRange = read_int()
            bApproxOK = read_int()
            val = band.GetHistogram(dfMin, dfMax, nBuckets,
                                    bIncludeOutOfRange, bApproxOK)
            write_marker()
            if val is None:
                write_int(CE_Failure)
            else:
                write_int(CE_None)
                write_int(len(val) * 8)
                for v in val:
                    write_uint64(v)
        # elif instr == INSTR_Band_GetDefaultHistogram:
        #     bForce = read_int()
        #     write_marker()
        #     write_int(CE_Failure)
        elif instr == INSTR_Band_HasArbitraryOverviews:
            val = band.HasArbitraryOverviews()
            write_marker()
            write_int(val)
        elif instr == INSTR_Band_GetOverviewCount:
            val = band.GetOverviewCount()
            write_marker()
            write_int(val)
        elif instr == INSTR_Band_GetOverview:
            iovr = read_int()
            ovr_band = band.GetOverview(iovr)
            write_marker()
            write_band(ovr_band, len(server_bands))
            if ovr_band is not None:
                server_bands.append(ovr_band)
        elif instr == INSTR_Band_GetMaskBand:
            msk_band = band.GetMaskBand()
            write_marker()
            write_band(msk_band, len(server_bands))
            if msk_band is not None:
                server_bands.append(msk_band)
        elif instr == INSTR_Band_GetMaskFlags:
            val = band.GetMaskFlags()
            write_marker()
            write_int(val)
        elif instr == INSTR_Band_GetColorTable:
            ct = band.GetColorTable()
            write_marker()
            write_ct(ct)
        elif instr == INSTR_Band_GetUnitType:
            val = band.GetUnitType()
            write_marker()
            write_str(val)
        # elif instr == INSTR_Band_GetDefaultRAT:
        #     write_marker()
        #     # FIXME
        #     write_int(0)
        else:
            break

        write_zero_error()
def _stretch_PERCENT(vrtcols, vrtrows, ds, bands, low, high):
    ''' Min, max percentage stretch.

        For further info on VRT's, see the U{GDAL VRT Tutorial<http://www.gdal.org/gdal_vrttut.html>}

        @type ds:      C{gdal.Dataset}
        @param ds:     A gdal dataset object
        @type bands:   C{[int,...,int]}
        @param bands:  A list of band numbers to output (in output order). E.g [4,2,1]
                       Band numbers are not zero indexed.
        @type low:     C{float}
        @param low:    Minimum percentage
        @type high:    C{float}
        @param high:   Maximum percentage
        @rtype:        C{xml}
        @return:       VRT XML string
    '''
    #if low >=1:
    if high > 1:
        low = low / 100.0
        high = high / 100.0
    vrt = []
    for bandnum, band in enumerate(bands):
        rb = ds.GetRasterBand(band)
        nodata = rb.GetNoDataValue()
        nbits = gdal.GetDataTypeSize(rb.DataType)
        dfScaleSrcMin, dfScaleSrcMax = GetDataTypeRange(rb.DataType)
        try:
            dfBandMin, dfBandMax, dfBandMean, dfBandStdDev = GetStatistics(rb, 1, 1)
        except:
            dfBandMin, dfBandMax, dfBandMean, dfBandStdDev = GetStatistics(rb, 0, 1)
        dfBandRange = dfBandMax - dfBandMin
        if nbits == 8 or 2 < dfBandRange <= 255:
            nbins = int(math.ceil(dfBandRange))
            binsize = 1
        else:
            nbins = 256
            binsize = int(math.ceil(dfBandRange / nbins))
            if binsize <= 2:
                binsize = dfBandRange / nbins
        #hs=rb.GetHistogram(dfBandMin+abs(dfBandMin)*0.0001,dfBandMax-abs(dfBandMax)*0.0001, nbins,include_out_of_range=1,approx_ok=0)
        #hs=rb.GetHistogram(dfBandMin+abs(dfBandMin)*0.0001,dfBandMax-abs(dfBandMax)*0.0001, nbins,include_out_of_range=1,approx_ok=1)
        hs = rb.GetHistogram(dfBandMin + abs(dfBandMin) * 0.0001,
                             dfBandMax - abs(dfBandMax) * 0.0001,
                             nbins, include_out_of_range=0, approx_ok=1)

        #Check that outliers haven't really skewed the histogram
        #this is a kludge to workaround datasets with multiple nodata values
        # for j in range(0,10):
        #     if len([v for v in hs if v > 0]) < nbins/4: #if only 25% of the bins have values...
        #         startbin=256
        #         lastbin=0
        #         for i,bin in enumerate(hs):
        #             if bin > 0:
        #                 lastbin=i
        #                 if i<startbin:startbin=i
        #         dfBandMin=dfBandMin+startbin*binsize
        #         dfBandMax=dfBandMin+lastbin*binsize+binsize
        #         #hs=rb.GetHistogram(dfBandMin-abs(dfBandMin)*0.0001,dfBandMax+abs(dfBandMax)*0.0001,include_out_of_range=1,approx_ok=0)
        #         hs=rb.GetHistogram(dfBandMin-abs(dfBandMin)*0.0001,dfBandMax+abs(dfBandMax)*0.0001,include_out_of_range=1,approx_ok=1)
        #         if nbits == 8:binsize=1
        #         else:binsize=(dfBandRange)/nbins
        #     else:break

        try:
            dfScaleSrcMin = max([dfScaleSrcMin,
                                 HistPercentileValue(hs, low, binsize, dfBandMin)])
            dfScaleSrcMax = min([dfScaleSrcMax,
                                 HistPercentileValue(hs, high, binsize, dfBandMin)])
            dfScaleDstMin, dfScaleDstMax = 0.0, 255.0  #Always going to be Byte for output jpegs
            dfScale = (dfScaleDstMax - dfScaleDstMin) / (dfScaleSrcMax - dfScaleSrcMin)
            dfOffset = -1 * dfScaleSrcMin * dfScale + dfScaleDstMin
        except:
            dfOffset = 0
            dfScale = 1

        vrt.append(' <VRTRasterBand dataType="Byte" band="%s">' % str(bandnum + 1))
        #if nodata is not None:vrt.append(' <NoDataValue>%s</NoDataValue>'%nodata)
        vrt.append(' <ComplexSource>')
        vrt.append(' <SourceFilename relativeToVRT="0">%s</SourceFilename>' % ds.GetDescription())
        vrt.append(' <SourceBand>%s</SourceBand>' % band)
        vrt.append(' <SrcRect xOff="0" yOff="0" xSize="%s" ySize="%s"/>' % (ds.RasterXSize, ds.RasterYSize))
        vrt.append(' <DstRect xOff="0" yOff="0" xSize="%s" ySize="%s"/>' % (vrtcols, vrtrows))
        vrt.append(' <ScaleOffset>%s</ScaleOffset>' % dfOffset)
        vrt.append(' <ScaleRatio>%s</ScaleRatio>' % dfScale)
        vrt.append(' </ComplexSource>')
        vrt.append(' </VRTRasterBand>')
    return '\n'.join(vrt)
def rescale(in_pathname, datatype, out_pathname=None, band_index=1, **kwargs):
    """
    Placeholder
    """
    # open input file
    in_ds = gdal.Open(in_pathname)
    if in_ds is not None:

        # create copy
        out_ds, out_pathname = createCopy(in_pathname, datatype,
                                          out_pathname=out_pathname, **kwargs)
        if out_ds is not None:

            # get bands and dimensions
            in_band = in_ds.GetRasterBand(band_index)
            out_band = out_ds.GetRasterBand(1)
            nCols, nRows = in_band.XSize, in_band.YSize

            # get min and max values from scale and offset
            type_max = 2**gdal.GetDataTypeSize(datatype) - 2
            data_min = out_band.GetOffset()
            data_max = out_band.GetOffset() + (type_max * out_band.GetScale())

            # for each row
            rowRange = range(nRows)
            for row in rowRange:

                # read row and transform
                in_data = in_band.ReadAsArray(0, row, nCols, 1)
                idx = ~np.isclose(in_data, in_band.GetNoDataValue())
                in_data[idx] = (in_data[idx] * in_band.GetScale()) + in_band.GetOffset()

                # compute rescaled data - clip to the valid range before
                # converting back to digital numbers so outliers saturate
                out_data = np.zeros((1, nCols))
                out_data[idx] = np.clip(in_data[idx], data_min, data_max)
                out_data[idx] = np.round(
                    (out_data[idx] - out_band.GetOffset()) / out_band.GetScale())
                out_data[~idx] = out_band.GetNoDataValue()

                # write array to output
                if datatype == gdal.GDT_UInt16:
                    out_data = np.uint16(out_data)
                if datatype == gdal.GDT_Byte:
                    out_data = np.uint8(out_data)

                out_band.WriteArray(out_data, 0, row)

            # close output
            out_ds.FlushCache()
            out_ds = None

        # close input
        in_ds = None

    return out_pathname
def createCopy(in_pathname, datatype, out_pathname=None, **kwargs):
    """
    Placeholder
    """
    out_ds = None

    # get default output name
    if out_pathname is None:
        out_pathname = in_pathname.replace('.tif', '-copy.tif')

    # delete existing file if overwrite enabled
    overwrite = kwargs.pop('overwrite', False)
    if os.path.exists(out_pathname) and overwrite is True:
        os.remove(out_pathname)

    # output file does not exist
    if not os.path.exists(out_pathname):

        # open input file
        in_ds = gdal.Open(in_pathname)
        if in_ds is not None:

            # get band and dimensions
            in_band = in_ds.GetRasterBand(1)
            nCols, nRows = in_band.XSize, in_band.YSize

            # create output object - single byte band
            driver = gdal.GetDriverByName("GTiff")
            out_ds = driver.Create(out_pathname, nCols, nRows, 1, datatype,
                                   kwargs.pop('options', None))
            if (out_ds is not None):
                out_band = out_ds.GetRasterBand(1)

                # set geotransform / projection as input
                out_ds.SetGeoTransform(in_ds.GetGeoTransform())
                out_ds.SetProjection(in_ds.GetProjection())

                # need nodata value for postgis
                no_data = 2**gdal.GetDataTypeSize(datatype) - 1
                out_band.SetNoDataValue(no_data)

                # get range - either defined or image stats
                data_min = kwargs.pop('data_min', None)
                data_max = kwargs.pop('data_max', None)
                if data_min is None or data_max is None:
                    stats = in_band.GetStatistics(True, True)
                    data_min = stats[0]
                    data_max = stats[1]

                # compute linear scale and offset
                out_band.SetScale((data_max - data_min) / (no_data - 1))
                out_band.SetOffset(data_min)

            # close input
            in_ds = None

    return out_ds, out_pathname
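# A minimal usage sketch tying rescale() and createCopy() together; the input
# path is hypothetical and gdal.GDT_Byte is just one possible target type:
from osgeo import gdal

out_path = rescale('input.tif', gdal.GDT_Byte, overwrite=True)
print('wrote', out_path)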
def getoverview(self, outfile=None, width=800, format='JPG'):
    '''
    Generate overviews for generic imagery

    @requires:exposing a gdal.Dataset object as self._gdaldataset

    @note: There are a number of ways of controlling the generation of overview images:
        - Overriding this class method and writing your own.
        - Setting self._stretch to the appropriate (stretch_type,rgb_bands,stretch_args) values. See L{overviews.getoverview}.
        - Customising self._gdaldataset using a VRT, for example: setting red, green and blue color interpretations for selected bands

    @type outfile: string
    @param outfile: a filepath to the output overview image. If supplied, format is determined from the file extension
    @type width: integer
    @param width: image width
    @type format: string
    @param format: format to generate overview image, one of ['JPG','PNG','GIF','BMP','TIF']. Not required if outfile is supplied.
    @return:
        - B{filepath} (if outfile is supplied) B{OR}
        - B{binary image data} (if outfile is not supplied)
    '''
    md = self.metadata
    ds = self._gdaldataset
    if not ds:
        raise AttributeError, 'No GDALDataset object available, overview image can not be generated'

    #Don't rely on the metadata as self._gdaldataset might be a custom VRT
    ##nbands=md['nbands']
    ##cols=md['cols']
    ##rows=md['rows']
    ##nbits=md['nbits']
    rb = ds.GetRasterBand(1)
    nbands = ds.RasterCount
    cols = ds.RasterXSize
    rows = ds.RasterYSize
    nbits = gdal.GetDataTypeSize(rb.DataType)
    datatype = gdal.GetDataTypeName(rb.DataType)
    stretch_type = None
    stretch_args = None
    rgb_bands = {}

    #Check for pre-defined stretch
    if self._stretch:
        stretch_type, rgb_bands, stretch_args = self._stretch
    else:
        #Check for pre-defined rgb bands, if 8 bit - assume they don't need stretching
        for i in range(1, nbands + 1):
            gci = ds.GetRasterBand(i).GetRasterColorInterpretation()
            if gci == gdal.GCI_RedBand:
                rgb_bands[0] = i
            elif gci == gdal.GCI_GreenBand:
                rgb_bands[1] = i
            elif gci == gdal.GCI_BlueBand:
                rgb_bands[2] = i
            if len(rgb_bands) == 3:
                rgb_bands = rgb_bands[0], rgb_bands[1], rgb_bands[2]  #Make a list from the dict
                if nbits == 8:
                    stretch_type = 'NONE'
                    stretch_args = []
                else:
                    stretch_type = 'STDDEV'
                    stretch_args = [2]
                break

    #Set some defaults
    if stretch_type is None or stretch_args is None:
        if nbands < 3:
            #Default - assume greyscale
            #stretch_type='PERCENT'
            #stretch_args=[2,98]
            stretch_type = 'STDDEV'
            stretch_args = [2]
            rgb_bands = [1]
            #But check if there's an attribute table or colour table
            #and change the stretch type to colour table
            if datatype in ['Byte', 'Int16', 'UInt16']:
                ct = rb.GetColorTable()
                at = rb.GetDefaultRAT()
                min, max = rb.ComputeRasterMinMax()
                if ct and ct.GetCount() > 0 and min >= 0:  #GDAL doesn't like colourtables with negative values
                    stretch_type = 'COLOURTABLE'
                    stretch_args = []
                elif at and at.GetRowCount() > 0 and at.GetRowCount() < 256:
                    stretch_type = 'RANDOM'
                    stretch_args = []
        elif nbands == 3:
            #Assume RGB
            if nbits > 8:
                stretch_type = 'PERCENT'
                stretch_args = [2, 98]
            else:
                stretch_type = 'NONE'
                stretch_args = []
            if len(rgb_bands) < 3:
                rgb_bands = [1, 2, 3]
        elif nbands >= 4:
            stretch_type = 'PERCENT'
            stretch_args = [2, 98]
            #stretch_type='STDDEV'
            #stretch_args=[2]
            if len(rgb_bands) < 3:
                rgb_bands = [3, 2, 1]
        if not rgb_bands:
            rgb_bands = [1]

    return overviews.getoverview(ds, outfile, width, format, rgb_bands,
                                 stretch_type, *stretch_args)
######### BAND 1 - BLUE
# Open the blue band in our image
blue = dataset.GetRasterBand(1)

print(blue)

# What is the band's datatype?
datatype = blue.DataType
print('Band datatype: {dt}'.format(dt=blue.DataType))

# If you recall from our discussion of enumerated types, this "3" we printed
# has a more useful definition for us to use
datatype_name = gdal.GetDataTypeName(blue.DataType)
print('Band datatype: {dt}'.format(dt=datatype_name))

# We can also ask how much space this datatype takes up
# (GetDataTypeSize() reports bits, so divide by 8 for bytes)
bytes = gdal.GetDataTypeSize(blue.DataType) // 8
print('Band datatype size: {b} bytes\n'.format(b=bytes))

# How about some band statistics?
# GetStatistics() returns (min, max, mean, stddev)
band_min, band_max, band_mean, band_stddev = blue.GetStatistics(0, 1)
print('Band range: {minimum} - {maximum}'.format(maximum=band_max,
                                                 minimum=band_min))
print('Band mean, stddev: {m}, {s}\n'.format(m=band_mean, s=band_stddev))

######### BAND 2 - GREEN
green = dataset.GetRasterBand(2)
print(green)

# What is the band's datatype?
datatype = green.DataType
def ParallelRCP(in_dem_path, out_dem_path, chunk_size, overlap, method,
                options, num_threads=1, verbose=False):
    '''
    Breaks a raster into smaller chunks for easier processing.

    This method determines the file parameters, prepares the output parameter,
    calculates the start/end indices for each chunk, and stores info about each
    chunk in a Chunk() object. This object is then passed to mp.pool() along
    with a call to ProcessSuperArray() to perform the actual processing in
    parallel.

    in_dem_path:    Full path to input raster.
    out_dem_path:   Full path to resulting raster.
    chunk_size:     Square dimension of data chunk to process.
    overlap:        Data to be read beyond dimensions of chunk_size to ensure
                    methods that require neighboring pixels produce accurate
                    results on the borders. Should be at least 2x any filter or
                    kernel size for any method (will automatically be set if
                    method is blur_gauss, blur_mean, clahe, or TPI).
    method:         Name of the raster processing tool to be run on the chunks.
    options:        Dictionary of opt, value pairs to be passed to the
                    processing tool. Any opts that don't apply to the specific
                    method will be ignored.
    num_threads:    The number of concurrent processes to be spawned by
                    mp.pool().
    verbose:        Flag to print out more information (including mdenoise
                    output)

    Returns the time needed to process the entire raster.
    '''
    start = datetime.datetime.now()

    # Method name and option checks
    if method == "blur_gauss":
        gauss_opts = ["radius", "sigma"]
        for opt in gauss_opts:
            # if the req'd option isn't in the options dictionary or the value
            # in the dictionary is None
            if opt not in options or not options[opt]:
                raise ValueError(
                    "Required option {} not provided for method {}.".format(
                        opt, method))
        # Check overlap against radius
        if overlap < 2 * options["radius"]:
            overlap = 2 * options["radius"]
    elif method == "blur_mean":
        mean_opts = ["radius"]
        for opt in mean_opts:
            if opt not in options or not options[opt]:
                raise ValueError(
                    "Required option {} not provided for method {}.".format(
                        opt, method))
        if overlap < 2 * options["radius"]:
            overlap = 2 * options["radius"]
    elif method == "blur_toews":
        mean_opts = ["radius"]
        for opt in mean_opts:
            if opt not in options or not options[opt]:
                raise ValueError(
                    "Required option {} not provided for method {}.".format(
                        opt, method))
        if overlap < 2 * options["radius"]:
            overlap = 2 * options["radius"]
    elif method == "mdenoise":
        mdenoise_opts = ["t", "n", "v"]
        for opt in mdenoise_opts:
            if opt not in options or not options[opt]:
                raise ValueError(
                    "Required option {} not provided for method {}.".format(
                        opt, method))
    elif method == "clahe":
        clahe_opts = ["kernel_size", "clip_limit"]
        for opt in clahe_opts:
            if opt not in options or not options[opt]:
                raise ValueError(
                    "Required option {} not provided for method {}.".format(
                        opt, method))
        if overlap < 2 * options["kernel_size"]:
            overlap = 2 * options["kernel_size"]
    elif method == "TPI":
        TPI_opts = ["radius"]
        for opt in TPI_opts:
            if opt not in options or not options[opt]:
                raise ValueError(
                    "Required option {} not provided for method {}.".format(
                        opt, method))
        if overlap < 2 * options["radius"]:
            overlap = 2 * options["radius"]
    elif method == "hillshade":
        hillshade_opts = ["alt", "az"]
        for opt in hillshade_opts:
            if opt not in options or not options[opt]:
                raise ValueError(
                    "Required option {} not provided for method {}.".format(
                        opt, method))
    elif method == "skymodel":
        sky_opts = ["lum_file"]
        for opt in sky_opts:
            if opt not in options or not options[opt]:
                raise ValueError(
                    "Required option {} not provided for method {}.".format(
                        opt, method))
    elif method == "test":
        pass
    else:
        raise NotImplementedError("Method not recognized: {}".format(method))

    # If we're doing a skymodel, we need to read in the whole luminance file
    # and add that list to the options dictionary
    if method == "skymodel":
        if verbose:
            print("Reading in luminance file {}".format(options["lum_file"]))
        lines = []
        with open(options["lum_file"], 'r') as l:
            reader = csv.reader(l)
            for line in reader:
                lines.append(line)
        options["lum_lines"] = lines

    gdal.UseExceptions()

    # Get source file metadata (dimensions, driver, proj, cell size, nodata)
    print("Processing {0:s}...".format(in_dem_path))
    s_fh = gdal.Open(in_dem_path, gdal.GA_ReadOnly)
    rows = s_fh.RasterYSize
    cols = s_fh.RasterXSize
    driver = s_fh.GetDriver()
    bands = s_fh.RasterCount
    s_band = s_fh.GetRasterBand(1)

    # Get source georeference info
    transform = s_fh.GetGeoTransform()
    projection = s_fh.GetProjection()
    cell_size = abs(transform[5])  # Assumes square pixels where height=width
    s_nodata = s_band.GetNoDataValue()
    if s_nodata is None and bands == 1:  # assume a multiband file is an image
        raise ValueError("No NoData value set in input DEM.")
    if verbose and s_nodata is not None:
        # Report the source nodata if present
        print("\tSource NoData Value: {0:f}\n".format(s_nodata))

    # Close source file handle
    s_band = None
    s_fh = None

    # Set up target file in preparation for future writes
    # If we've been given a vrt as a source, force the output to be geotiff
    if driver.LongName == 'Virtual Raster':
        driver = gdal.GetDriverByName('gtiff')

    if os.path.exists(out_dem_path):
        raise IOError("Output file {} already exists.".format(out_dem_path))

    # Set outfile options
    # If it's hillshade or skymodel, we want nodata = 0 and gdal byte
    # THIS WAS FOR SCALING, BUT SCALING DOESN'T WORK (SEE NOTE IN SKYMODEL)
    # Now using for CLAHE
    if method in ['clahe']:
        t_nodata = 0
        dtype = gdal.GDT_Byte
    else:
        t_nodata = s_nodata
        dtype = gdal.GDT_Float32

    # compression Options
    jpeg_opts = [
        "compress=jpeg", "interleave=pixel", "photometric=ycbcr", "tiled=yes",
        "jpeg_quality=90", "bigtiff=yes"
    ]
    lzw_opts = ["compress=lzw", "tiled=yes", "bigtiff=yes"]

    # Use jpeg compression opts if three bands, otherwise lzw
    if bands == 3 and driver.LongName == 'GeoTIFF':
        opts = jpeg_opts
    elif driver.LongName == 'GeoTIFF':
        opts = lzw_opts
    else:
        opts = []

    t_fh = driver.Create(out_dem_path, cols, rows, bands, dtype, options=opts)
    t_fh.SetGeoTransform(transform)
    t_fh.SetProjection(projection)
    t_band = t_fh.GetRasterBand(1)
    if bands == 1:
        t_band.SetNoDataValue(t_nodata)

    if verbose:
        #print("Method: {}".format(method))
        print("Options:")
        for opt in options:
            print("\t{}: {}".format(opt, options[opt]))
        print("Preparing output file {}...".format(out_dem_path))
        print("\tOutput dimensions: {} rows by {} columns.".format(rows, cols))
        print("\tOutput data type: {}".format(
            gdal_array.GDALTypeCodeToNumericTypeCode(dtype)))
        print("\tOutput size: {}".format(
            sizeof_fmt(bands * rows * cols * gdal.GetDataTypeSize(dtype) / 8)))
        print("\tOutput NoData Value: {}".format(t_nodata))

    # Close target file handle (causes entire file to be written to disk)
    t_band = None
    t_fh = None

    # We could probably code up an automatic chunk_size setter based on
    # data type and system memory limits

    # calculate breaks every chunk_size pixels
    row_splits = list(range(0, rows, chunk_size))
    col_splits = list(range(0, cols, chunk_size))

    # add total number of rows/cols to be last break (used for x/y_end)
    row_splits.append(rows)
    col_splits.append(cols)

    # List of chunks to be iterated over with pool.map()
    iterables = []
    total_chunks = (len(row_splits) - 1) * (len(col_splits) - 1)
    progress = 0

    # Double the overlap just to be safe. This distance becomes one side of
    # the super_array beyond the wanted data (f2 <> x values <> f2)
    # if there's only one chunk, set overlap to 0 so that read indices
    # aren't out of bounds
    if total_chunks > 1:
        f2 = 2 * overlap
    else:
        f2 = 0

    # === Multiprocessing notes ===
    # Procedure: open s/t, get and set relevant metadata, close, create
    # list of chunk objects, create pool, execute super_array with
    # map(function, list of chunks)
    # x/y_start = col/row_splits[j/i]- starting original raster index
    # of the chunk
    # x/y_end = col/row_splits[j/i +1]- ending (up to, not including)
    # original raster index of the chunk

    # Create simple chunk objects that hold data about each chunk to be
    # sent to the processor
    # Rows = i = y values, cols = j = x values
    for i in range(0, len(row_splits) - 1):
        for j in range(0, len(col_splits) - 1):
            progress += 1

            # chunk object to hold all the data
            chunk = Chunk()

            # These are specific to each chunk
            chunk.progress = progress
            chunk.tile = "{}-{}".format(i, j)
            # x/y_start are the starting position of the original chunk
            # before adjusting the dimensions to read in the super array;
            # they are not used directly in the ReadAsArray() calls but are
            # used as the location that the altered array should be
            # written in the output bands WriteArray() calls.
            chunk.x_start = col_splits[j]
            chunk.y_start = row_splits[i]
            # end positions of initial chunk, used to compute read window
            chunk.x_end = col_splits[j + 1]
            chunk.y_end = row_splits[i + 1]

            # These are constant over the whole raster
            chunk.s_nodata = s_nodata
            chunk.t_nodata = t_nodata
            chunk.cell_size = cell_size
            chunk.mdenoise_path = mdenoise_path
            chunk.in_dem_path = in_dem_path
            chunk.out_dem_path = out_dem_path
            chunk.f2 = f2
            chunk.rows = rows
            chunk.cols = cols
            chunk.total_chunks = total_chunks
            chunk.method = method
            chunk.options = options
            chunk.verbose = verbose
            chunk.start_time = start
            chunk.bands = bands

            iterables.append(chunk)

    # Create lock to lock s_fh and t_fh reads and writes
    l = mp.Lock()

    print("\nProcessing chunks...")
    # Call pool.map with the lock initializer method, super array
    # processor, and list of chunk objects.
    # chunksize=1 keeps the input processing more-or-less in order
    # (otherwise, for 4 processes working on 100 chunks, each process
    # starts at 0, 25, 50, and 75).
    # pool.map() guarantees the results will be in order, but not
    # necessarily the processing.
    # maxtasksperchild sets a limit on the number of tasks assigned to each
    # process, hopefully limiting memory leaks within each subprocess
    with mp.Pool(processes=num_threads, initializer=lock_init, initargs=(l, ),
                 maxtasksperchild=10) as pool:
        pool.map(ProcessSuperArray, iterables, chunksize=1)

    finish = datetime.datetime.now() - start
    if verbose:
        print(finish)
    return (finish)
def __getmetadata__(self, f=None):
    '''
    Generate metadata for generic imagery

    @type  f: string
    @param f: a filepath to the dataset or a VRT XML string
    @return:  None

    @todo: We force a NoData value. This is not ideal, but it makes for better overview images.
    '''
    if not f:
        f = self.fileinfo['filepath']
    try:
        cwd = os.path.abspath(os.curdir)
        if os.path.exists(f) and os.path.dirname(f):
            p = os.path.split(f)[0]
            os.chdir(p)
        if not self._gdaldataset:
            self._gdaldataset = geometry.OpenDataset(f)  #in case we're subclassed and there's already a dataset open
        if self._gdaldataset:
            driver = self._gdaldataset.GetDriver().ShortName
            if driver[0:3] == 'HDF':
                raise NotImplementedError, 'HDF files are not yet implemented except by custom formats'
            self.metadata['filetype'] = driver + '/' + self._gdaldataset.GetDriver().LongName
            self.metadata['cols'] = self._gdaldataset.RasterXSize
            self.metadata['rows'] = self._gdaldataset.RasterYSize
            self.metadata['nbands'] = self._gdaldataset.RasterCount

            self.metadata['srs'] = self._gdaldataset.GetProjection()
            if not self.metadata['srs'] and self._gdaldataset.GetGCPCount() > 0:
                self.metadata['srs'] = self._gdaldataset.GetGCPProjection()
            self.metadata['epsg'] = spatialreferences.IdentifyAusEPSG(self.metadata['srs'])
            self.metadata['units'] = spatialreferences.GetLinearUnitsName(self.metadata['srs'])

            geotransform = self._gdaldataset.GetGeoTransform()
            if geotransform == (0, 1, 0, 0, 0, 1):
                if self._gdaldataset.GetGCPCount() > 0:
                    gcps = self._gdaldataset.GetGCPs()
                    geotransform = gdal.GCPsToGeoTransform(gcps)
                    gcps = geometry.GeoTransformToGCPs(geotransform, self.metadata['cols'], self.metadata['rows'])  #Just get the 4 corner GCP's
                else:
                    raise NotImplementedError, 'Dataset is not georeferenced'
            else:
                gcps = geometry.GeoTransformToGCPs(geotransform, self.metadata['cols'], self.metadata['rows'])

            ext = [[gcp.GCPX, gcp.GCPY] for gcp in gcps]
            ext.append([gcps[0].GCPX, gcps[0].GCPY])  #Add the 1st point to close the polygon)

            #Reproject corners to lon,lat
            geom = geometry.GeomFromExtent(ext)
            src_srs = osr.SpatialReference()
            src_srs.ImportFromWkt(self.metadata['srs'])
            tgt_srs = osr.SpatialReference()
            tgt_srs.ImportFromEPSG(4326)
            geom = geometry.ReprojectGeom(geom, src_srs, tgt_srs)
            points = geom.GetGeometryRef(0)  #geom.GetBoundary()
            ext = [[points.GetX(i), points.GetY(i)] for i in range(0, points.GetPointCount())]

            self.metadata['cellx'], self.metadata['celly'] = geometry.CellSize(geotransform)
            self.metadata['rotation'] = geometry.Rotation(geotransform)
            if abs(self.metadata['rotation']) < 1.0:
                self.metadata['orientation'] = 'Map oriented'
                self.metadata['rotation'] = 0.0
            else:
                self.metadata['orientation'] = 'Path oriented'
            self.metadata['UL'] = '%s,%s' % tuple(ext[0])
            self.metadata['LL'] = '%s,%s' % tuple(ext[1])
            self.metadata['LR'] = '%s,%s' % tuple(ext[2])
            self.metadata['UR'] = '%s,%s' % tuple(ext[3])

            rb = self._gdaldataset.GetRasterBand(1)
            if rb:
                self.metadata['datatype'] = gdal.GetDataTypeName(rb.DataType)
                self.metadata['nbits'] = gdal.GetDataTypeSize(rb.DataType)
                nodata = rb.GetNoDataValue()
                if nodata is not None:
                    self.metadata['nodata'] = str(nodata)
                else:
                    ct = rb.GetColorTable()  #Fix for Issue 31
                    if ct is None:
                        if self.metadata['datatype'][0:4] in ['Byte', 'UInt']:
                            nodata = 0  #Unsigned, assume 0
                        else:
                            nodata = -2**(self.metadata['nbits'] - 1)  #Signed, assume min value in data range
                        self.metadata['nodata'] = str(nodata)
                        #Fix for Issue 17
                        for i in range(1, self._gdaldataset.RasterCount + 1):
                            self._gdaldataset.GetRasterBand(i).SetNoDataValue(nodata)
            else:
                raise IOError, 'No valid rasterbands found.'

            metadata = self._gdaldataset.GetMetadata()
            self.metadata['metadata'] = '\n'.join(['%s: %s' % (m, metadata[m]) for m in metadata])

            self.metadata['filesize'] = sum([os.path.getsize(tmp) for tmp in self.filelist])
            if self.metadata['filesize'] > 0:
                self.metadata['compressionratio'] = int(
                    (self.metadata['nbands'] * self.metadata['cols'] *
                     self.metadata['rows'] * (self.metadata['nbits'] / 8.0)) /
                    self.metadata['filesize'])
                if self.metadata['compressionratio'] > 0:
                    try:
                        if driver[0:3] == 'JP2':
                            self.metadata['compressiontype'] = "JPEG2000"
                        elif driver[0:3] == 'ECW':
                            self.metadata['compressiontype'] = "ECW"
                        else:
                            mdis = self._gdaldataset.GetMetadata('IMAGE_STRUCTURE')
                            #self.metadata['compressiontype']=mdis['IMAGE_STRUCTURE']
                            self.metadata['compressiontype'] = mdis['COMPRESSION']
                    except:
                        self.metadata['compressiontype'] = 'Unknown'
                else:
                    self.metadata['compressiontype'] = 'None'
            self.extent = ext
        else:
            errmsg = gdal.GetLastErrorMsg()
            raise IOError, 'Unable to open %s\n%s' % (f, errmsg.strip())
    finally:
        #Cleanup
        gdal.ErrorReset()
        os.chdir(cwd)