def read_color_file(self, color_filename_or_lines):
    """Populate ``self.pal`` from a color file path, a sequence of lines,
    or another ``ColorPalette``.

    :param color_filename_or_lines:
        - ``ColorPalette``: returned as-is (no copy is made).
        - ``None``: clears the palette.
        - path-like: the file is read into lines.
        - sequence of strings: parsed directly.
    :raises Exception: on unknown input type or on a malformed palette line.
    """
    if isinstance(color_filename_or_lines, ColorPalette):
        # already a palette; nothing to parse
        return self
    elif color_filename_or_lines is None:
        self.pal.clear()
        return self
    elif base.is_path_like(color_filename_or_lines):
        # use a context manager so the file handle is closed promptly
        # (the original open(...).readlines() leaked the handle)
        with open(str(color_filename_or_lines)) as f:
            color_filename_or_lines = f.readlines()
    elif not base.is_sequence(color_filename_or_lines):
        raise Exception('unknown input {}'.format(color_filename_or_lines))

    self.pal.clear()
    for line in color_filename_or_lines:
        # each meaningful line is "<key> <color-spec>"; lines without a
        # second field (blank lines, lone tokens) are skipped
        split_line = line.strip().split(' ', 1)
        if len(split_line) < 2:
            continue
        try:
            color = self.pal_color_to_rgb(split_line[1])
            key = split_line[0].strip()
        except Exception as e:
            # narrow except (was a bare `except:`) and keep the cause chained
            raise Exception('Error reading palette line: {}'.format(line)) from e
        try:
            key = base.num(key)
        except ValueError:
            # key is not numeric — e.g. a percent entry; remember that the
            # palette keys are no longer all numeric
            self._all_numeric = False
        self.pal[key] = color
def __init__(self, filename_or_ds: path_or_ds, silent_fail=False, *args, **kwargs):
    """Wrap either a path (kept as a string, to be opened later) or an
    already-open ``gdal.Dataset``.

    Exactly one of ``self.filename`` / ``self.ds`` is set, depending on
    the kind of ``filename_or_ds``; extra positional/keyword arguments
    are stashed for the eventual open call.
    """
    path_given = is_path_like(filename_or_ds)
    self.ds: Optional[gdal.Dataset] = None if path_given else filename_or_ds
    self.filename: Optional[PathLike] = str(filename_or_ds) if path_given else None
    self.args = args
    self.kwargs = kwargs
    # we did not open the dataset ourselves, so we do not own it (yet)
    self.own = False
    self.silent_fail = silent_fail
def get_file_from_strings(color_palette: ColorPaletteOrPathOrStrings):
    """Materialize ``color_palette`` as a color file on disk.

    :param color_palette: a ``ColorPalette``, a path-like, or a sequence
        of color-file lines.
    :return: ``(color_filename, temp_color_filename)`` where
        ``temp_color_filename`` is ``None`` if no temporary file was
        created (caller is responsible for removing it otherwise).
    :raises Exception: on an unrecognized palette type.

    Note: uses ``tempfile.NamedTemporaryFile(delete=False)`` instead of the
    deprecated, race-prone ``tempfile.mktemp``.
    """
    temp_color_filename = None
    if isinstance(color_palette, ColorPalette):
        # create (and immediately close) a real temp file, then let the
        # palette write into it
        tmp = tempfile.NamedTemporaryFile(suffix='.txt', delete=False)
        tmp.close()
        temp_color_filename = tmp.name
        color_filename = temp_color_filename
        color_palette.write_color_file(temp_color_filename)
    elif base.is_path_like(color_palette):
        # already a file on disk; nothing temporary to manage
        color_filename = color_palette
    elif base.is_sequence(color_palette):
        # dump the given lines into a temp file
        with tempfile.NamedTemporaryFile(suffix='.txt', mode='w', delete=False) as f:
            temp_color_filename = f.name
            for item in color_palette:
                f.write(item + '\n')
        color_filename = temp_color_filename
    else:
        raise Exception('Unknown color palette type {}'.format(color_palette))
    return color_filename, temp_color_filename
def doit(opts, args):  # pylint: disable=unused-argument
    """Run the gdal_calc computation described by ``opts``.

    Opens/validates all input rasters, resolves the output extent and
    geotransform, creates (or updates) the output dataset, then evaluates
    each ``--calc`` expression block-by-block with numpy/gdal_array names
    in scope, propagating nodata. Returns the output ``gdal.Dataset``.

    :param opts: option object (as built by gdal_calc's option parser) —
        calc, outF, format, extent/projwin, input_files, allBands, type,
        NoDataValue, hideNoData, color_table, creation_options, debug,
        quiet, overwrite, projectionCheck, user_namespace.
    :param args: unused (kept for interface compatibility).
    :raises Exception: on missing/incompatible inputs or write failures.
    """
    if opts.debug:
        print("gdal_calc.py starting calculation %s" % (opts.calc))

    # set up global namespace for eval with all functions of gdal_array, numpy
    global_namespace = {
        key: getattr(module, key)
        for module in [gdal_array, numpy]
        for key in dir(module) if not key.startswith('__')
    }

    if opts.user_namespace:
        global_namespace.update(opts.user_namespace)

    if not opts.calc:
        raise Exception("No calculation provided.")
    elif not opts.outF and opts.format.upper() != 'MEM':
        raise Exception("No output file provided.")

    if opts.format is None:
        opts.format = GetOutputDriverFor(opts.outF)

    if not hasattr(opts, "color_table"):
        opts.color_table = None

    # normalize opts.extent: explicit GeoRectangle wins, then projwin,
    # then a parsed extent keyword, defaulting to Extent.IGNORE
    if isinstance(opts.extent, GeoRectangle):
        pass
    elif opts.projwin:
        if isinstance(opts.projwin, GeoRectangle):
            opts.extent = opts.projwin
        else:
            opts.extent = GeoRectangle.from_lurd(*opts.projwin)
    elif not opts.extent:
        opts.extent = Extent.IGNORE
    else:
        opts.extent = extent_util.parse_extent(opts.extent)

    # which geotransform differences we tolerate between inputs, and the
    # message used when an intolerable difference is found
    compatible_gt_eps = 0.000001
    gt_diff_support = {
        GT.INCOMPATIBLE_OFFSET: opts.extent != Extent.FAIL,
        GT.INCOMPATIBLE_PIXEL_SIZE: False,
        GT.INCOMPATIBLE_ROTATION: False,
        GT.NON_ZERO_ROTATION: False,
    }
    gt_diff_error = {
        GT.INCOMPATIBLE_OFFSET: 'different offset',
        GT.INCOMPATIBLE_PIXEL_SIZE: 'different pixel size',
        GT.INCOMPATIBLE_ROTATION: 'different rotation',
        GT.NON_ZERO_ROTATION: 'non zero rotation',
    }

    ################################################################
    # fetch details of input layers
    ################################################################

    # set up some lists to store data for each band
    myFileNames = []  # input filenames
    myFiles = []  # input DataSets
    myBands = []  # input bands
    myAlphaList = []  # input alpha letter that represents each input file
    myDataType = []  # string representation of the datatype of each input file
    myDataTypeNum = []  # datatype of each input file
    myNDV = []  # nodatavalue for each input file
    DimensionsCheck = None  # dimensions of the output
    Dimensions = []  # Dimensions of input files
    ProjectionCheck = None  # projection of the output
    GeoTransformCheck = None  # GeoTransform of the output
    GeoTransforms = []  # GeoTransform of each input file
    GeoTransformDiffer = False  # True if we have inputs with different GeoTransforms
    myTempFileNames = []  # vrt filename from each input file
    myAlphaFileLists = []  # list of the Alphas which holds a list of inputs

    # loop through input files - checking dimensions
    for alphas, filenames in opts.input_files.items():
        if isinstance(filenames, (list, tuple)):
            # alpha is a list of files
            myAlphaFileLists.append(alphas)
        elif is_path_like(filenames) or isinstance(filenames, gdal.Dataset):
            # alpha is a single filename or a Dataset
            filenames = [filenames]
            alphas = [alphas]
        else:
            # I guess this alphas should be in the global_namespace,
            # It would have been better to pass it as user_namepsace, but I'll accept it anyway
            global_namespace[alphas] = filenames
            continue
        for alpha, filename in zip(alphas * len(filenames), filenames):
            if not alpha.endswith("_band"):
                # check if we have asked for a specific band...
                if "%s_band" % alpha in opts.input_files:
                    myBand = opts.input_files["%s_band" % alpha]
                else:
                    myBand = 1

                myF_is_ds = not is_path_like(filename)
                if myF_is_ds:
                    myFile = filename
                    filename = None
                else:
                    filename = str(filename)
                    myFile = gdal.Open(filename, gdal.GA_ReadOnly)
                if not myFile:
                    raise IOError("No such file or directory: '%s'" % filename)

                myFileNames.append(filename)
                myFiles.append(myFile)
                myBands.append(myBand)
                myAlphaList.append(alpha)
                dt = myFile.GetRasterBand(myBand).DataType
                myDataType.append(gdal.GetDataTypeName(dt))
                myDataTypeNum.append(dt)
                myNDV.append(None if opts.hideNoData else
                             myFile.GetRasterBand(myBand).GetNoDataValue())

                # check that the dimensions of each layer are the same
                myFileDimensions = [myFile.RasterXSize, myFile.RasterYSize]
                if DimensionsCheck:
                    if DimensionsCheck != myFileDimensions:
                        GeoTransformDiffer = True
                        if opts.extent in [Extent.IGNORE, Extent.FAIL]:
                            raise Exception(
                                "Error! Dimensions of file %s (%i, %i) are different from other files (%i, %i). Cannot proceed"
                                % (filename, myFileDimensions[0], myFileDimensions[1],
                                   DimensionsCheck[0], DimensionsCheck[1]))
                else:
                    DimensionsCheck = myFileDimensions

                # check that the Projection of each layer are the same
                myProjection = myFile.GetProjection()
                if ProjectionCheck:
                    if opts.projectionCheck and ProjectionCheck != myProjection:
                        raise Exception(
                            "Error! Projection of file %s %s are different from other files %s. Cannot proceed"
                            % (filename, myProjection, ProjectionCheck))
                else:
                    ProjectionCheck = myProjection

                # check that the GeoTransforms of each layer are the same
                myFileGeoTransform = myFile.GetGeoTransform(can_return_null=True)
                if opts.extent == Extent.IGNORE:
                    GeoTransformCheck = myFileGeoTransform
                else:
                    Dimensions.append(myFileDimensions)
                    GeoTransforms.append(myFileGeoTransform)
                    if not GeoTransformCheck:
                        GeoTransformCheck = myFileGeoTransform
                    else:
                        my_gt_diff = extent_util.gt_diff(
                            GeoTransformCheck, myFileGeoTransform,
                            eps=compatible_gt_eps, diff_support=gt_diff_support)
                        if my_gt_diff not in [GT.SAME, GT.ALMOST_SAME]:
                            GeoTransformDiffer = True
                            if my_gt_diff != GT.COMPATIBLE_DIFF:
                                raise Exception(
                                    "Error! GeoTransform of file {} {} is incompatible ({}), first file GeoTransform is {}. Cannot proceed"
                                    .format(filename, myFileGeoTransform,
                                            gt_diff_error[my_gt_diff], GeoTransformCheck))
                if opts.debug:
                    print("file %s: %s, dimensions: %s, %s, type: %s" %
                          (alpha, filename, DimensionsCheck[0], DimensionsCheck[1],
                           myDataType[-1]))

    # process allBands option
    allBandsIndex = None
    allBandsCount = 1
    if opts.allBands:
        if len(opts.calc) > 1:
            raise Exception("Error! --allBands implies a single --calc")
        try:
            allBandsIndex = myAlphaList.index(opts.allBands)
        except ValueError:
            raise Exception(
                "Error! allBands option was given but Band %s not found. Cannot proceed"
                % (opts.allBands))
        allBandsCount = myFiles[allBandsIndex].RasterCount
        if allBandsCount <= 1:
            allBandsIndex = None
    else:
        allBandsCount = len(opts.calc)

    if opts.extent not in [Extent.IGNORE, Extent.FAIL] and (
            GeoTransformDiffer or isinstance(opts.extent, GeoRectangle)):
        # mixing different GeoTransforms/Extents: compute the common target
        # grid and wrap every input in a temp VRT resampled onto it
        GeoTransformCheck, DimensionsCheck, ExtentCheck = \
            extent_util.calc_geotransform_and_dimensions(
                GeoTransforms, Dimensions, opts.extent)
        if GeoTransformCheck is None:
            raise Exception("Error! The requested extent is empty. Cannot proceed")
        for i in range(len(myFileNames)):
            temp_vrt_filename, temp_vrt_ds = extent_util.make_temp_vrt(
                myFiles[i], ExtentCheck)
            myTempFileNames.append(temp_vrt_filename)
            myFiles[i] = None  # close original ds
            myFiles[i] = temp_vrt_ds  # replace original ds with vrt_ds

            # update the new precise dimensions and gt from the new ds
            GeoTransformCheck = temp_vrt_ds.GetGeoTransform()
            DimensionsCheck = [temp_vrt_ds.RasterXSize, temp_vrt_ds.RasterYSize]
        temp_vrt_ds = None

    ################################################################
    # set up output file
    ################################################################

    # open output file exists
    if opts.outF and os.path.isfile(opts.outF) and not opts.overwrite:
        if allBandsIndex is not None:
            raise Exception(
                "Error! allBands option was given but Output file exists, must use --overwrite option!")
        if len(opts.calc) > 1:
            raise Exception(
                "Error! multiple calc options were given but Output file exists, must use --overwrite option!")
        if opts.debug:
            print("Output file %s exists - filling in results into file" % (opts.outF))

        myOut = gdal.Open(opts.outF, gdal.GA_Update)
        # the existing output must be usable as-is; otherwise explain why
        if myOut is None:
            error = 'but cannot be opened for update'
        elif [myOut.RasterXSize, myOut.RasterYSize] != DimensionsCheck:
            error = 'but is the wrong size'
        elif ProjectionCheck and ProjectionCheck != myOut.GetProjection():
            error = 'but is the wrong projection'
        elif GeoTransformCheck and GeoTransformCheck != myOut.GetGeoTransform(can_return_null=True):
            error = 'but is the wrong geotransform'
        else:
            error = None
        if error:
            raise Exception(
                "Error! Output exists, %s. Use the --overwrite option to automatically overwrite the existing file"
                % error)

        myOutB = myOut.GetRasterBand(1)
        myOutNDV = myOutB.GetNoDataValue()
        myOutType = myOutB.DataType
    else:
        if opts.outF:
            # remove existing file and regenerate
            if os.path.isfile(opts.outF):
                os.remove(opts.outF)
            # create a new file
            if opts.debug:
                print("Generating output file %s" % (opts.outF))
        else:
            opts.outF = ''

        # find data type to use
        if not opts.type:
            # use the largest type of the input files
            myOutType = max(myDataTypeNum)
        else:
            myOutType = opts.type
            if isinstance(myOutType, str):
                myOutType = gdal.GetDataTypeByName(myOutType)

        # create file
        myOutDrv = gdal.GetDriverByName(opts.format)
        myOut = myOutDrv.Create(opts.outF, DimensionsCheck[0], DimensionsCheck[1],
                                allBandsCount, myOutType, opts.creation_options)

        # set output geo info based on first input layer
        if not GeoTransformCheck:
            GeoTransformCheck = myFiles[0].GetGeoTransform(can_return_null=True)
        if GeoTransformCheck:
            myOut.SetGeoTransform(GeoTransformCheck)

        if not ProjectionCheck:
            ProjectionCheck = myFiles[0].GetProjection()
        if ProjectionCheck:
            myOut.SetProjection(ProjectionCheck)

        if opts.NoDataValue is None:
            # use the default noDataValue for this datatype
            myOutNDV = None if opts.hideNoData else DefaultNDVLookup[myOutType]
        elif isinstance(opts.NoDataValue, str) and opts.NoDataValue.lower() == 'none':
            myOutNDV = None  # not to set any noDataValue
        else:
            myOutNDV = opts.NoDataValue  # use the given noDataValue

        for i in range(1, allBandsCount + 1):
            myOutB = myOut.GetRasterBand(i)
            if myOutNDV is not None:
                myOutB.SetNoDataValue(myOutNDV)
            if opts.color_table:
                # set color table and color interpretation
                if is_path_like(opts.color_table):
                    opts.color_table = get_color_table(opts.color_table)
                myOutB.SetRasterColorTable(opts.color_table)
                myOutB.SetRasterColorInterpretation(gdal.GCI_PaletteIndex)
            myOutB = None  # write to band

    myOutTypeName = gdal.GetDataTypeName(myOutType)
    if opts.debug:
        print("output file: %s, dimensions: %s, %s, type: %s" %
              (opts.outF, myOut.RasterXSize, myOut.RasterYSize, myOutTypeName))

    ################################################################
    # find block size to chop grids into bite-sized chunks
    ################################################################

    # use the block size of the first layer to read efficiently
    myBlockSize = myFiles[0].GetRasterBand(myBands[0]).GetBlockSize()
    # find total x and y blocks to be read (ceiling division)
    nXBlocks = (int)((DimensionsCheck[0] + myBlockSize[0] - 1) / myBlockSize[0])
    nYBlocks = (int)((DimensionsCheck[1] + myBlockSize[1] - 1) / myBlockSize[1])
    myBufSize = myBlockSize[0] * myBlockSize[1]

    if opts.debug:
        print("using blocksize %s x %s" % (myBlockSize[0], myBlockSize[1]))

    # variables for displaying progress
    ProgressCt = -1
    ProgressMk = -1
    ProgressEnd = nXBlocks * nYBlocks * allBandsCount

    ################################################################
    # start looping through each band in allBandsCount
    ################################################################
    for bandNo in range(1, allBandsCount + 1):

        ################################################################
        # start looping through blocks of data
        ################################################################

        # store these numbers in variables that may change later
        nXValid = myBlockSize[0]
        nYValid = myBlockSize[1]

        # loop through X-lines
        for X in range(0, nXBlocks):

            # in case the blocks don't fit perfectly
            # change the block size of the final piece
            if X == nXBlocks - 1:
                nXValid = DimensionsCheck[0] - X * myBlockSize[0]

            # find X offset
            myX = X * myBlockSize[0]

            # reset buffer size for start of Y loop
            nYValid = myBlockSize[1]
            myBufSize = nXValid * nYValid

            # loop through Y lines
            for Y in range(0, nYBlocks):
                ProgressCt += 1
                if 10 * ProgressCt / ProgressEnd % 10 != ProgressMk and not opts.quiet:
                    ProgressMk = 10 * ProgressCt / ProgressEnd % 10
                    from sys import version_info
                    if version_info >= (3, 0, 0):
                        exec('print("%d.." % (10*ProgressMk), end=" ")')
                    else:
                        exec('print 10*ProgressMk, "..",')

                # change the block size of the final piece
                if Y == nYBlocks - 1:
                    nYValid = DimensionsCheck[1] - Y * myBlockSize[1]
                    myBufSize = nXValid * nYValid

                # find Y offset
                myY = Y * myBlockSize[1]

                # create empty buffer to mark where nodata occurs
                myNDVs = None

                # make local namespace for calculation
                local_namespace = {}

                val_lists = defaultdict(list)

                # fetch data for each input layer
                for i, Alpha in enumerate(myAlphaList):

                    # populate lettered arrays with values
                    if allBandsIndex is not None and allBandsIndex == i:
                        myBandNo = bandNo
                    else:
                        myBandNo = myBands[i]
                    myval = gdal_array.BandReadAsArray(
                        myFiles[i].GetRasterBand(myBandNo),
                        xoff=myX, yoff=myY,
                        win_xsize=nXValid, win_ysize=nYValid)
                    if myval is None:
                        # BUGFIX: was `filename[i]`, which indexed a character
                        # of the last-seen filename string instead of naming
                        # the failing input
                        raise Exception(
                            'Input block reading failed from filename %s'
                            % myFileNames[i])

                    # fill in nodata values
                    if myNDV[i] is not None:
                        # myNDVs is a boolean buffer.
                        # a cell equals to 1 if there is NDV in any of the corresponding cells in input raster bands.
                        if myNDVs is None:
                            # this is the first band that has NDV set. we initializes myNDVs to a zero buffer
                            # as we didn't see any NDV value yet.
                            myNDVs = numpy.zeros(myBufSize)
                            myNDVs.shape = (nYValid, nXValid)
                        myNDVs = 1 * numpy.logical_or(myNDVs == 1, myval == myNDV[i])

                    # add an array of values for this block to the eval namespace
                    if Alpha in myAlphaFileLists:
                        val_lists[Alpha].append(myval)
                    else:
                        local_namespace[Alpha] = myval
                    myval = None

                for lst in myAlphaFileLists:
                    local_namespace[lst] = val_lists[lst]

                # try the calculation on the array blocks
                calc = opts.calc[bandNo - 1 if len(opts.calc) > 1 else 0]
                try:
                    myResult = eval(calc, global_namespace, local_namespace)
                except:
                    print("evaluation of calculation %s failed" % (calc))
                    raise

                # Propagate nodata values (set nodata cells to zero
                # then add nodata value to these cells).
                if myNDVs is not None and myOutNDV is not None:
                    myResult = ((1 * (myNDVs == 0)) * myResult) + (myOutNDV * myNDVs)
                elif not isinstance(myResult, numpy.ndarray):
                    myResult = numpy.ones((nYValid, nXValid)) * myResult

                # write data block to the output file
                myOutB = myOut.GetRasterBand(bandNo)
                if gdal_array.BandWriteArray(myOutB, myResult, xoff=myX, yoff=myY) != 0:
                    raise Exception('Block writing failed')
                myOutB = None  # write to band

    # remove temp files
    for idx, tempFile in enumerate(myTempFileNames):
        myFiles[idx] = None
        os.remove(tempFile)

    gdal.ErrorReset()
    myOut.FlushCache()
    if gdal.GetLastErrorMsg() != '':
        raise Exception('Dataset writing failed')
    if not opts.quiet:
        print("100 - Done")

    return myOut
def is_supported_format(self, filename):
    """Return True if ``filename`` is path-like and its extension is one of
    the supported extensions; False for non-path inputs (e.g. an open
    dataset object).
    """
    if base.is_path_like(filename):
        # BUGFIX: get_extension was called with no argument; it must be
        # asked for the extension of `filename`
        ext = base.get_extension(filename).lower()
        return ext in self.get_supported_extenstions()
    return False