def raster_overlap(file_A, file_B, outpath, NoData_A=None, NoData_B=None):
    """
    Finds overlapping area between two raster images.

    Examines two images and outputs a raster identifying pixels where both
    rasters have non-NoData values.

    DOC FIX: the output raster contains 0 where BOTH images have data, and 2
    (which is also the output's NoData value) where one or both images are
    missing data. (The old docstring incorrectly described 1's and 0's.)
    A polygon shapefile of the same area is also created.

    :param file_A:   the first raster file
    :param file_B:   the second raster file
    :param outpath:  the output filename for the desired output, ends in ".tif"
    :param NoData_A: the NoData value of file A (defaults to A's metadata value)
    :param NoData_B: the NoData value of file B (defaults to B's metadata value)
    :return: (metaA, metaB) metadata objects for the two inputs

    This function automatically invokes
        clip_and_snap
        null_define
    """

    if not is_rast(file_A) or not is_rast(file_B):
        raise Exception(' both inputs must be rasters!')

    # load the rasters as numpy arrays (used here only for NoData metadata)
    a, metaA = to_numpy(file_A)
    b, metaB = to_numpy(file_B)

    # fall back on each raster's metadata NoData value when none is supplied
    if NoData_A is None:
        NoData_A = metaA.NoData_Value
    if NoData_B is None:
        NoData_B = metaB.NoData_Value

    # spatially match the rasters
    print('preparing input rasters!')
    clip_and_snap(file_A, file_B, outpath.replace(".shp", ".tif"), NoData_B)

    # reload the rasters as numpy arrays now that spatial matching is done
    a, metaA = to_numpy(file_A)
    b, metaB = to_numpy(file_B)

    # the mask attributes are True (1) wherever data is missing, so the sum
    # is nonzero wherever either input lacks data
    print('Finding overlaping pixels!')
    Workmatrix = a.mask + b.mask
    Workmatrix = Workmatrix.astype('uint8')
    Workmatrix[Workmatrix == 1] = 2      # missing pixels take the NoData value

    print('Saving overlap file!')
    metaA.numpy_datatype = 'uint8'
    from_numpy(Workmatrix, metaA, outpath.replace(".shp", ".tif"),
               NoData_Value=2)
    arcpy.RasterToPolygon_conversion(outpath.replace(".shp", ".tif"),
                                     outpath.replace(".tif", ".shp"),
                                     'NO_SIMPLIFY')

    return metaA, metaB
def raster_overlap(file_A, file_B, outpath, NoData_A=None, NoData_B=None):
    """
    Finds overlapping area between two raster images.

    Examines two images and outputs a raster identifying pixels where both
    rasters have non-NoData values.

    DOC FIX: the output raster contains 0 where BOTH images have data, and 2
    (which is also the output's NoData value) where one or both images are
    missing data. (The old docstring incorrectly described 1's and 0's.)
    A polygon shapefile of the same area is also created.

    :param file_A:   the first file
    :param file_B:   the second file
    :param outpath:  the output filename for the desired output. must end in ".tif"
    :param NoData_A: the NoData value of file A (defaults to A's metadata value)
    :param NoData_B: the NoData value of file B (defaults to B's metadata value)
    :return outpath: filepath to raster created by this function.

    This function automatically invokes
        * clip_and_snap
        * null_define
    """

    if not is_rast(file_A) or not is_rast(file_B):
        raise Exception('both inputs must be rasters!')

    # load the rasters as numpy arrays (used here only for NoData metadata)
    a, metaA = to_numpy(file_A)
    b, metaB = to_numpy(file_B)

    # fall back on each raster's metadata NoData value when none is supplied
    if NoData_A is None:
        NoData_A = metaA.NoData_Value
    if NoData_B is None:
        NoData_B = metaB.NoData_Value

    # spatially match the rasters
    print('preparing input rasters!')
    clip_and_snap(file_A, file_B, outpath.replace(".shp", ".tif"), NoData_B)

    # reload the rasters as numpy arrays now that spatial matching is done
    a, metaA = to_numpy(file_A)
    b, metaB = to_numpy(file_B)

    # the mask attributes are True (1) wherever data is missing, so the sum
    # is nonzero wherever either input lacks data
    print('Finding overlaping pixels!')
    Workmatrix = a.mask + b.mask
    Workmatrix = Workmatrix.astype('uint8')
    Workmatrix[Workmatrix == 1] = 2      # missing pixels take the NoData value

    print('Saving overlap file!')
    metaA.numpy_datatype = 'uint8'
    from_numpy(Workmatrix, metaA, outpath.replace(".shp", ".tif"),
               NoData_Value=2)
    arcpy.RasterToPolygon_conversion(outpath.replace(".shp", ".tif"),
                                     outpath.replace(".tif", ".shp"),
                                     'NO_SIMPLIFY')

    return outpath
def null_set_range(rastlist, high_thresh=None, low_thresh=None, NoData_Value=None):
    """
    Changes values within a certain range to NoData

    similar to raster.null_define, but can take an entire range of values to
    set to NoData. useful in filtering obviously erroneous high or low values
    from a raster dataset.

    :param rastlist:     list of files for which to set NoData values. easily
                         created with "core.list_files" function
    :param high_thresh:  will set all values above this to NoData
    :param low_thresh:   will set all values below this to NoData
    :param NoData_Value: the value written where data is masked out; taken
                         from each raster's metadata when not supplied.
    """

    # sanitize filelist input
    rastlist = enf_rastlist(rastlist)

    # iterate through each file in the filelist and set nodata values
    for rastname in rastlist:

        # load raster as numpy array and save spatial referencing.
        rast, meta = to_numpy(rastname)

        # BUGFIX: the original test was inverted ("if not NoData_Value == None"),
        # which clobbered a user-supplied NoData_Value with the metadata value
        # and left None in place when no value was supplied.
        if NoData_Value is None:
            NoData_Value = meta.NoData_Value

        if high_thresh is not None:
            rast[rast >= high_thresh] = NoData_Value

        if low_thresh is not None:
            rast[rast <= low_thresh] = NoData_Value

        from_numpy(rast, meta, rastname)
        arcpy.SetRasterProperties_management(rastname, data_type="#",
                                             statistics="#", stats_file="#",
                                             nodata="1 " + str(NoData_Value))
    return
def new_mosaic(rasterpaths, output_path, mosaic_method=None, cell_size=None,
               number_of_bands=None):
    """
    Simply creates a new raster dataset mosaic of input rasters by wrapping
    the arcpy.MosaicToNewRaster_management function. learn more about the
    fields here
    http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#//001700000098000000

    :param rasterpaths:     list of complete filepaths to raster data to mosaic
    :param output_path:     place to save new mosaic raster dataset
    :param mosaic_method:   options are "FIRST", "LAST", "BLEND", "MEAN",
                            "MINIMUM","MAXIMUM" (defaults to "FIRST")
    :param cell_size:       of format "[cellwidth] [cellheight]" in the
                            appropriate linear units, usually meters.
    :param number_of_bands: number of bands in the output raster (defaults to 1)
    :return output_path:    returns filepath to new file, same as input
                            ``output_path``
    """

    # set up input parameters
    if mosaic_method is None:
        mosaic_method = "FIRST"

    if cell_size is not None:
        print("using custom cell size of '{0}'".format(cell_size))

    if number_of_bands is None:
        number_of_bands = 1

    rasterpaths = enf_rastlist(rasterpaths)

    # get some metadata about the first raster in the mosaic.
    # BUGFIX: the discarded array was previously bound to the name "numpy",
    # shadowing the numpy module inside this function.
    _, meta = to_numpy(rasterpaths[0])

    # check output directories and set up inputs for arcpy function.
    # BUGFIX: guard against an empty dirname (bare filename output_path),
    # which would make os.makedirs("") raise.
    outdir, outname = os.path.split(output_path)
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    arcpy.MosaicToNewRaster_management(
        rasterpaths,
        outdir,
        outname,
        None,               # coordinate system
        meta.pixel_type,
        cell_size,
        str(number_of_bands),
        mosaic_method=mosaic_method)

    print("Created raster mosaic at {0}".format(output_path))
    return output_path
def new_mosaic(rasterpaths, output_path, mosaic_method=None, cell_size=None,
               number_of_bands=None):
    """
    Simply creates a new raster dataset mosaic of input rasters by wrapping
    the arcpy.MosaicToNewRaster_management function. learn more about the
    fields here
    http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#//001700000098000000

    :param rasterpaths:     list of complete filepaths to raster data to mosaic
    :param output_path:     place to save new mosaic raster dataset
    :param mosaic_method:   options are "FIRST", "LAST", "BLEND", "MEAN",
                            "MINIMUM","MAXIMUM" (defaults to "FIRST")
    :param cell_size:       of format "[cellwidth] [cellheight]" in the
                            appropriate linear units, usually meters.
    :param number_of_bands: number of bands in the output raster (defaults to 1)
    :return output_path:    returns filepath to new file, same as input
                            ``output_path``
    """

    # set up input parameters
    if mosaic_method is None:
        mosaic_method = "FIRST"

    if cell_size is not None:
        print("using custom cell size of '{0}'".format(cell_size))

    if number_of_bands is None:
        number_of_bands = 1

    rasterpaths = enf_rastlist(rasterpaths)

    # get some metadata about the first raster in the mosaic.
    # BUGFIX: the discarded array was previously bound to the name "numpy",
    # shadowing the numpy module inside this function.
    _, meta = to_numpy(rasterpaths[0])

    # check output directories and set up inputs for arcpy function.
    # BUGFIX: guard against an empty dirname (bare filename output_path),
    # which would make os.makedirs("") raise.
    outdir, outname = os.path.split(output_path)
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    arcpy.MosaicToNewRaster_management(
        rasterpaths,
        outdir,
        outname,
        None,               # coordinate system
        meta.pixel_type,
        cell_size,
        str(number_of_bands),
        mosaic_method=mosaic_method)

    print("Created raster mosaic at {0}".format(output_path))
    return output_path
def null_set_range(rastlist, high_thresh=None, low_thresh=None, NoData_Value=None):
    """
    Changes values within a certain range to NoData.

    similar to ``raster.null_define``, but can take an entire range of values
    to set to NoData. useful in filtering obviously erroneous high or low
    values from a raster dataset.

    :param rastlist:     list of rasters for which to set no data values
    :param high_thresh:  will set all values above this to NoData
    :param low_thresh:   will set all values below this to NoData
    :param NoData_Value: the value written where data is masked out; taken
                         from each raster's metadata when not supplied.
    :return rastlist:    list of all rasters modified by this function
    """

    # sanitize filelist input
    rastlist = enf_rastlist(rastlist)

    # iterate through each file in the filelist and set nodata values
    for rastname in rastlist:

        # load raster as numpy array and save spatial referencing.
        rast, meta = to_numpy(rastname)

        # BUGFIX: the original test was inverted ("if not NoData_Value is None"),
        # which clobbered a user-supplied NoData_Value with the metadata value
        # and left None in place when no value was supplied.
        if NoData_Value is None:
            NoData_Value = meta.NoData_Value

        if high_thresh is not None:
            rast[rast >= high_thresh] = NoData_Value

        if low_thresh is not None:
            rast[rast <= low_thresh] = NoData_Value

        from_numpy(rast, meta, rastname)

        # tag the nodata value on the saved raster; failure is non-fatal
        try:
            arcpy.SetRasterProperties_management(rastname, data_type="#",
                                                 statistics="#", stats_file="#",
                                                 nodata="1 " + str(NoData_Value))
        except RuntimeError:
            print("failed to set nodata in {0}".format(rastname))

    # BUGFIX: the docstring promises the modified raster list, but the
    # function previously returned None.
    return rastlist
def null_set_range(rastlist, high_thresh=None, low_thresh=None, NoData_Value=None):
    """
    Changes values within a certain range to NoData

    similar to raster.null_define, but can take an entire range of values to
    set to NoData. useful in filtering obviously erroneous high or low values
    from a raster dataset.

    :param rastlist:     list of files for which to set NoData values. easily
                         created with "core.list_files" function
    :param high_thresh:  will set all values above this to NoData
    :param low_thresh:   will set all values below this to NoData
    :param NoData_Value: the value written where data is masked out; taken
                         from each raster's metadata when not supplied.
    """

    # sanitize filelist input
    rastlist = enf_rastlist(rastlist)

    # iterate through each file in the filelist and set nodata values
    for rastname in rastlist:

        # load raster as numpy array and save spatial referencing.
        rast, meta = to_numpy(rastname)

        # BUGFIX: the original test was inverted ("if not NoData_Value == None"),
        # which clobbered a user-supplied NoData_Value with the metadata value
        # and left None in place when no value was supplied.
        if NoData_Value is None:
            NoData_Value = meta.NoData_Value

        if high_thresh is not None:
            rast[rast >= high_thresh] = NoData_Value

        if low_thresh is not None:
            rast[rast <= low_thresh] = NoData_Value

        from_numpy(rast, meta, rastname)
        arcpy.SetRasterProperties_management(rastname, data_type="#",
                                             statistics="#", stats_file="#",
                                             nodata="1 " + str(NoData_Value))
    return
def null_set_range(rastlist, high_thresh=None, low_thresh=None, NoData_Value=None):
    """
    Changes values within a certain range to NoData.

    similar to ``raster.null_define``, but can take an entire range of values
    to set to NoData. useful in filtering obviously erroneous high or low
    values from a raster dataset.

    :param rastlist:     list of rasters for which to set no data values
    :param high_thresh:  will set all values above this to NoData
    :param low_thresh:   will set all values below this to NoData
    :param NoData_Value: the value written where data is masked out; taken
                         from each raster's metadata when not supplied.
    :return rastlist:    list of all rasters modified by this function
    """

    # sanitize filelist input
    rastlist = enf_rastlist(rastlist)

    # iterate through each file in the filelist and set nodata values
    for rastname in rastlist:

        # load raster as numpy array and save spatial referencing.
        rast, meta = to_numpy(rastname)

        # BUGFIX: the original test was inverted ("if not NoData_Value is None"),
        # which clobbered a user-supplied NoData_Value with the metadata value
        # and left None in place when no value was supplied.
        if NoData_Value is None:
            NoData_Value = meta.NoData_Value

        if high_thresh is not None:
            rast[rast >= high_thresh] = NoData_Value

        if low_thresh is not None:
            rast[rast <= low_thresh] = NoData_Value

        from_numpy(rast, meta, rastname)

        # tag the nodata value on the saved raster; failure is non-fatal
        try:
            arcpy.SetRasterProperties_management(rastname, data_type="#",
                                                 statistics="#", stats_file="#",
                                                 nodata="1 " + str(NoData_Value))
        except RuntimeError:
            print("failed to set nodata in {0}".format(rastname))

    # BUGFIX: the docstring promises the modified raster list, but the
    # function previously returned None.
    return rastlist
# check output directories and set up inputs for arcpy function outdir, outname = os.path.split(output_path) if not os.path.exists(outdir): os.makedirs(outdir) arcpy.MosaicToNewRaster_management(rasterpaths, outdir, outname, None, # coordinate system meta.pixel_type, cell_size, str(number_of_bands), mosaic_method = mosaic_method) print("Created raster mosaic at {0}".format(output_path)) return output_path if __name__ == "__main__": adir = r"C:\Users\jwely\Desktop\Team_Projects\2015_sumer_CO_water\LiDAR_Format_Trial" outpath = os.path.join(adir, "mosaic", "test_mosaic.tif") new_mosaic(adir, outpath, mosaic_method = "FIRST" ) rast, meta = to_numpy(outpath) rast.data[rast.data == numpy.nan] = 0 rast.data[(2452 >= rast.data) & (rast.data >= 2450)] = numpy.nan rast.data[(2430 >= rast.data) & (rast.data >= 2428)] = numpy.nan rast.data[(2350 >= rast.data) & (rast.data >= 2348)] = numpy.nan from_numpy(rast, meta, outpath.replace(".tif","_gaps.tif"))
def spatially_match(snap_raster, rasterlist, outdir, NoData_Value=False,
                    resamp_type=False):
    """
    Prepares input rasters for further numerical processing

    This function simply ensures all rasters in "rasterlist" are identically
    projected and have the same cell size, then calls the
    raster.clip_and_snap function to ensure that the cells are perfectly
    coincident and that the total spatial extents of the images are
    identical, even when NoData values are considered. This is useful because
    it allows the two images to be passed on for numerical processing as
    nothing more than matrices of values, and the user can be sure that any
    index in any matrix is exactly coincident with the same index in any
    other matrix. This is especially important to use when comparing
    different datasets from different sources outside arcmap, for example
    MODIS and Landsat data with an ASTER DEM.

    :param snap_raster:  raster to which all other images will be snapped
    :param rasterlist:   list of rasters, a single raster, or a directory full
                         of tiffs which will be clipped to the extent of
                         "snap_raster" and aligned such that the cells are
                         perfectly coincident.
    :param outdir:       the output directory to save newly created spatially
                         matched tifs.
    :param NoData_Value: passed through to clip_and_snap for the output rasters
    :param resamp_type:  the resampling type to use if images are not
                         identical cell sizes. "NEAREST","BILINEAR", and
                         "CUBIC" are the most common.

    this function automatically invokes
        clip_and_snap
        project_resample
    """

    # sanitize inputs and create output directories
    tempdir = os.path.join(outdir, 'temp')
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    if not os.path.isdir(tempdir):
        os.makedirs(tempdir)

    rasterlist = enf_rastlist(rasterlist)
    core.exists(snap_raster)

    # set the snap raster environment in arcmap.
    arcpy.env.snapRaster = snap_raster

    print('Loading snap raster {0}'.format(snap_raster))
    _, snap_meta = to_numpy(snap_raster)
    print('Bounds of rectangle to define boundaries: [{0}]'.format(
        snap_meta.rectangle))

    # for every raster in the raster list, snap rasters and clip.
    for rastname in rasterlist:

        # BUGFIX: usetemp is now reset for every raster. previously it was
        # set once before the loop, so after the first reprojection every
        # subsequent raster wrongly reused the stale temp filename.
        usetemp = False

        _, meta = to_numpy(rastname)
        head, tail = os.path.split(rastname)

        if snap_meta.projection.projectionName != meta.projection.projectionName:
            print('Projection discrepancy found. Reprojecting...')
            project_resample(rastname, snap_raster, tempdir, resamp_type)
            tempname = core.create_outname(tempdir, tail, "p")
            usetemp = True

        # define an output name and run the clip_and_snap function
        outname = core.create_outname(outdir, rastname, "sm")

        # if a temporary file was created in previous steps, use it
        if usetemp:
            clip_and_snap(snap_raster, tempname, outname, NoData_Value)
        else:
            clip_and_snap(snap_raster, rastname, outname, NoData_Value)

        print('Finished matching raster {0}'.format(rastname))

    return
def clip_and_snap(snap_raster, rastname, outname, NoData_Value=None):
    """
    Ensures perfect coincidence between a snap_raster and any input rasters

    This script is primarily intended for calling by the
    "raster.spatially_match" function, but may be called independently. It
    inputs a reference image and a working image; the working image must be
    in exactly the same projection and spatial resolution as the reference
    image. This script will simply ensure the tif files are perfectly
    coincident, and that the total image extents are identical. This is
    important when performing numpy manipulations on matrices derived from
    different datasets manipulated in different ways to ensure alignment.

    This script makes modifications to the original raster file, so save a
    backup if you are unsure how to use this.

    :param snap_raster:  filepath and name of reference raster whose extent
                         will be taken on by the input rastname
    :param rastname:     name of raster which should be snapped to snap_raster
    :param outname:      filepath where the snapped raster is written
    :param NoData_Value: value desired to represent NoData in the saved
                         image; defaults to rastname's metadata NoData value.

    :return snap_meta: metadata of the snap_raster file as output by to_numpy
    :return meta:      metadata of the rastname file as output by to_numpy
    """

    # grab metadata for rastname
    _, snap_meta = to_numpy(snap_raster)
    _, meta = to_numpy(rastname)

    if NoData_Value is None:
        NoData_Value = meta.NoData_Value

    head, tail = os.path.split(outname)
    tempdir = os.path.join(head, 'temp')
    if not os.path.isdir(tempdir):
        os.makedirs(tempdir)

    # set the snap raster environment in arcmap
    arcpy.env.snapRaster = snap_raster

    # remove data that is outside the bounding box and snap the image
    print("Clipping {0}".format(rastname))
    tempout = os.path.join(tempdir, 'tempclip.tif')
    # BUGFIX: the original bare "except:" also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception. If the first Clip call fails
    # (presumably because MAINTAIN_EXTENT is unsupported - TODO confirm),
    # retry without that argument.
    try:
        arcpy.Clip_management(rastname, snap_meta.rectangle, tempout,
                              "#", "#", "NONE", "MAINTAIN_EXTENT")
    except Exception:
        arcpy.Clip_management(rastname, snap_meta.rectangle, tempout,
                              "#", "#", "NONE")

    # load the newly clipped raster, find any residual offsets
    # (usually a single pixel or two)
    raster, meta = to_numpy(tempout)
    xloff = int(round((meta.Xmin - snap_meta.Xmin) / meta.cellWidth, 0))
    yloff = int(round((meta.Ymin - snap_meta.Ymin) / meta.cellHeight, 0))
    xhoff = int(round((meta.Xmax - snap_meta.Xmax) / meta.cellWidth, 0))
    yhoff = int(round((meta.Ymax - snap_meta.Ymax) / meta.cellHeight, 0))

    # plop the snapped raster into the new output raster, alter the
    # metadata, and save it
    meta.Xmin = snap_meta.Xmin
    meta.Ymin = snap_meta.Ymin
    meta.Xmax = snap_meta.Xmax
    meta.Ymax = snap_meta.Ymax
    newraster = raster[(-yloff):(meta.Ysize - yhoff),
                       (-xloff):(meta.Xsize - xhoff)]
    from_numpy(newraster, meta, outname, NoData_Value)

    # clean up the temp workspace; best-effort, failure is non-fatal
    try:
        shutil.rmtree(tempdir)
    except Exception:
        pass

    return snap_meta, meta
outdir, outname = os.path.split(output_path) if not os.path.exists(outdir): os.makedirs(outdir) arcpy.MosaicToNewRaster_management( rasterpaths, outdir, outname, None, # coordinate system meta.pixel_type, cell_size, str(number_of_bands), mosaic_method=mosaic_method) print("Created raster mosaic at {0}".format(output_path)) return output_path if __name__ == "__main__": adir = r"C:\Users\jwely\Desktop\Team_Projects\2015_sumer_CO_water\LiDAR_Format_Trial" outpath = os.path.join(adir, "mosaic", "test_mosaic.tif") new_mosaic(adir, outpath, mosaic_method="FIRST") rast, meta = to_numpy(outpath) rast.data[rast.data == numpy.nan] = 0 rast.data[(2452 >= rast.data) & (rast.data >= 2450)] = numpy.nan rast.data[(2430 >= rast.data) & (rast.data >= 2428)] = numpy.nan rast.data[(2350 >= rast.data) & (rast.data >= 2348)] = numpy.nan from_numpy(rast, meta, outpath.replace(".tif", "_gaps.tif"))
def degree_days(T_base, Max, Min, NoData_Value, outpath=False, roof=False, floor=False): """ Inputs rasters for maximum and minimum temperatures, calculates Growing Degree Days this function is built to perform the common degree day calculation on either a pair of raster filepaths, a pair of numpy arrays It requires, at minimum a maximum temperature value, a minimum temperature value, and a base temperature. This equation could also be used to calculate Chill hours or anything similar. The equation is ``[(Max+Min)/2 + T_base]`` where values in Max which are greater than roof are set equal to roof where values in Min which are less than floor are set equal to floor consult [https://en.wikipedia.org/wiki/Growing_degree-day] for more information. :param T_base: base temperature to ADD, be mindful of sign convention. :param Max: filepath, numpy array, or list of maximum temperatures :param Min: filepath, numpy array, or list of minimum temperatures :param NoData_Value: values to ignore (must be int or float) :param outpath: filepath to which output should be saved. Only works if Max and Min inputs are raster filepaths with spatial referencing. :param roof: roof value above which Max temps do not mater :param floor: floor value below which Min temps do not mater :return deg_days: a numpy array of the output degree_days """ #FIXME: doesn't fit style guide. does not operate in batch and return list of output filepaths output_filelist = [] # format numerical inputs as floating point values T_base = float(T_base) if roof: roof = float(roof) if floor: floor = float(floor) # Determine the type of input and convert to useful format for calculation # acceptable input formats are filepaths to rasters, numpy arrays, or lists. if type(Max) is list and type(Min) is list: # if the first entry in a list is a string, assume it is a filename that has # been placed into a list. if type(Max[0]) is str and type(Min[0]) is str: Max = Max[0] Min = Min[0] # load in the min and max files. 
highs, meta = to_numpy(Max) lows, meta = to_numpy(Min) print('Found spatially referenced image pair!') else: highs = numpy.array(Max) lows = numpy.array(Min) # if they are already numpy arrays elif type(Max) is numpy.ndarray: highs = Max lows = Min else: raise Exception("invalid inputs!") # Begin to perform the degree day calculations # apply roof and floor corrections if they have been specified if roof: highs[highs >= roof] = roof if floor: lows[lows <= floor] = floor # find the shapes of high and low arrays xsh, ysh = highs.shape xsl, ysl = lows.shape # only continue if min and max arrays have the same shape if xsh == xsl and ysh == ysl: # set empty degree day matrix deg_days = numpy.zeros((xsh, ysh)) # perform the calculation for x in range(xsh): for y in range(ysh): if round(highs[x, y] / NoData_Value, 10) != 1 and round( lows[x, y] / NoData_Value, 10) != 1: deg_days[x, y] = ((highs[x, y] + lows[x, y]) / 2) + T_base else: deg_days[x, y] = NoData_Value # print error if the arrays are not the same size else: print('Images are not the same size!, Check inputs!') return False # if an output path was specified, save it with the spatial referencing information. if outpath and type(Max) is str and type(Min) is str: from_numpy(deg_days, meta, outpath) print('Output saved at : ' + outpath) return deg_days
def many_stats(rasterlist, outdir, outname, saves=None,
               low_thresh=None, high_thresh=None, numtype='float32'):
    """
    Take statistics across many input rasters

    this function is used to take statistics on large groups of rasters with
    identical spatial extents. Similar to Rolling_Raster_Stats.

    :param rasterlist:  list of raster filepaths for which to take statistics
    :param outdir:      directory where output should be stored.
    :param outname:     base name for output raster files
    :param saves:       which statistics to save in a raster; any subset of
                        ['AVG', 'NUM', 'STD', 'SUM'], which is the default.
    :param low_thresh:  values below low_thresh are assumed erroneous and set
                        to NoData
    :param high_thresh: values above high_thresh are assumed erroneous and
                        set to NoData.
    :param numtype:     type of numerical value. defaults to 32bit float.
    """

    # IDIOM FIX: avoid a mutable default argument for "saves"
    if saves is None:
        saves = ['AVG', 'NUM', 'STD', 'SUM']

    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    rasterlist = enf_rastlist(rasterlist)

    # build the empty numpy array based on size of first raster
    temp_rast, metadata = to_numpy(rasterlist[0])
    xs, ys = temp_rast.shape
    zs = len(rasterlist)
    rast_3d = numpy.zeros((xs, ys, zs))

    metadata.NoData_Value = numpy.nan

    # open up the initial figure
    rastfig = raster_fig(temp_rast)

    # populate the 3d matrix with values from all rasters
    for i, raster in enumerate(rasterlist):

        print('working on file {0}'.format(os.path.basename(raster)))
        new_rast, new_meta = to_numpy(raster, numtype)
        new_rast = new_rast.data

        if not new_rast.shape == (xs, ys):
            # BUGFIX: converted Python 2 print statement to a function call.
            # NOTE(review): a size mismatch is only reported, not skipped;
            # the 3d-matrix assignment below will raise on a real mismatch.
            print(new_rast.shape)

        # normalize all rasters to a 'nan' NoData_Value
        if new_meta.NoData_Value != metadata.NoData_Value:
            new_rast[new_rast == new_meta.NoData_Value] = metadata.NoData_Value

        # set values outside thresholds to nodata values
        if low_thresh is not None:
            new_rast[new_rast < low_thresh] = metadata.NoData_Value
        if high_thresh is not None:
            new_rast[new_rast > high_thresh] = metadata.NoData_Value

        new_rast = numpy.ma.masked_array(new_rast, numpy.isnan(new_rast))

        # display a figure
        rastfig.update_fig(new_rast)

        rast_3d[:, :, i] = new_rast

    # build up our statistics by masking nan values and performing
    # matrix operations
    rastfig.close_fig()
    rast_3d_masked = numpy.ma.masked_array(rast_3d, numpy.isnan(rast_3d))

    if "AVG" in saves:
        avg_rast = numpy.mean(rast_3d_masked, axis=2)
        avg_rast = numpy.array(avg_rast)
        rastfig = raster_fig(avg_rast, title="Average")
        avg_name = core.create_outname(outdir, outname, 'AVG', 'tif')
        print("Saving AVERAGE output raster as {0}".format(avg_name))
        from_numpy(avg_rast, metadata, avg_name)
        rastfig.close_fig()
        del avg_rast

    if "STD" in saves:
        std_rast = numpy.std(rast_3d_masked, axis=2)
        std_rast = numpy.array(std_rast)
        rastfig = raster_fig(std_rast, title="Standard Deviation")
        std_name = core.create_outname(outdir, outname, 'STD', 'tif')
        print("Saving STANDARD DEVIATION output raster as {0}".format(std_name))
        from_numpy(std_rast, metadata, std_name)
        rastfig.close_fig()
        del std_rast

    if "NUM" in saves:
        # good-pixel count: total layers minus masked entries per cell
        num_rast = (numpy.zeros((xs, ys)) + zs) - numpy.sum(rast_3d_masked.mask, axis=2)
        num_rast = numpy.array(num_rast)
        # BUGFIX: the figure was previously closed immediately after being
        # opened and then closed again; now closed once after saving, matching
        # the AVG and STD branches.
        rastfig = raster_fig(num_rast, title="Good pixel count (NUM)")
        num_name = core.create_outname(outdir, outname, 'NUM', 'tif')
        print("Saving NUMBER output raster as {0}".format(num_name))
        from_numpy(num_rast, metadata, num_name)
        rastfig.close_fig()
        del num_rast

    if "SUM" in saves:
        sum_rast = numpy.sum(rast_3d_masked, axis=2)
        sum_rast = numpy.array(sum_rast)
        # BUGFIX: figure title and log message were copy-pasted from the
        # NUM branch and wrongly labeled the SUM output.
        rastfig = raster_fig(sum_rast, title="Sum (SUM)")
        sum_name = core.create_outname(outdir, outname, 'SUM', 'tif')
        print("Saving SUM output raster as {0}".format(sum_name))
        from_numpy(sum_rast, metadata, sum_name)
        rastfig.close_fig()
        del sum_rast

    return
def degree_days_accum(rasterlist, critical_values=None, outdir=None):
    """
    Accumulates degree days in a time series rasterlist

    This function is the logical successor to calc.degree_days. Input a list
    of rasters containing daily data to be accumulated. Output raster for a
    given day will be the sum total of the input raster for that day and all
    preceding days. The last output raster in a years worth of data (image
    356) would be the sum of all 365 images. The 25th output raster would be
    a sum of the first 25 days.
    Critical value rasters will also be created. Useful for example: we wish
    to know on what day of our 365 day sequence every pixel hits a value of
    100. Input 100 as a critical value and that output raster will be
    generated.

    :param rasterlist:      list of files, or directory containing rasters to
                            accumulate
    :param critical_values: Values at which the user wishes to know WHEN the
                            total accumulation value reaches this point. For
                            every critical value, an output raster will be
                            created. This raster contains integer values
                            denoting the index number of the file at which
                            the value was reached. This input must be a list
                            of ints or floats, not strings.
    :param outdir:          Desired output directory for all output files.
    :return output_filelist: a list of all files created by this function.
    """

    output_filelist = []

    rasterlist = enf_rastlist(rasterlist)

    if critical_values:
        critical_values = core.enf_list(critical_values)

        # critical values of zero are problematic, so replace with a small value.
        if 0 in critical_values:
            critical_values.remove(0)
            critical_values.append(0.000001)

    if outdir is not None and not os.path.exists(outdir):
        os.makedirs(outdir)

    for i, rast in enumerate(rasterlist):

        image, meta = to_numpy(rast, "float32")
        xs, ys = image.shape

        if i == 0:
            Sum = numpy.zeros((xs, ys))
            # BUGFIX: Crit is only allocated when critical values were given.
            # previously len(None) raised a TypeError when the documented
            # default critical_values=None was used.
            if critical_values is not None:
                Crit = numpy.zeros((len(critical_values), xs, ys))

        if image.shape == Sum.shape:

            # only bother to proceed if at least one pixel is positive
            if numpy.max(image) >= 0:
                for x in range(xs):
                    for y in range(ys):
                        if image[x, y] >= 0:
                            Sum[x, y] = Sum[x, y] + image[x, y]
                            if critical_values is not None:
                                for z, critical_value in enumerate(critical_values):
                                    if Sum[x, y] >= critical_value and Crit[z, x, y] == 0:
                                        Crit[z, x, y] = i
        else:
            # BUGFIX: converted Python 2 print statement to a function call
            print("Encountered an image of incorrect size! Skipping it!")

        Sum = Sum.astype('float32')
        outname = core.create_outname(outdir, rast, "Accum")
        from_numpy(Sum, meta, outname)
        output_filelist.append(outname)
        del image

    # output critical accumulation rasters using some data from the last
    # raster in the previous loop.
    # BUGFIX: this whole section is skipped when no critical values were
    # given, and the created files are now included in output_filelist as
    # the docstring promises.
    if critical_values is not None:
        Crit = Crit.astype('int16')
        crit_meta = meta
        crit_meta.NoData_Value = 0
        # place these in the last raster output location
        head, tail = os.path.split(outname)
        for z, critical_value in enumerate(critical_values):
            outname = os.path.join(
                head, "Crit_Accum_Index_Val-{0}.tif".format(str(critical_value)))
            print("Saving {0}".format(outname))
            from_numpy(Crit[z, :, :], crit_meta, outname)
            output_filelist.append(outname)

    return output_filelist
def clip_and_snap(snap_raster, rastname, outname, NoData_Value=None):
    """
    Ensures perfect coincidence between a snap_raster and any input rasters

    This script is primarily intended for calling by the
    "raster.spatially_match" function, but may be called independently. It
    inputs a reference image and a working image; the working image must be
    in exactly the same projection and spatial resolution as the reference
    image. This script will simply ensure the tif files are perfectly
    coincident, and that the total image extents are identical. This is
    important when performing numpy manipulations on matrices derived from
    different datasets manipulated in different ways to ensure alignment.

    This script makes modifications to the original raster file, so save a
    backup if you are unsure how to use this.

    :param snap_raster:  filepath and name of reference raster whose extent
                         will be taken on by the input rastname.
    :param rastname:     name of raster which should be snapped to snap_raster
    :param outname:      filepath where the snapped raster is written
    :param NoData_Value: value desired to represent NoData in the saved
                         image; defaults to rastname's metadata NoData value.

    :return snap_meta: metadata of the snap_raster file as output by to_numpy
    :return meta:      metadata of the rastname file as output by to_numpy
    """

    # grab metadata for rastname
    _, snap_meta = to_numpy(snap_raster)
    _, meta = to_numpy(rastname)

    if NoData_Value is None:
        NoData_Value = meta.NoData_Value

    head, tail = os.path.split(outname)
    tempdir = os.path.join(head, 'temp')
    if not os.path.isdir(tempdir):
        os.makedirs(tempdir)

    # set the snap raster environment in arcmap
    arcpy.env.snapRaster = snap_raster

    # remove data that is outside the bounding box and snap the image
    print("Clipping {0}".format(rastname))
    tempout = os.path.join(tempdir, 'tempclip.tif')
    # BUGFIX: the original bare "except:" also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception. If the first Clip call fails
    # (presumably because MAINTAIN_EXTENT is unsupported - TODO confirm),
    # retry without that argument.
    try:
        arcpy.Clip_management(rastname, snap_meta.rectangle, tempout,
                              "#", "#", "NONE", "MAINTAIN_EXTENT")
    except Exception:
        arcpy.Clip_management(rastname, snap_meta.rectangle, tempout,
                              "#", "#", "NONE")

    # load the newly clipped raster, find any residual offsets
    # (usually a single pixel or two)
    raster, meta = to_numpy(tempout)
    xloff = int(round((meta.Xmin - snap_meta.Xmin) / meta.cellWidth, 0))
    yloff = int(round((meta.Ymin - snap_meta.Ymin) / meta.cellHeight, 0))
    xhoff = int(round((meta.Xmax - snap_meta.Xmax) / meta.cellWidth, 0))
    yhoff = int(round((meta.Ymax - snap_meta.Ymax) / meta.cellHeight, 0))

    # plop the snapped raster into the new output raster, alter the
    # metadata, and save it
    meta.Xmin = snap_meta.Xmin
    meta.Ymin = snap_meta.Ymin
    meta.Xmax = snap_meta.Xmax
    meta.Ymax = snap_meta.Ymax
    newraster = raster[(-yloff):(meta.Ysize - yhoff),
                       (-xloff):(meta.Xsize - xhoff)]
    from_numpy(newraster, meta, outname, NoData_Value)

    # clean up the temp workspace; best-effort, failure is non-fatal
    try:
        shutil.rmtree(tempdir)
    except Exception:
        pass

    return snap_meta, meta
def gap_fill_interpolate(in_rasterpath, out_rasterpath, model=None,
                         max_cell_dist=None, min_points=None):
    """
    Fills gaps in raster data by spatial kriging interpolation.

    This should only be used to fill small gaps in continuous datasets
    (like a DEM), and in instances where it makes sense. This function
    creates a feature class layer of points where pixels are not NoData,
    then performs a "kriging" interpolation on the point data to rebuild
    a uniform grid with a value at every location, thus filling gaps.

    WARNING: processing intensive; may take a while to run even for
    modestly sized datasets.

    :param in_rasterpath:  input filepath to raster to fill gaps
    :param out_rasterpath: filepath to store output gap filled raster in
    :param model:          type of kriging model to run, options include
                           "SPHERICAL", "CIRCULAR", "EXPONENTIAL",
                           "GAUSSIAN", and "LINEAR" (default "SPHERICAL")
    :param max_cell_dist:  maximum number of cells to interpolate between;
                           gaps without at least "min_points" points within
                           this distance will not be filled (default 10)
    :param min_points:     minimum number of surrounding points to use in
                           determining the value at a missing cell (default 4)

    :return out_rasterpath: path to the file created by this function
    """
    # check inputs
    if not is_rast(in_rasterpath):
        raise Exception("input raster path {0} is invalid!".format(in_rasterpath))

    if max_cell_dist is None:
        max_cell_dist = 10
    if min_points is None:
        min_points = 4
    if model is None:
        model = "SPHERICAL"

    # set environments
    arcpy.env.overwriteOutput = True
    arcpy.env.snapRaster = in_rasterpath
    arcpy.CheckOutExtension("Spatial")

    # make a point shapefile version of the input raster
    print("Creating point grid from input raster")
    head, tail = os.path.split(in_rasterpath)
    shp_path = core.create_outname(head, tail, "shp", "shp")
    dbf_path = shp_path.replace(".shp", ".dbf")
    field = "GRID_CODE"
    arcpy.RasterToPoint_conversion(in_rasterpath, shp_path, "VALUE")

    # find the bad rows whose GRID_CODE is 1; these should be NoData
    print("Finding points with NoData entries")
    bad_row_FIDs = set()        # set: O(1) membership tests in the delete loop
    rows = arcpy.UpdateCursor(dbf_path)
    for row in rows:
        if getattr(row, field) == 1:
            bad_row_FIDs.add(row.FID)
    del rows

    # go back through the table and perform the deletions
    numbad = len(bad_row_FIDs)
    print("Deleting {0} points with NoData values".format(numbad))
    rows = arcpy.UpdateCursor(dbf_path)
    for row in rows:
        if row.FID in bad_row_FIDs:
            rows.deleteRow(row)
    del rows

    # set up the parameters for kriging
    print("Setting up for kriging")
    _, meta = to_numpy(in_rasterpath)
    cell_size = meta.cellHeight                         # from input raster
    distance = float(cell_size) * float(max_cell_dist)  # search radius

    # BUGFIX: previously "SPHERICAL" was hard-coded here, silently ignoring
    # the "model" argument; the semivariogram parameters default to None so
    # arcpy estimates them from the data
    kmodel = arcpy.sa.KrigingModelOrdinary(model,
                                           lagSize=None,
                                           majorRange=None,
                                           partialSill=None,
                                           nugget=None)
    kradius = arcpy.sa.RadiusFixed(distance=distance,
                                   minNumberOfPoints=min_points)

    # execute kriging
    print("Performing interpolation by kriging, this may take a while!")
    outkriging = arcpy.sa.Kriging(shp_path, field, kmodel,
                                  cell_size=cell_size,
                                  search_radius=kradius)
    outkriging.save(out_rasterpath)

    return out_rasterpath
def many_stats(rasterlist, outdir, outname, saves=['AVG', 'NUM', 'STD', 'SUM'],
               low_thresh=None, high_thresh=None, numtype='float32'):
    """
    Take statistics across many input rasters.

    This function is used to take statistics on large groups of rasters
    with identical spatial extents. Similar to Rolling_Raster_Stats.

    :param rasterlist:  list of raster filepaths for which to take statistics
    :param outdir:      directory where output should be stored
    :param outname:     output filename base for the statistic rasters
    :param saves:       which statistics to save in a raster; any subset of
                        ['AVG', 'NUM', 'STD', 'SUM'] (the list is never
                        mutated, so the shared default is safe)
    :param low_thresh:  values below low_thresh are assumed erroneous and
                        set to NoData
    :param high_thresh: values above high_thresh are assumed erroneous and
                        set to NoData
    :param numtype:     numerical datatype, defaults to 32 bit float
    """
    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    rasterlist = enf_rastlist(rasterlist)

    # build the empty numpy array based on the size of the first raster
    temp_rast, metadata = to_numpy(rasterlist[0])
    xs, ys = temp_rast.shape
    zs = len(rasterlist)
    rast_3d = numpy.zeros((xs, ys, zs))

    # use nan as the working NoData value so masking below is uniform
    metadata.NoData_Value = numpy.nan

    # open up the initial figure
    rastfig = raster_fig(temp_rast)

    # populate the 3d matrix with values from all rasters
    for i, raster in enumerate(rasterlist):
        print('working on file {0}'.format(os.path.basename(raster)))
        new_rast, new_meta = to_numpy(raster, numtype)
        new_rast = new_rast.data

        # BUGFIX: previously a size mismatch only printed the shape and then
        # fell through to the broadcast assignment, which raised ValueError.
        # Skip mismatched rasters instead (their z-slice remains zeros).
        if new_rast.shape != (xs, ys):
            print(new_rast.shape)
            continue

        # set rasters to have 'nan' NoData_Value
        if new_meta.NoData_Value != metadata.NoData_Value:
            new_rast[new_rast == new_meta.NoData_Value] = metadata.NoData_Value

        # set values outside thresholds to nodata values
        if low_thresh is not None:
            new_rast[new_rast < low_thresh] = metadata.NoData_Value
        if high_thresh is not None:
            new_rast[new_rast > high_thresh] = metadata.NoData_Value

        new_rast = numpy.ma.masked_array(new_rast, numpy.isnan(new_rast))

        # display a figure
        rastfig.update_fig(new_rast)

        rast_3d[:, :, i] = new_rast

    # build up our statistics by masking nan values and performing
    # matrix operations
    rastfig.close_fig()
    rast_3d_masked = numpy.ma.masked_array(rast_3d, numpy.isnan(rast_3d))

    if "AVG" in saves:
        avg_rast = numpy.mean(rast_3d_masked, axis=2)
        avg_rast = numpy.array(avg_rast)
        rastfig = raster_fig(avg_rast, title="Average")
        avg_name = core.create_outname(outdir, outname, 'AVG', 'tif')
        print("Saving AVERAGE output raster as {0}".format(avg_name))
        from_numpy(avg_rast, metadata, avg_name)
        rastfig.close_fig()
        del avg_rast

    if "STD" in saves:
        std_rast = numpy.std(rast_3d_masked, axis=2)
        std_rast = numpy.array(std_rast)
        rastfig = raster_fig(std_rast, title="Standard Deviation")
        std_name = core.create_outname(outdir, outname, 'STD', 'tif')
        print("Saving STANDARD DEVIATION output raster as {0}".format(std_name))
        from_numpy(std_rast, metadata, std_name)
        rastfig.close_fig()
        del std_rast

    if "NUM" in saves:
        # count of unmasked (good) pixels at each location
        num_rast = (numpy.zeros((xs, ys)) + zs) - numpy.sum(rast_3d_masked.mask, axis=2)
        num_rast = numpy.array(num_rast)
        rastfig = raster_fig(num_rast, title="Good pixel count (NUM)")
        rastfig.close_fig()
        num_name = core.create_outname(outdir, outname, 'NUM', 'tif')
        print("Saving NUMBER output raster as {0}".format(num_name))
        from_numpy(num_rast, metadata, num_name)
        rastfig.close_fig()
        del num_rast

    if "SUM" in saves:
        sum_rast = numpy.sum(rast_3d_masked, axis=2)
        sum_rast = numpy.array(sum_rast)
        # BUGFIX: title and message were copy-pasted from the NUM branch
        rastfig = raster_fig(sum_rast, title="Sum")
        rastfig.close_fig()
        sum_name = core.create_outname(outdir, outname, 'SUM', 'tif')
        print("Saving SUM output raster as {0}".format(sum_name))
        from_numpy(sum_rast, metadata, sum_name)
        rastfig.close_fig()
        del sum_rast

    rastfig.close_fig()
    return
def spatially_match(snap_raster, rasterlist, outdir, NoData_Value=False,
                    resamp_type=False):
    """
    Prepares input rasters for further numerical processing.

    This function simply ensures all rasters in "rasterlist" are identically
    projected and have the same cell size, then calls the clip_and_snap
    function to ensure that the cells are perfectly coincident and that the
    total spatial extents of the images are identical, even when NoData
    values are considered. This is useful because it allows the two images
    to be passed on for numerical processing as nothing more than matrices
    of values, and the user can be sure that any index in any matrix is
    exactly coincident with the same index in any other matrix. This is
    especially important to use when comparing different datasets from
    different sources outside arcmap, for example MODIS and Landsat data
    with an ASTER DEM.

    :param snap_raster:  raster to which all other images will be snapped
    :param rasterlist:   list of rasters, a single raster, or a directory
                         full of tiffs which will be clipped to the extent
                         of "snap_raster" and aligned such that the cells
                         are perfectly coincident
    :param outdir:       the output directory to save newly created
                         spatially matched tifs
    :param NoData_Value: passed through to clip_and_snap
    :param resamp_type:  resampling type to use if images are not identical
                         cell sizes. "NEAREST", "BILINEAR", and "CUBIC" are
                         the most common

    This function automatically invokes
        clip_and_snap
        project_resample
    """
    # sanitize inputs and create output directories
    tempdir = os.path.join(outdir, 'temp')
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    if not os.path.isdir(tempdir):
        os.makedirs(tempdir)

    rasterlist = enf_rastlist(rasterlist)
    core.exists(snap_raster)

    # set the snap raster environment in arcmap
    arcpy.env.snapRaster = snap_raster

    print('Loading snap raster {0}'.format(snap_raster))
    _, snap_meta = to_numpy(snap_raster)
    print('Bounds of rectangle to define boundaries: [{0}]'.format(snap_meta.rectangle))

    # for every raster in the raster list, snap rasters and clip
    for rastname in rasterlist:

        # BUGFIX: usetemp must be reset for each raster. Previously it was set
        # once before the loop, so after the first raster that needed
        # reprojection, every subsequent raster reused the stale tempname.
        usetemp = False
        tempname = None

        _, meta = to_numpy(rastname)
        head, tail = os.path.split(rastname)

        # reproject into the temp directory if projections differ
        if snap_meta.projection.projectionName != meta.projection.projectionName:
            print('Projection discrepancy found. Reprojecting...')
            project_resample(rastname, snap_raster, tempdir, resamp_type)
            tempname = core.create_outname(tempdir, tail, "p")
            usetemp = True

        # define an output name and run clip_and_snap on the formatted tif
        outname = core.create_outname(outdir, rastname, "sm")

        # if a temporary file was created in previous steps, use it here
        if usetemp:
            clip_and_snap(snap_raster, tempname, outname, NoData_Value)
        else:
            clip_and_snap(snap_raster, rastname, outname, NoData_Value)

        print('Finished matching raster {0}'.format(rastname))

    return
def gap_fill_interpolate(in_rasterpath, out_rasterpath, model=None,
                         max_cell_dist=None, min_points=None):
    """
    Fills gaps in raster data by spatial kriging interpolation.

    NOTE(review): this definition duplicates an earlier gap_fill_interpolate
    in this file; being later, it is the one Python keeps. Consider removing
    one copy.

    This should only be used to fill small gaps in continuous datasets
    (like a DEM), and in instances where it makes sense. This function
    creates a feature class layer of points where pixels are not NoData,
    then performs a "kriging" interpolation on the point data to rebuild
    a uniform grid with a value at every location, thus filling gaps.

    WARNING: processing intensive; may take a while to run even for
    modestly sized datasets.

    :param in_rasterpath:  input filepath to raster to fill gaps
    :param out_rasterpath: filepath to store output gap filled raster in
    :param model:          type of kriging model to run, options include
                           "SPHERICAL", "CIRCULAR", "EXPONENTIAL",
                           "GAUSSIAN", and "LINEAR" (default "SPHERICAL")
    :param max_cell_dist:  maximum number of cells to interpolate between;
                           gaps without at least "min_points" points within
                           this distance will not be filled (default 10)
    :param min_points:     minimum number of surrounding points to use in
                           determining the value at a missing cell (default 4)

    :return out_rasterpath: path to the file created by this function
    """
    # check inputs
    if not is_rast(in_rasterpath):
        raise Exception("input raster path {0} is invalid!".format(in_rasterpath))

    if max_cell_dist is None:
        max_cell_dist = 10
    if min_points is None:
        min_points = 4
    if model is None:
        model = "SPHERICAL"

    # set environments
    arcpy.env.overwriteOutput = True
    arcpy.env.snapRaster = in_rasterpath
    arcpy.CheckOutExtension("Spatial")

    # make a point shapefile version of the input raster
    print("Creating point grid from input raster")
    head, tail = os.path.split(in_rasterpath)
    shp_path = core.create_outname(head, tail, "shp", "shp")
    dbf_path = shp_path.replace(".shp", ".dbf")
    field = "GRID_CODE"
    arcpy.RasterToPoint_conversion(in_rasterpath, shp_path, "VALUE")

    # find the bad rows whose GRID_CODE is 1; these should be NoData
    print("Finding points with NoData entries")
    bad_row_FIDs = set()        # set: O(1) membership tests in the delete loop
    rows = arcpy.UpdateCursor(dbf_path)
    for row in rows:
        if getattr(row, field) == 1:
            bad_row_FIDs.add(row.FID)
    del rows

    # go back through the table and perform the deletions
    numbad = len(bad_row_FIDs)
    print("Deleting {0} points with NoData values".format(numbad))
    rows = arcpy.UpdateCursor(dbf_path)
    for row in rows:
        if row.FID in bad_row_FIDs:
            rows.deleteRow(row)
    del rows

    # set up the parameters for kriging
    print("Setting up for kriging")
    _, meta = to_numpy(in_rasterpath)
    cell_size = meta.cellHeight                         # from input raster
    distance = float(cell_size) * float(max_cell_dist)  # search radius

    # BUGFIX: previously "SPHERICAL" was hard-coded here, silently ignoring
    # the "model" argument; the semivariogram parameters default to None so
    # arcpy estimates them from the data
    kmodel = arcpy.sa.KrigingModelOrdinary(model,
                                           lagSize=None,
                                           majorRange=None,
                                           partialSill=None,
                                           nugget=None)
    kradius = arcpy.sa.RadiusFixed(distance=distance,
                                   minNumberOfPoints=min_points)

    # execute kriging
    print("Performing interpolation by kriging, this may take a while!")
    outkriging = arcpy.sa.Kriging(shp_path, field, kmodel,
                                  cell_size=cell_size,
                                  search_radius=kradius)
    outkriging.save(out_rasterpath)

    return out_rasterpath
def degree_days(T_base, Max, Min, NoData_Value, outpath=False, roof=False,
                floor=False):
    """
    Inputs rasters for maximum and minimum temperatures, calculates
    Growing Degree Days.

    This function is built to perform the common degree day calculation on
    either a pair of raster filepaths (wrapped in lists) or a pair of numpy
    arrays. It requires, at minimum, a maximum temperature value, a minimum
    temperature value, and a base temperature. This equation could also be
    used to calculate Chill hours or anything similar.

    The equation is ``[(Max + Min)/2 + T_base]``
        where values in Max which are greater than roof are set equal to roof
        where values in Min which are less than floor are set equal to floor

    Consult [https://en.wikipedia.org/wiki/Growing_degree-day] for more
    information.

    :param T_base:       base temperature to ADD, be mindful of sign convention
    :param Max:          numpy array, or single-entry list of a filepath,
                         of maximum temperatures
    :param Min:          numpy array, or single-entry list of a filepath,
                         of minimum temperatures
    :param NoData_Value: values to ignore (must be int or float)
    :param outpath:      filepath to which output should be saved. Only works
                         if Max and Min inputs are raster filepaths with
                         spatial referencing
    :param roof:         roof value above which Max temps do not matter
                         (False disables; 0 is a valid roof)
    :param floor:        floor value below which Min temps do not matter
                         (False disables; 0 is a valid floor)

    :return deg_days: a numpy array of the output degree_days, or False when
                      the two inputs are not the same shape

    NOTE: when roof/floor are given, the input arrays are clipped IN PLACE,
    matching the original behavior.
    """
    # format numerical inputs as floating point values.
    # BUGFIX: the original truthiness test ("if roof:") silently ignored a
    # legitimate threshold of 0 (e.g. a 0-degree floor); test explicitly.
    T_base = float(T_base)
    use_roof = roof is not False and roof is not None
    use_floor = floor is not False and floor is not None
    if use_roof:
        roof = float(roof)
    if use_floor:
        floor = float(floor)

    # determine the type of input and convert to a useful format for
    # calculation; acceptable inputs are filepaths in lists, plain lists of
    # values, or numpy arrays
    if type(Max) is list and type(Min) is list:
        # if the first entry in a list is a string, assume it is a filename
        # that has been placed into a list
        if type(Max[0]) is str and type(Min[0]) is str:
            Max = Max[0]
            Min = Min[0]

            # load in the min and max files
            highs, meta = to_numpy(Max)
            lows, meta = to_numpy(Min)
            print('Found spatially referenced image pair!')
        else:
            highs = numpy.array(Max)
            lows = numpy.array(Min)

    # if they are already numpy arrays
    elif type(Max) is numpy.ndarray:
        highs = Max
        lows = Min
    else:
        raise Exception("invalid inputs!")

    # apply roof and floor corrections if they have been specified
    if use_roof:
        highs[highs >= roof] = roof
    if use_floor:
        lows[lows <= floor] = floor

    # only continue if min and max arrays have the same shape
    if highs.shape != lows.shape:
        print('Images are not the same size!, Check inputs!')
        return False

    # vectorized replacement for the original per-pixel double loop:
    # a pixel is valid when neither input matches NoData_Value, using the
    # same ratio-and-round tolerance test as the original
    valid = ((numpy.round(highs / NoData_Value, 10) != 1) &
             (numpy.round(lows / NoData_Value, 10) != 1))
    deg_days = numpy.where(valid, (highs + lows) / 2 + T_base, NoData_Value)

    # if an output path was specified, save it with the spatial referencing
    # information (only possible for the filepath input branch)
    if outpath and type(Max) is str and type(Min) is str:
        from_numpy(deg_days, meta, outpath)
        print('Output saved at : ' + outpath)

    return deg_days
def degree_days_accum(rasterlist, critical_values=None, outdir=None):
    """
    Accumulates degree days in a time series rasterlist.

    This function is the logical successor to calc.degree_days. Input a list
    of rasters containing daily data to be accumulated. The output raster for
    a given day will be the sum total of the input raster for that day and
    all preceding days. The last output raster in a year's worth of data
    (image 365) would be the sum of all 365 images. The 25th output raster
    would be a sum of the first 25 days.

    Critical value rasters will also be created. Useful for example: we wish
    to know on what day of our 365 day sequence every pixel hits a value of
    100. Input 100 as a critical value and that output raster will be
    generated.

    :param rasterlist:      list of files, or directory containing rasters
                            to accumulate
    :param critical_values: values at which the user wishes to know WHEN the
                            total accumulation reaches this point. For every
                            critical value, an output raster will be created
                            containing integer values denoting the index
                            number of the file at which the value was
                            reached. Must be a list of ints or floats, not
                            strings. Optional.
    :param outdir:          desired output directory for all output files

    :return output_filelist: a list of all files created by this function
    """
    output_filelist = []
    rasterlist = enf_rastlist(rasterlist)

    if critical_values is not None:
        critical_values = core.enf_list(critical_values)

        # critical values of zero are problematic, so replace with a small value
        if 0 in critical_values:
            critical_values.remove(0)
            critical_values.append(0.000001)

    if outdir is not None and not os.path.exists(outdir):
        os.makedirs(outdir)

    for i, rast in enumerate(rasterlist):
        image, meta = to_numpy(rast, "float32")
        xs, ys = image.shape

        # allocate accumulators sized to the first image.
        # BUGFIX: Crit was previously allocated unconditionally with
        # len(critical_values), crashing with the default critical_values=None
        if i == 0:
            Sum = numpy.zeros((xs, ys))
            if critical_values is not None:
                Crit = numpy.zeros((len(critical_values), xs, ys))

        if image.shape == Sum.shape:

            # only bother to proceed if at least one pixel is positive
            if numpy.max(image) >= 0:
                for x in range(xs):
                    for y in range(ys):
                        if image[x, y] >= 0:
                            Sum[x, y] = Sum[x, y] + image[x, y]

                            # record the first file index at which each
                            # critical value is reached
                            if critical_values is not None:
                                for z, critical_value in enumerate(critical_values):
                                    if Sum[x, y] >= critical_value and Crit[z, x, y] == 0:
                                        Crit[z, x, y] = i
        else:
            print("Encountered an image of incorrect size! Skipping it!")

        Sum = Sum.astype('float32')
        outname = core.create_outname(outdir, rast, "Accum")
        from_numpy(Sum, meta, outname)
        output_filelist.append(outname)
        del image

    # output critical accumulation rasters using metadata from the last
    # raster processed in the loop above
    if critical_values is not None:
        Crit = Crit.astype('int16')
        crit_meta = meta
        crit_meta.NoData_Value = 0

        # place these in the last raster output location
        head, tail = os.path.split(outname)

        for z, critical_value in enumerate(critical_values):
            outname = os.path.join(head, "Crit_Accum_Index_Val-{0}.tif".format(str(critical_value)))
            print("Saving {0}".format(outname))
            from_numpy(Crit[z, :, :], crit_meta, outname)
            # BUGFIX: the docstring promises ALL created files are returned,
            # but critical rasters were previously omitted from the list
            output_filelist.append(outname)

    return output_filelist