def topographic_radiation(raw_aspect, radiation_output):
    """
    Description: calculates 32-bit float topographic radiation
    Inputs: 'raw_aspect' -- an input raw aspect raster
            'radiation_output' -- an output topographic radiation raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input raw aspect raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import Cos
    from arcpy.sa import Raster

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Calculate topographic radiation aspect index
    print('\t\tCalculating topographic radiation aspect index...')
    numerator = 1 - Cos((3.142 / 180) * (Raster(raw_aspect) - 30))
    radiation_index = numerator / 2

    # Convert negative aspect values
    print('\t\tConverting negative aspect values...')
    out_raster = Con(Raster(raw_aspect) < 0, 0.5, radiation_index)
    out_raster.save(radiation_output)
def topographic_position(elevation_input, position_output):
    """
    Description: calculates 32-bit float topographic position
    Inputs: 'elevation_input' -- an input raster digital elevation model
            'position_output' -- an output topographic position raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input elevation raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import FocalStatistics
    from arcpy.sa import NbrRectangle
    from arcpy.sa import Raster

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Define a neighborhood variable
    neighborhood = NbrRectangle(5, 5, "CELL")

    # Calculate local mean
    print('\t\tCalculating local mean...')
    local_mean = FocalStatistics(elevation_input, neighborhood, 'MEAN', 'DATA')

    # Calculate topographic position
    print('\t\tCalculating topographic position...')
    out_raster = Raster(elevation_input) - local_mean
    out_raster.save(position_output)
def site_exposure(raw_aspect, raw_slope, exposure_output):
    """
    Description: calculates 32-bit float site exposure
    Inputs: 'raw_aspect' -- an input raw aspect raster
            'raw_slope' -- an input raw slope raster in degrees
            'exposure_output' -- an output site exposure raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input aspect and slope raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import Cos
    from arcpy.sa import Divide
    from arcpy.sa import Minus
    from arcpy.sa import Raster
    from arcpy.sa import Times

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Calculate cosine of modified aspect
    print('\t\tCalculating cosine of modified aspect...')
    cosine = Cos(Divide(Times(3.142, Minus(Raster(raw_aspect), 180)), 180))

    # Calculate site exposure index and save output
    print('\t\tCalculating site exposure index...')
    out_raster = Times(Raster(raw_slope), cosine)
    out_raster.save(exposure_output)
def do_score(fire, year, day, p, shp):
    """!
    Calculate score
    @param fire Fire to calculate score for
    @param year Year fire is from
    @param day Day score is for
    @param p Probability raster to compare to
    @param shp Shapefile for actual perimeter
    @return None
    """
    orig_raster = os.path.join(run_output, fire + ".tif")
    orig_raster = Raster(orig_raster) if os.path.exists(orig_raster) else None
    prob_raster = Raster(p)
    raster = os.path.join(run_output, os.path.splitext(os.path.basename(shp))[0] + '.tif')
    perim, raster = rasterize_perim(run_output, shp, year, fire, raster)
    if perim:
        target = Raster(raster)
        # remove the original raster used to start the simulation
        r = Con(IsNull(orig_raster), prob_raster, 0.0) if orig_raster is not None else prob_raster
        r = SetNull(r == 0.0, r)
        m = Con(IsNull(orig_raster), target, 0.0) if orig_raster is not None else target
        m = SetNull(m == 0.0, m)
        hits = Con(IsNull(r), 0.0, r) * Con(IsNull(m), 0.0, 1.0)
        misses = Con(IsNull(r), 1.0, 0.0) * Con(IsNull(m), 0.0, 1.0)
        false_positives = Con(IsNull(r), 0.0, r) * Con(IsNull(m), 1.0, 0.0)
        tp = arcpy.RasterToNumPyArray(hits, nodata_to_value=0).sum()
        fn = arcpy.RasterToNumPyArray(misses, nodata_to_value=0).sum()
        fp = arcpy.RasterToNumPyArray(false_positives, nodata_to_value=0).sum()
        total_score = tp / (tp + fn + fp)
        #~ logging.info("Scores are {} + {} + {} = {}".format(tp, fn, fp, total_score))
        scores.append([fire, year, day, p, shp, tp, fn, fp, total_score])
def getNullSubstituteGrid(lccObj, inLandCoverGrid, inSubstituteGrid, nullValuesList, cleanupList, timer):
    # Set areas in the inSubstituteGrid to NODATA using the nullValuesList. For areas not in the
    # nullValuesList, substitute the grid values with those from the inLandCoverGrid
    LCGrid = Raster(inLandCoverGrid)
    subGrid = Raster(inSubstituteGrid)

    # Find the highest value found in the LCC XML file or the land cover grid
    lccValuesDict = lccObj.values
    maxValue = LCGrid.maximum
    xmlValues = lccObj.getUniqueValueIdsWithExcludes()
    for v in xmlValues:
        if v > maxValue:
            maxValue = v

    # Add 1 to the highest value and then add it to the list of values to exclude during metric calculations
    valueToExclude = int(maxValue + 1)
    excludedValuesFrozen = lccValuesDict.getExcludedValueIds()
    excludedValues = [item for item in excludedValuesFrozen]
    excludedValues.append(valueToExclude)

    # Build the whereClause string (e.g. "VALUE" <> 11 or "VALUE" <> 12) to identify areas to substitute with the valueToExclude
    delimitedVALUE = arcpy.AddFieldDelimiters(subGrid, "VALUE")
    stringStart = delimitedVALUE + " <> "
    stringSep = " or " + delimitedVALUE + " <> "
    whereClause = stringStart + stringSep.join([str(item) for item in nullValuesList])

    AddMsg(timer.split() + " Generating land cover in floodplain grid...")
    nullSubstituteGrid = Con(subGrid, LCGrid, valueToExclude, whereClause)

    return nullSubstituteGrid, excludedValues
def combineProposedWithCurrentDebit(anthroPath, uniqueProposedSubtypes):
    for subtype in uniqueProposedSubtypes:
        # Merge proposed and current feature rasters
        currentAnthroFeature = Raster(os.path.join(anthroPath, subtype))
        proposedAnthroFeature = Raster("Proposed_" + subtype)
        postAnthroFeature = Con(IsNull(proposedAnthroFeature), currentAnthroFeature, proposedAnthroFeature)
        postAnthroFeature.save(os.path.join("Post_" + subtype))
def combineProposedWithCurrentCredit(anthroPath, uniqueProposedSubtypes):
    for subtype in uniqueProposedSubtypes:
        # Merge proposed and current feature rasters
        currentAnthroFeature = Raster(os.path.join(anthroPath, subtype))
        proposedAnthroFeature = Raster("Proposed_" + subtype)
        postAnthroFeature = SetNull(proposedAnthroFeature, currentAnthroFeature, "Value = 1")
        postAnthroFeature.save(os.path.join("Post_" + subtype))
def make_weightrasters():
    ap.Project_management(INPUTPOLYS, TMPPOLYS, ap.SpatialReference(OUTCOORDS))
    ap.AddField_management(TMPPOLYS, "__area", "DOUBLE")
    ap.AddField_management(TMPPOLYS, "__normalized", "DOUBLE")
    ap.CalculateField_management(TMPPOLYS, "__area", "!shape.area@kilometers!", "PYTHON")
    ret = []
    for f in RASTERFIELDS:
        ap.CalculateField_management(TMPPOLYS, "__normalized", "!%s!/!__area!" % f, "PYTHON")
        ap.PolygonToRaster_conversion(TMPPOLYS, "__normalized", f, cellsize=OUTRES)
        distributed = Raster(f) * Raster(DISTRASTERS[f])
        out = "d%s" % f
        distributed.save(out)
        ret.append(out)
    return ret
def reclassify_lulc(reclass_numbers, out_file_name):
    lulc_rast = Raster('nor_lulc_ext_prj.tif')
    remap_list = [[str(i), 1] for i in reclass_numbers]
    remap = RemapValue(remap_list)
    reclass_field = "VALUE"
    water_rast = Reclassify(lulc_rast, reclass_field, remap, "NODATA")
    water_rast.save(out_file_name)
def calcSubtypeDisturbance(AnthroFeatures, subtype, AnthroDisturbanceType):
    """calculate disturbance associated with each subtype"""
    distance = distanceDict[subtype]
    weight = weightDict[subtype]

    AnthroFeatures = Raster(AnthroFeatures)

    if distance > 0:
        arcpy.AddMessage(" Calculating direct and indirect effects of " + str(subtype))
        outEucDist = EucDistance(AnthroFeatures, distance, cellSize)
        tmp1 = 100 - (1 / (1 + Exp(((outEucDist / (distance / 2)) - 1) * 5))) * weight  # sigmoidal
        # tmp1 = (100 - (weight * Power((1 - outEucDist/distance), 2)))  # exponential
        # tmp1 = 100 - (weight - (outEucDist / distance) * weight)  # linear
        tmp2 = Con(IsNull(tmp1), 100, tmp1)
        subtypeRaster = tmp2
        subtypeRaster.save(AnthroDisturbanceType + "_" + subtype + "_Subtype_Disturbance")
    elif weight > 0:
        arcpy.AddMessage(" Calculating direct effects of " + str(subtype))
        tmp3 = Con(IsNull(AnthroFeatures), 0, AnthroFeatures)
        subtypeRaster = 100 - (tmp3 * weight)
        subtypeRaster.save(AnthroDisturbanceType + "_" + subtype + "_Subtype_Disturbance")
    else:
        subtypeRaster = None

    return subtypeRaster
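
# Example usage (sketch) for calcSubtypeDisturbance(): the function reads the
# module-level variables distanceDict, weightDict, and cellSize, so hypothetical
# values are assigned here before the call. The raster name 'Current_Roads' and
# the subtype key 'Roads' are placeholders, and arcpy.env.workspace is assumed
# to already point at a workspace containing that raster.
if __name__ == '__main__':
    distanceDict = {'Roads': 500}   # hypothetical indirect-effect distance in map units
    weightDict = {'Roads': 80}      # hypothetical disturbance weight (0-100)
    cellSize = 30                   # hypothetical cell size passed to EucDistance
    calcSubtypeDisturbance('Current_Roads', 'Roads', 'Current')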
def clipGridByBuffer(inReportingUnitFeature, outName, inLandCoverGrid, inBufferDistance=None):
    if arcpy.Exists(outName):
        arcpy.Delete_management(outName)
    if inBufferDistance:
        # Buffering Reporting unit features...
        cellSize = Raster(inLandCoverGrid).meanCellWidth
        linearUnits = arcpy.Describe(inLandCoverGrid).spatialReference.linearUnitName
        bufferFloat = cellSize * (int(inBufferDistance) + 1)
        bufferDistance = "%s %s" % (bufferFloat, linearUnits)
        arcpy.Buffer_analysis(inReportingUnitFeature, "in_memory/ru_buffer", bufferDistance, "#", "#", "ALL")

    # Clipping input grid to desired extent...
    if inBufferDistance:
        clippedGrid = arcpy.Clip_management(inLandCoverGrid, "#", outName, "in_memory/ru_buffer", "", "NONE")
        arcpy.Delete_management("in_memory")
    else:
        clippedGrid = arcpy.Clip_management(inLandCoverGrid, "#", outName, inReportingUnitFeature, "", "NONE")

    arcpy.BuildRasterAttributeTable_management(clippedGrid, "Overwrite")

    return clippedGrid
def roughness(elevation_input, roughness_output):
    """
    Description: calculates 32-bit float roughness
    Inputs: 'elevation_input' -- an input raster digital elevation model
            'roughness_output' -- an output roughness raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input elevation raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import FocalStatistics
    from arcpy.sa import NbrRectangle
    from arcpy.sa import Raster
    from arcpy.sa import Square

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Define a neighborhood variable
    neighborhood = NbrRectangle(5, 5, "CELL")

    # Calculate the elevation standard deviation
    print('\t\tCalculating standard deviation...')
    standard_deviation = FocalStatistics(Raster(elevation_input), neighborhood, 'STD', 'DATA')

    # Calculate the square of standard deviation
    print('\t\tCalculating squared standard deviation...')
    out_raster = Square(standard_deviation)
    out_raster.save(roughness_output)
def compound_topographic(elevation_input, flow_accumulation, raw_slope, cti_output):
    """
    Description: calculates 32-bit float compound topographic index
    Inputs: 'elevation_input' -- an input raster digital elevation model
            'flow_accumulation' -- an input flow accumulation raster with the same spatial reference as the elevation raster
            'raw_slope' -- an input raw slope raster in degrees with the same spatial reference as the elevation raster
            'cti_output' -- an output compound topographic index raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires input elevation, flow accumulation, and raw slope rasters
    """

    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import Divide
    from arcpy.sa import Ln
    from arcpy.sa import Plus
    from arcpy.sa import Raster
    from arcpy.sa import Times
    from arcpy.sa import Tan

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Get spatial properties for the input elevation raster
    description = arcpy.Describe(elevation_input)
    cell_size = description.meanCellHeight

    # Convert degree slope to radian slope
    print('\t\tConverting degree slope to radians...')
    slope_radian = Divide(Times(Raster(raw_slope), 1.570796), 90)

    # Calculate slope tangent
    print('\t\tCalculating slope tangent...')
    slope_tangent = Con(slope_radian > 0, Tan(slope_radian), 0.001)

    # Correct flow accumulation
    print('\t\tModifying flow accumulation...')
    accumulation_corrected = Times(Plus(Raster(flow_accumulation), 1), cell_size)

    # Calculate compound topographic index as natural log of corrected flow accumulation divided by slope tangent
    print('\t\tCalculating compound topographic index...')
    out_raster = Ln(Divide(accumulation_corrected, slope_tangent))
    out_raster.save(cti_output)
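
# Example usage (sketch) for compound_topographic(): the paths below are
# hypothetical placeholders. The flow accumulation and slope rasters are assumed
# to have been derived from the same DEM so that all inputs share one spatial
# reference, as the docstring above requires.
if __name__ == '__main__':
    compound_topographic(elevation_input='C:/data/elevation.tif',
                         flow_accumulation='C:/data/flow_accumulation.tif',
                         raw_slope='C:/data/slope_degrees.tif',
                         cti_output='C:/data/cti.tif')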
def imperviousness_raster():
    out_file_name = 'neigh_imp.tif'
    imp_rast = Raster('nor_imperv.tif')
    neighborhood = NbrRectangle(width=1500, height=1500, units="MAP")
    neigh_imp = FocalStatistics(imp_rast, neighborhood=neighborhood, statistics_type="SUM", ignore_nodata="DATA")
    neigh_imp.save(out_file_name)
    return out_file_name
def path_allocation_basins():
    src_raster = '{}/Stormwater Infrastructure/sw_struct_basins.shp'.format(gis_proj_dir)
    # 'elev_raster' is assumed to be a module-level path to the elevation raster;
    # a distinct local name avoids shadowing that global before it is read
    elev = Raster(elev_raster)
    out_file_name = 'path_allo_basins.tif'
    pth_all = PathAllocation(src_raster, source_field="FID", in_surface_raster=elev, in_vertical_raster=elev)
    pth_all.save(out_file_name)
    return out_file_name
def calculate_dist_to_src(src_raster, out_file_name):
    """
    uses water raster as source
    should have run reclassify_lulc_for_water first
    :return:
    """
    # 'elev_raster' is assumed to be a module-level path to the elevation raster;
    # a distinct local name avoids shadowing that global before it is read
    elev = Raster(elev_raster)
    path_dist = PathDistance(in_source_data=src_raster, in_surface_raster=elev, in_vertical_raster=elev)
    path_dist.save(out_file_name)
def extract_values_to_points():
    target_shapefile = 'fld_nfld_pts.shp'
    # 'elev_raster' is assumed to be a module-level path to the elevation raster;
    # a distinct local name avoids shadowing that global before it is read
    elev = Raster(elev_raster)
    raster_list = [['twi.tif', 'twi'],
                   [elev, 'elev'],
                   ['path_dist_basins.tif', 'dist_to_basin'],
                   ['path_allo_basins.tif', 'basin_id'],
                   ['neigh_imp.tif', 'imp'],
                   ['pth_dist_to_wat.tif', 'dist_to_wat']]
    ExtractMultiValuesToPoints(target_shapefile, raster_list, "NONE")
def getIntersectOfGrids(lccObj, inLandCoverGrid, inSlopeGrid, inSlopeThresholdValue, timer):
    # Generate the slope X land cover grid where areas below the threshold slope are
    # set to the value 'Maximum Land Cover Class Value + 1'.
    LCGrid = Raster(inLandCoverGrid)
    SLPGrid = Raster(inSlopeGrid)

    # Find the highest value found in the LCC XML file or the land cover grid
    lccValuesDict = lccObj.values
    maxValue = LCGrid.maximum
    xmlValues = lccObj.getUniqueValueIdsWithExcludes()
    for v in xmlValues:
        if v > maxValue:
            maxValue = v

    AddMsg(timer.split() + " Generating land cover above slope threshold grid...")
    AreaBelowThresholdValue = int(maxValue + 1)
    delimitedVALUE = arcpy.AddFieldDelimiters(SLPGrid, "VALUE")
    whereClause = delimitedVALUE + " >= " + inSlopeThresholdValue
    SLPxLCGrid = Con(SLPGrid, LCGrid, AreaBelowThresholdValue, whereClause)

    # Determine if a grid code is to be included in the effective reporting unit area calculation:
    # get the frozenset of excluded values (i.e., values not to use when calculating the reporting unit effective area)
    excludedValues = lccValuesDict.getExcludedValueIds()

    # If certain land cover codes are tagged as 'excluded = TRUE', generate a grid where land cover codes are
    # preserved for areas coincident with steep slopes. Areas below the slope threshold are coded with the
    # AreaBelowThresholdValue, except where the land cover code is included in the excluded values list.
    # In that case, the excluded land cover values are maintained in the low slope areas.
    if excludedValues:
        # Build a whereClause string (e.g. "VALUE" = 11 or "VALUE" = 12) to identify where excluded values occur on the land cover grid
        AddMsg(timer.split() + " Inserting EXCLUDED values into areas below slope threshold...")
        stringStart = delimitedVALUE + " = "
        stringSep = " or " + delimitedVALUE + " = "
        whereExcludedClause = stringStart + stringSep.join([str(item) for item in excludedValues])
        SLPxLCGrid = Con(LCGrid, LCGrid, SLPxLCGrid, whereExcludedClause)

    return SLPxLCGrid
def linear_aspect(raw_aspect, aspect_output):
    """
    Description: calculates 32-bit float linear aspect
    Inputs: 'raw_aspect' -- an input raw aspect raster
            'aspect_output' -- an output linear aspect raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input raw aspect raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import ATan2
    from arcpy.sa import Con
    from arcpy.sa import Cos
    from arcpy.sa import FocalStatistics
    from arcpy.sa import Mod
    from arcpy.sa import NbrRectangle
    from arcpy.sa import Raster
    from arcpy.sa import SetNull
    from arcpy.sa import Sin

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Define a neighborhood variable
    neighborhood = NbrRectangle(3, 3, "CELL")

    # Calculate aspect transformations
    print('\t\tTransforming raw aspect to linear aspect...')
    setNull_aspect = SetNull(Raster(raw_aspect) < 0, (450.0 - Raster(raw_aspect)) / 57.296)
    sin_aspect = Sin(setNull_aspect)
    cos_aspect = Cos(setNull_aspect)
    sum_sin = FocalStatistics(sin_aspect, neighborhood, "SUM", "DATA")
    sum_cos = FocalStatistics(cos_aspect, neighborhood, "SUM", "DATA")
    # Multiplying by 100 and taking Mod 36000 (360 * 100) before dividing by 100 preserves
    # two decimal places, since a floating-point Fmod operator appears to be unavailable
    mod_aspect = Mod(((450 - (ATan2(sum_sin, sum_cos) * 57.296)) * 100), 36000) / 100
    out_raster = Con((sum_sin == 0) & (sum_cos == 0), -1, mod_aspect)

    # Save output raster file
    out_raster.save(aspect_output)
def function(DEM, streamNetwork, smoothDropBuffer, smoothDrop, streamDrop, outputReconDEM):
    try:
        # Set environment variables
        arcpy.env.extent = DEM
        arcpy.env.mask = DEM
        arcpy.env.cellSize = DEM

        # Set temporary variables
        prefix = "recon_"
        streamRaster = prefix + "streamRaster"

        # Determine DEM cell size and OID column name
        size = arcpy.GetRasterProperties_management(DEM, "CELLSIZEX")
        OIDField = arcpy.Describe(streamNetwork).OIDFieldName

        # Convert stream network to raster
        arcpy.PolylineToRaster_conversion(streamNetwork, OIDField, streamRaster, "", "", size)

        # Work out distance of cells from stream
        distanceFromStream = EucDistance(streamRaster, "", size)

        # Elements within a buffer distance of the stream are smoothly dropped
        intSmoothDrop = Con(distanceFromStream > float(smoothDropBuffer), 0,
                            (float(smoothDrop) / float(smoothDropBuffer)) * (float(smoothDropBuffer) - distanceFromStream))
        del distanceFromStream

        # Burn this smooth drop into the DEM. Cells in the stream are sharply dropped by the value of "streamDrop"
        binaryStream = Con(IsNull(Raster(streamRaster)), 0, 1)
        reconDEMTemp = Raster(DEM) - intSmoothDrop - (float(streamDrop) * binaryStream)
        del intSmoothDrop
        del binaryStream

        reconDEMTemp.save(outputReconDEM)
        del reconDEMTemp

        log.info("Reconditioned DEM generated")

    except Exception:
        log.error("DEM reconditioning function failed")
        raise
def integrated_moisture(elevation_input, flow_accumulation, zFactor, imi_output):
    """
    Description: calculates 32-bit float integrated moisture index
    Inputs: 'elevation_input' -- an input raster digital elevation model
            'flow_accumulation' -- an input flow accumulation raster with the same spatial reference as the elevation raster
            'zFactor' -- a unit scaling factor for calculations that involve comparisons of xy to z
            'imi_output' -- an output integrated moisture index raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires input elevation and flow accumulation rasters
    """

    # Import packages
    import arcpy
    from arcpy.sa import Curvature
    from arcpy.sa import Hillshade
    from arcpy.sa import Plus
    from arcpy.sa import Times
    from arcpy.sa import Raster

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Adjust flow accumulation
    print('\t\tScaling flow accumulation...')
    adjusted_accumulation = Times(Raster(flow_accumulation), 0.35)

    # Calculate and adjust curvature
    print('\t\tCalculating curvature...')
    curvature = Curvature(Raster(elevation_input), zFactor)
    adjusted_curvature = Times(curvature, 0.15)

    # Calculate and adjust hillshade
    print('\t\tCalculating hillshade...')
    hillshade = Hillshade(Raster(elevation_input), "#", "#", "#", zFactor)
    adjusted_hillshade = Times(hillshade, 0.5)

    # Calculate integrated moisture index
    out_raster = Plus(Plus(adjusted_accumulation, adjusted_curvature), adjusted_hillshade)
    out_raster.save(imi_output)
def extract_raster(**kwargs):
    """
    Description: extracts a raster to a mask
    Inputs: 'work_geodatabase' -- path to a file geodatabase that will serve as the workspace
            'input_array' -- an array containing the target raster to extract (must be first) and the mask raster (must be second)
            'output_array' -- an array containing the output raster
    Returned Value: Returns a raster dataset
    Preconditions: the initial raster must be created from other scripts and the study area raster must be created manually
    """

    # Import packages
    import arcpy
    from arcpy.sa import ExtractByMask
    from arcpy.sa import Raster
    import datetime
    import time

    # Parse key word argument inputs
    work_geodatabase = kwargs['work_geodatabase']
    input_raster = kwargs['input_array'][0]
    mask_raster = kwargs['input_array'][1]
    output_raster = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Set workspace
    arcpy.env.workspace = work_geodatabase

    # Set snap raster and extent
    arcpy.env.snapRaster = mask_raster
    arcpy.env.extent = Raster(mask_raster).extent

    # Extract raster to study area
    print('\t\tPerforming extraction to study area...')
    iteration_start = time.time()
    extract_raster = ExtractByMask(input_raster, mask_raster)
    arcpy.management.CopyRaster(extract_raster, output_raster, '', '', '-32768', 'NONE', 'NONE',
                                '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE', 'CURRENT_SLICE',
                                'NO_TRANSPOSE')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\t\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t\t----------')
    out_process = '\tSuccessfully extracted raster data to mask.'
    return out_process
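
# Example usage (sketch) of the keyword-argument convention used by extract_raster():
# 'input_array' lists the target raster first and the mask raster second, and
# 'output_array' holds the single output path. The geodatabase and raster paths
# are hypothetical placeholders.
if __name__ == '__main__':
    extract_kwargs = {'work_geodatabase': 'C:/data/workspace.gdb',
                      'input_array': ['C:/data/input_raster.tif', 'C:/data/study_area.tif'],
                      'output_array': ['C:/data/output_raster.tif']}
    print(extract_raster(**extract_kwargs))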
def mp_land_temperature(file):
    path_thermal = file + "\\" + file[-40:] + "_B10.tif"
    location = file[-30:-24]
    date = file[-24:-15]
    name = date + "_" + location
    print(name)

    # e.g. r'C:\Users\yourname\PSU_LiDAR\f' + raster.replace(".img", "")
    arcpy.env.scratchWorkspace = os.path.join(arcpy.env.workspace, name)
    if not os.path.exists(arcpy.env.scratchWorkspace):
        os.makedirs(arcpy.env.scratchWorkspace)

    thermal_band = Raster(path_thermal)
    print(thermal_band)
    print("band confirmed")

    # Scale the thermal band to top-of-atmosphere radiance using the band 10 rescaling constants,
    # then save and reload the intermediate raster
    Rfloat = Float(thermal_band)
    top_temperature = Rfloat * RADIANCE_MULT_BAND + RADIANCE_ADD - Oi
    temp_tif = "temp{}.tif".format(name)
    top_temperature.save(temp_tif)
    top_temperature = Float(Raster(temp_tif))

    # Convert to temperature in degrees Celsius using the band 10 thermal constants
    divide1 = Divide(K1_CONSTANT_BAND_10, (top_temperature + 1))
    ln1 = Ln(divide1)
    surface_temp = Divide(K2_CONSTANT_BAND_10, ln1) - 273.15

    path = "surface_temp" + name + ".tif"
    print(path)
    surface_temp.save(path)
def create_mask(path_to_temp):
    arcpy.CheckOutExtension('Spatial')
    inputrasters = arcpy.GetParameterAsText(1).split(";")
    rasterlist = []
    for elements in inputrasters:
        rasterobject = Raster(elements)
        rasterlist.append(rasterobject)
    rasterlistSum = sum(rasterlist)
    # result raster has 1 where value meets value and NoData where value meets NoData
    outcon = Con(IsNull(rasterlistSum) == 0, 1)
    outcon.save(join(path_to_temp, "mask"))  # filename is 'mask'
    # arcpy.CheckInExtension('Spatial')
    path_to_mask = join(path_to_temp, "mask")
    if arcpy.Exists(path_to_mask):
        arcpy.AddMessage("Creating mask.")
    return path_to_mask
def log4param(inlayer, type, outlayer):
    # Set environment settings
    arcpy.env.snapRaster = "Y:/Tahoe/GISdata/Lattice_Clip30m.gdb/Lattice_Clip30m_ProjBound"
    arcpy.env.extent = "Y:/Tahoe/GISdata/Lattice_Clip30m.gdb/Lattice_Clip30m_ProjBound"
    arcpy.env.workspace = "Y:/Tahoe/GISdata/WorkGDBCreated051214.gdb/"

    # Check out the ArcGIS Spatial Analyst extension license
    arcpy.CheckOutExtension("spatial")

    x = inlayer
    if type == "slope":
        left = 1
        right = 0.2
        slope = 0.2
        inflexion = 30
        # rule = "".join([str(left), "+((", str(right), "-", str(left), ")/(1+Exp(", str(slope), "*(", str(inflexion), "-", x, "))))"])
        # arcpy.RasterCalculator(rule, outlayer)
    if type == "suscTPI":
        left = 0.8
        right = 1.2
        slope = 1
        inflexion = 0
    if type == "suitTPI":
        left = 0.4
        right = 1
        slope = 1
        inflexion = 0
    if type == "roadmech":
        left = 1
        right = 0
        slope = 0.015
        inflexion = 300
    if type == "roadburn":
        left = 1
        right = 0.1
        slope = 0.001
        inflexion = 5000

    outRas = left + ((right - left) / (1 + Exp(slope * (inflexion - Raster(x)))))
    outRas.save(outlayer)
def surface_area(raw_slope, area_output):
    """
    Description: calculates 32-bit float surface area ratio
    Inputs: 'raw_slope' -- an input raw slope raster
            'area_output' -- an output surface area ratio raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input raw slope raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import Cos
    from arcpy.sa import Float
    from arcpy.sa import Raster
    import math

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Getting info on raster
    description = arcpy.Describe(raw_slope)
    cell_size = description.meanCellHeight

    # Set the cell size environment
    arcpy.env.cellSize = cell_size

    # Calculate cell area
    cell_area = cell_size * cell_size

    # Modify raw slope
    print('\t\tModifying raw slope...')
    modifier = math.pi / 180
    modified_slope = Raster(raw_slope) * modifier

    # Calculate surface area ratio
    print('\t\tCalculating surface area ratio...')
    out_raster = Float(cell_area) / Cos(modified_slope)
    out_raster.save(area_output)
def calculate_LST(source_dir):
    dir_name = os.path.dirname(source_dir)
    basename = os.path.basename(source_dir)
    print(dir_name, basename)
    workspace = create_dir(basename + ".gdb")
    print(workspace)
    env.workspace = workspace
    env.overwriteOutput = True

    Band4 = "{}/{}_B4.tif".format(source_dir, basename)
    Band5 = "{}/{}_B5.tif".format(source_dir, basename)
    Band10 = "{}/{}_B10.tif".format(source_dir, basename)
    Band4_R = Raster(Band4)
    Band5_R = Raster(Band5)
    Band5_R.save("{}/{}_bands".format(workspace, basename))
    Band4 = Float(Band4_R)
    Band5 = Float(Band5_R)
    Band10 = Float(Raster(Band10))

    # Calculate NDVI from the red (band 4) and near-infrared (band 5) bands
    NDVI = Divide((Band5 - Band4), (Band5 + Band4))
    NDVI_path = "{}/{}_ndvi".format(workspace, basename)
    print(NDVI_path)
    NDVI.save(NDVI_path)
    NDVI_max = GetRasterProperties_management(NDVI_path, property_type="MAXIMUM").getOutput(0)
    NDVI_min = GetRasterProperties_management(NDVI_path, property_type="MINIMUM").getOutput(0)

    # Proportion of vegetation and emissivity
    a = Minus(NDVI, -1)
    b = float(NDVI_max) - float(NDVI_min)
    c = Divide(a, b)
    PV = Square(c)
    E = 0.004 * PV + 0.986
    E.save("{}/{}_E".format(workspace, basename))

    # Top-of-atmosphere radiance, brightness temperature, and land surface temperature
    TOA = 0.0003342 * Band10 + 0.1
    BT = 1321.08 / Ln((774.89 / TOA) + 1) - 273.15
    d = 1 + (0.00115 * BT / 1.4388) + Ln(E)
    LST = Divide(BT, d)
    print(LST, type(LST))
    LST_Path = "{}/{}_LST".format(workspace, basename)
    LST.save(LST_Path)
    print(LST_Path)
def getViewGrid(classValuesList, excludedValuesList, inLandCoverGrid, landCoverValues, viewRadius, conValues, timer):
    # Create class (value = 1) / other (value = 0) / excluded (value = 0) raster
    # Define the reclass values
    classValue = 1
    excludedValue = 0
    otherValue = 0
    newValuesList = [classValue, excludedValue, otherValue]

    # Generate a reclass list where each item in the list is a two item list: the original grid value, and the reclass value
    reclassPairs = getInOutOtherReclassPairs(landCoverValues, classValuesList, excludedValuesList, newValuesList)

    AddMsg("{0} Reclassifying selected land cover class to 1. All other values = 0...".format(timer.split()))
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE", RemapValue(reclassPairs))

    AddMsg("{0} Performing focal SUM on reclassified raster using {1} cell radius neighborhood...".format(timer.split(), viewRadius))
    neighborhood = arcpy.sa.NbrCircle(int(viewRadius), "CELL")
    focalGrid = arcpy.sa.FocalStatistics(reclassGrid == classValue, neighborhood, "SUM")

    AddMsg("{0} Reclassifying focal SUM results into view = 0 and no-view = 1 binary raster...".format(timer.split()))
    # delimitedVALUE = arcpy.AddFieldDelimiters(focalGrid, "VALUE")
    # whereClause = delimitedVALUE + " = 0"
    # viewGrid = Con(focalGrid, 1, 0, whereClause)
    whereValue = conValues[0]
    trueValue = conValues[1]
    viewGrid = Con(Raster(focalGrid) == whereValue, trueValue)
    return viewGrid
def merge_spectral_tiles(**kwargs):
    """
    Description: extracts spectral tiles to an area and mosaics extracted tiles with first data priority
    Inputs: 'cell_size' -- a cell size for the output spectral raster
            'output_projection' -- the machine number for the output projection
            'work_geodatabase' -- a geodatabase to store temporary results
            'input_array' -- an array containing the grid raster (must be first), the study area raster (must be second), and the list of spectral tiles
            'output_array' -- an array containing the output spectral grid raster
    Returned Value: Returns a raster dataset on disk containing the merged spectral grid raster
    Preconditions: requires processed source spectral tiles and predefined grid
    """

    # Import packages
    import arcpy
    from arcpy.sa import ExtractByMask
    from arcpy.sa import IsNull
    from arcpy.sa import Nibble
    from arcpy.sa import Raster
    from arcpy.sa import SetNull
    import datetime
    import os
    import time

    # Parse key word argument inputs
    cell_size = kwargs['cell_size']
    output_projection = kwargs['output_projection']
    work_geodatabase = kwargs['work_geodatabase']
    tile_inputs = kwargs['input_array']
    grid_raster = tile_inputs.pop(0)
    study_area = tile_inputs.pop(0)
    spectral_grid = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Use three quarters of cores on processes that can be split.
    arcpy.env.parallelProcessingFactor = "75%"

    # Set snap raster and extent
    arcpy.env.snapRaster = study_area
    arcpy.env.extent = Raster(grid_raster).extent

    # Define the output coordinate system
    output_system = arcpy.SpatialReference(output_projection)

    # Define intermediate rasters
    mosaic_raster = os.path.splitext(spectral_grid)[0] + '_mosaic.tif'
    nibble_raster = os.path.splitext(spectral_grid)[0] + '_nibble.tif'
    spectral_area = os.path.splitext(spectral_grid)[0] + '_area.tif'

    # Define folder structure
    grid_title = os.path.splitext(os.path.split(grid_raster)[1])[0]
    mosaic_location, mosaic_name = os.path.split(mosaic_raster)

    # Create source folder within mosaic location if it does not already exist
    source_folder = os.path.join(mosaic_location, 'sources')
    if os.path.exists(source_folder) == 0:
        os.mkdir(source_folder)

    # Create an empty list to store existing extracted source rasters for the grid
    input_length = len(tile_inputs)
    input_rasters = []

    # Identify raster extent of grid
    print(f'\tExtracting {input_length} spectral tiles...')
    grid_extent = Raster(grid_raster).extent
    grid_array = arcpy.Array()
    grid_array.add(arcpy.Point(grid_extent.XMin, grid_extent.YMin))
    grid_array.add(arcpy.Point(grid_extent.XMin, grid_extent.YMax))
    grid_array.add(arcpy.Point(grid_extent.XMax, grid_extent.YMax))
    grid_array.add(arcpy.Point(grid_extent.XMax, grid_extent.YMin))
    grid_array.add(arcpy.Point(grid_extent.XMin, grid_extent.YMin))
    grid_polygon = arcpy.Polygon(grid_array)

    # Save grid polygon
    grid_feature = os.path.join(work_geodatabase, 'grid_polygon')
    arcpy.management.CopyFeatures(grid_polygon, grid_feature)
    arcpy.management.DefineProjection(grid_feature, output_system)

    # Iterate through all input tiles and extract to grid if they overlap
    count = 1
    for raster in tile_inputs:
        output_raster = os.path.join(source_folder, os.path.split(raster)[1])
        if os.path.exists(output_raster) == 0:
            # Identify raster extent of tile
            tile_extent = Raster(raster).extent
            tile_array = arcpy.Array()
            tile_array.add(arcpy.Point(tile_extent.XMin, tile_extent.YMin))
            tile_array.add(arcpy.Point(tile_extent.XMin, tile_extent.YMax))
            tile_array.add(arcpy.Point(tile_extent.XMax, tile_extent.YMax))
            tile_array.add(arcpy.Point(tile_extent.XMax, tile_extent.YMin))
            tile_array.add(arcpy.Point(tile_extent.XMin, tile_extent.YMin))
            tile_polygon = arcpy.Polygon(tile_array)

            # Save tile polygon
            tile_feature = os.path.join(work_geodatabase, 'tile_polygon')
            arcpy.CopyFeatures_management(tile_polygon, tile_feature)
            arcpy.DefineProjection_management(tile_feature, output_system)

            # Select tile extent with grid extent
            selection = int(
                arcpy.GetCount_management(
                    arcpy.management.SelectLayerByLocation(tile_feature, 'INTERSECT', grid_feature, '',
                                                           'NEW_SELECTION', 'NOT_INVERT')).getOutput(0))

            # If tile overlaps grid then perform extraction
            if selection == 1:
                # Extract raster to mask
                print(f'\t\tExtracting spectral tile {count} of {input_length}...')
                iteration_start = time.time()
                extract_raster = ExtractByMask(raster, grid_raster)
                # Copy extracted raster to output
                print(f'\t\tSaving spectral tile {count} of {input_length}...')
                arcpy.management.CopyRaster(extract_raster, output_raster, '', '0', '-32768', 'NONE', 'NONE',
                                            '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE', 'CURRENT_SLICE',
                                            'NO_TRANSPOSE')
                # End timing
                iteration_end = time.time()
                iteration_elapsed = int(iteration_end - iteration_start)
                iteration_success_time = datetime.datetime.now()
                # Report success
                print(f'\t\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
                print('\t\t----------')
            # If tile does not overlap grid then report message
            else:
                print(f'\t\tSpectral tile {count} of {input_length} does not overlap grid...')
                print('\t\t----------')
            # Remove tile feature class
            if arcpy.Exists(tile_feature) == 1:
                arcpy.management.Delete(tile_feature)
        # If extracted tile already exists then report message
        else:
            print(f'\t\tExtracted spectral tile {count} of {input_length} already exists...')
            print('\t\t----------')
        # If the output raster exists then append it to the raster list
        if os.path.exists(output_raster) == 1:
            input_rasters.append(output_raster)
        count += 1

    # Remove grid feature
    if arcpy.Exists(grid_feature) == 1:
        arcpy.management.Delete(grid_feature)
    print(f'\tFinished extracting {input_length} spectral tiles.')
    print('\t----------')

    # Mosaic raster tiles to new raster
    print(f'\tMosaicking the input rasters for {grid_title}...')
    iteration_start = time.time()
    arcpy.management.MosaicToNewRaster(input_rasters, mosaic_location, mosaic_name, output_system,
                                       '16_BIT_SIGNED', cell_size, '1', 'MAXIMUM', 'FIRST')
    # Enforce correct projection
    arcpy.management.DefineProjection(mosaic_raster, output_system)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Calculate the missing area
    print('\tCalculating null space...')
    iteration_start = time.time()
    raster_null = SetNull(IsNull(Raster(mosaic_raster)), 1, 'VALUE = 1')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Impute missing data by nibbling the NoData from the focal mean
    print('\tImputing missing values by geographic nearest neighbor...')
    iteration_start = time.time()
    raster_filled = Nibble(Raster(mosaic_raster), raster_null, 'DATA_ONLY', 'PROCESS_NODATA', '')
    # Copy nibble raster to output
    print('\tSaving filled raster...')
    arcpy.management.CopyRaster(raster_filled, nibble_raster, '', '0', '-32768', 'NONE', 'NONE',
                                '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE', 'CURRENT_SLICE', 'NO_TRANSPOSE')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Remove overflow fill from the study area
    print('\tRemoving overflow fill from study area...')
    iteration_start = time.time()
    raster_preliminary = ExtractByMask(nibble_raster, study_area)
    # Copy preliminary extracted raster to output
    arcpy.management.CopyRaster(raster_preliminary, spectral_area, '', '0', '-32768', 'NONE', 'NONE',
                                '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE', 'CURRENT_SLICE', 'NO_TRANSPOSE')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Remove overflow fill from the grid
    print('\tRemoving overflow fill from grid...')
    iteration_start = time.time()
    raster_final = ExtractByMask(spectral_area, grid_raster)
    arcpy.management.CopyRaster(raster_final, spectral_grid, '', '0', '-32768', 'NONE', 'NONE',
                                '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE', 'CURRENT_SLICE', 'NO_TRANSPOSE')
    # Delete intermediate rasters
    if arcpy.Exists(mosaic_raster) == 1:
        arcpy.management.Delete(mosaic_raster)
    if arcpy.Exists(nibble_raster) == 1:
        arcpy.management.Delete(nibble_raster)
    if arcpy.Exists(spectral_area) == 1:
        arcpy.management.Delete(spectral_area)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')
    out_process = f'Successfully created {os.path.split(spectral_grid)[1]}'
    return out_process
def reproject_integer(**kwargs):
    """
    Description: reprojects a raster and converts to integer by a specified multiplicative factor
    Inputs: 'cell_size' -- a cell size for the output DEM
            'input_projection' -- the machine number for the input projection
            'output_projection' -- the machine number for the output projection
            'geographic_transformation' -- the string representation of the appropriate geographic transformation (blank if none required)
            'conversion_factor' -- a number that will be multiplied with the original value before being converted to integer
            'input_array' -- an array containing the input raster and the snap raster
            'output_array' -- an array containing the output raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import Int
    from arcpy.sa import Raster
    import datetime
    import os
    import time

    # Parse key word argument inputs
    cell_size = kwargs['cell_size']
    input_projection = kwargs['input_projection']
    output_projection = kwargs['output_projection']
    geographic_transformation = kwargs['geographic_transformation']
    conversion_factor = kwargs['conversion_factor']
    input_raster = kwargs['input_array'][0]
    snap_raster = kwargs['input_array'][1]
    output_raster = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Set snap raster
    arcpy.env.snapRaster = snap_raster

    # Define the input and output coordinate systems
    input_system = arcpy.SpatialReference(input_projection)
    output_system = arcpy.SpatialReference(output_projection)

    # Project raster to output coordinate system
    print('\tReprojecting input raster...')
    iteration_start = time.time()
    # Define intermediate and output raster
    reprojected_raster = os.path.splitext(input_raster)[0] + '_reprojected.tif'
    # Define initial coordinate system
    arcpy.management.DefineProjection(input_raster, input_system)
    # Reproject raster
    arcpy.management.ProjectRaster(input_raster, reprojected_raster, output_system, 'BILINEAR',
                                   cell_size, geographic_transformation, '', input_system)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success for iteration
    print(f'\tProjection completed at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Round to integer and store as 16 bit signed raster
    print('\tConverting raster to 16 bit integer...')
    iteration_start = time.time()
    integer_raster = Int((Raster(reprojected_raster) * conversion_factor) + 0.5)
    arcpy.management.CopyRaster(integer_raster, output_raster, '', '', '-32768', 'NONE', 'NONE',
                                '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
    # Delete intermediate raster
    arcpy.management.Delete(reprojected_raster)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success for iteration
    print(f'\tConversion completed at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')
    out_process = 'Successfully reprojected and converted raster.'
    return out_process
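
# Example usage (sketch) for reproject_integer(): the projection arguments are
# assumed to be EPSG/WKID codes accepted by arcpy.SpatialReference, the blank
# geographic_transformation means none is required, and a conversion_factor of
# 100 preserves two decimal places in the 16-bit signed output. All file paths
# are hypothetical placeholders.
if __name__ == '__main__':
    reproject_kwargs = {'cell_size': 10,
                        'input_projection': 4269,
                        'output_projection': 3338,
                        'geographic_transformation': '',
                        'conversion_factor': 100,
                        'input_array': ['C:/data/source_raster.tif', 'C:/data/snap_raster.tif'],
                        'output_array': ['C:/data/source_raster_integer.tif']}
    print(reproject_integer(**reproject_kwargs))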
def create_composite_dem(**kwargs):
    """
    Description: mosaics extracted source rasters with first data priority and extracts to mask
    Inputs: 'cell_size' -- a cell size for the output DEM
            'output_projection' -- the machine number for the output projection
            'work_geodatabase' -- a geodatabase to store temporary results
            'input_array' -- an array containing the grid raster (must be first) and the list of source DEMs in prioritized order
            'output_array' -- an array containing the output raster
    Returned Value: Returns a raster dataset on disk containing the merged source DEM
    Preconditions: requires source DEMs and predefined grid
    """

    # Import packages
    import arcpy
    from arcpy.sa import ExtractByMask
    from arcpy.sa import Raster
    import datetime
    import os
    import time

    # Parse key word argument inputs
    cell_size = kwargs['cell_size']
    output_projection = kwargs['output_projection']
    elevation_inputs = kwargs['input_array']
    grid_raster = elevation_inputs.pop(0)
    composite_raster = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Use three quarters of cores on processes that can be split.
    arcpy.env.parallelProcessingFactor = "75%"

    # Set snap raster and extent
    arcpy.env.snapRaster = grid_raster
    arcpy.env.extent = Raster(grid_raster).extent

    # Determine input raster value type
    value_number = arcpy.management.GetRasterProperties(elevation_inputs[0], "VALUETYPE")[0]
    no_data_value = arcpy.Describe(elevation_inputs[0]).noDataValue
    value_dictionary = {
        0: '1_BIT',
        1: '2_BIT',
        2: '4_BIT',
        3: '8_BIT_UNSIGNED',
        4: '8_BIT_SIGNED',
        5: '16_BIT_UNSIGNED',
        6: '16_BIT_SIGNED',
        7: '32_BIT_UNSIGNED',
        8: '32_BIT_SIGNED',
        9: '32_BIT_FLOAT',
        10: '64_BIT'
    }
    value_type = value_dictionary.get(int(value_number))
    print(f'Output data type will be {value_type}.')
    print(f'Output no data value will be {no_data_value}.')

    # Define the target projection
    composite_projection = arcpy.SpatialReference(output_projection)

    # Define folder structure
    grid_title = os.path.splitext(os.path.split(grid_raster)[1])[0]
    mosaic_location, mosaic_name = os.path.split(composite_raster)
    # Create mosaic location if it does not already exist
    if os.path.exists(mosaic_location) == 0:
        os.mkdir(mosaic_location)
    # Create source folder within mosaic location if it does not already exist
    source_folder = os.path.join(mosaic_location, 'sources')
    if os.path.exists(source_folder) == 0:
        os.mkdir(source_folder)

    # Create an empty list to store existing extracted source rasters for the area of interest
    input_length = len(elevation_inputs)
    input_rasters = []
    count = 1

    # Iterate through all input rasters to extract to grid and append to input list
    for raster in elevation_inputs:
        # Define output raster file path
        output_raster = os.path.join(source_folder, os.path.split(raster)[1])
        # Extract input raster if extracted raster does not already exist
        if os.path.exists(output_raster) == 0:
            try:
                print(f'\tExtracting elevation source {count} of {input_length}...')
                iteration_start = time.time()
                # Extract raster to mask
                extract_raster = ExtractByMask(raster, grid_raster)
                # Copy extracted raster to output
                print(f'\tSaving elevation source {count} of {input_length}...')
                arcpy.management.CopyRaster(extract_raster, output_raster, '', '', no_data_value, 'NONE', 'NONE',
                                            value_type, 'NONE', 'NONE', 'TIFF', 'NONE')
                # End timing
                iteration_end = time.time()
                iteration_elapsed = int(iteration_end - iteration_start)
                iteration_success_time = datetime.datetime.now()
                # Report success
                print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
                print('\t----------')
            except:
                print('\tElevation source does not overlap grid...')
                print('\t----------')
        else:
            print(f'\tExtracted elevation source {count} of {input_length} already exists...')
            print('\t----------')
        # Append extracted input raster to inputs list
        if os.path.exists(output_raster) == 1:
            input_rasters.append(output_raster)
        # Increase counter
        count += 1

    # Append the grid raster to the list of input rasters
    input_rasters.append(grid_raster)

    # Report the raster priority order
    raster_order = []
    for raster in input_rasters:
        name = os.path.split(raster)[1]
        raster_order.append(name)
    print(f'\tPriority of input sources for {grid_title}...')
    count = 1
    for raster in raster_order:
        print(f'\t\t{count}. {raster}')
        # Increase the counter
        count += 1

    # Mosaic raster tiles to new raster
    print(f'\tMosaicking the input rasters for {grid_title}...')
    iteration_start = time.time()
    arcpy.management.MosaicToNewRaster(input_rasters, mosaic_location, mosaic_name, composite_projection,
                                       value_type, cell_size, '1', 'FIRST', 'FIRST')
    # Enforce correct projection
    arcpy.management.DefineProjection(composite_raster, composite_projection)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')
    out_process = f'Finished elevation composite for {grid_title}.'
    return out_process