def main(argv=None):
    """Iterate over LM, BM, and restoration tasks."""
    if argv is None:
        argv = sys.argv  # Get parameters from ArcGIS tool dialog

    start_time = time.clock()

    # USER SETTINGS ######################################################

    # Restoration Settings
    # ALL input data must be in the same projection

    # Set to True to restore highest ROI. Set to False to restore strongest
    # barrier
    restore_max_roi = argv[1]

    # Resistance value of restored habitat. Must be 1 or greater.
    restored_resistance_val = argv[2]

    # No spaces or special chars in paths or gdb names
    restoration_data_gdb = argv[3]

    # No spaces in path; avoid using Dropbox or a network drive.
    # Project directories (iter1, iter2...) and an output geodatabase
    # will be created in this directory
    output_dir = argv[4]

    # Resistance raster. Should be in input GDB
    resistance_ras = argv[5]

    # Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant'
    core_fc = argv[6]
    core_fn = argv[7]  # Core area field name

    radius = argv[8]  # Restoration radius in meters
    iterations = argv[9]  # Number of restorations to perform

    # If less than this proportion of ag in circle, don't consider restoring
    # circle
    min_ag_threshold = argv[10]

    # Don't consider barriers below this improvement score (average
    # improvement per meter of diameter restored)
    min_improvement_val = argv[11]

    # Average per-m2 parcel cost per pixel. Snapped to resistance raster.
    parcel_cost_ras = argv[12]

    # Right now this is just a raster with all pixels set to 0.113174
    restoration_cost_ras = argv[13]

    ag_ras = argv[14]  # 1 = Ag, 0 = Not Ag

    # Some restorations benefit multiple corridors.
    # 'Maximum' takes the greatest improvement across core area pairs
    # 'Sum' adds improvement scores across all pairs.
    barrier_combine_method = argv[15]

    # Use cwd_thresh = None for no threshold. Use cwd_thresh = X to not
    # consider restorations more than X map units away from each core area.
    cwd_thresh = argv[16]

    # END USER SETTINGS ##################################################

    try:
        # Set up paths and create directories
        gprint('Hey! Make sure everything is in the same projection!\n')
        gprint('Setting up paths and creating directories')
        sys.path.append('..\\toolbox\\scripts')
        res_ras = os.path.join(restoration_data_gdb, resistance_ras)
        core_fc_path = os.path.join(restoration_data_gdb, core_fc)

        # Set up a NEW output gdb (leave previous ones on drive)
        i = None
        for i in range(1, 200):
            output_gdb = 'restorationOutput' + str(i) + '.gdb'
            if not arcpy.Exists(os.path.join(output_dir, output_gdb)):
                break
            gprint('Previous output GDB ' + output_gdb + ' exists. '
                   'Delete to save disk space.')
        arcpy.CreateFileGDB_management(output_dir, output_gdb)
        output_gdb = os.path.join(output_dir, output_gdb)
        log_file = os.path.join(output_gdb,
                                'Iterate Barriers' + str(i) + '.py')
        # Write a copy of this file to output dir as a record of settings
        shutil.copyfile(__file__, log_file)

        arcpy.env.cellSize = res_ras
        arcpy.env.extent = res_ras
        arcpy.env.snapRaster = res_ras
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = output_gdb
        arcpy.env.workspace = output_gdb

        spatialref = arcpy.Describe(res_ras).spatialReference
        mapunits = spatialref.linearUnitName
        gprint('Cell size = ' + str(arcpy.env.cellSize) + ' ' + mapunits +
               's')

        # Calculate fraction of ag within radius of each pixel
        gprint('Calculating purchase cost, fraction of ag, etc within '
               'radius of each pixel.')
        ag_ras = os.path.join(restoration_data_gdb, ag_ras)
        in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
        arcpy.env.extent = ag_ras
        out_focal_stats = arcpy.sa.FocalStatistics(ag_ras, in_neighborhood,
                                                   "MEAN", "NODATA")
        proportion_ag_ras = os.path.join(output_gdb, 'proportionAgRas')
        out_focal_stats.save(proportion_ag_ras)
        arcpy.env.extent = res_ras

        # Calculate purchase cost of circles
        parcel_cost_ras = os.path.join(restoration_data_gdb, parcel_cost_ras)
        arcpy.env.extent = parcel_cost_ras
        out_focal_stats = arcpy.sa.FocalStatistics(parcel_cost_ras,
                                                   in_neighborhood, "MEAN",
                                                   "DATA")
        cost_focal_stats_ras = os.path.join(output_gdb,
                                            'cost_focal_stats_ras')
        out_focal_stats.save(cost_focal_stats_ras)
        arcpy.env.extent = res_ras

        circle_area = float(npy.pi * radius * radius)
        outras = arcpy.sa.Raster(cost_focal_stats_ras) * circle_area
        purch_cost_ras = os.path.join(output_gdb, 'purchaseCostRaster')
        outras.save(purch_cost_ras)
        lu.delete_data(cost_focal_stats_ras)

        restoration_cost_ras = os.path.join(restoration_data_gdb,
                                            restoration_cost_ras)
        outras = (arcpy.sa.Raster(purch_cost_ras) +
                  (arcpy.sa.Raster(restoration_cost_ras) * radius * radius *
                   npy.pi))
        total_cost_ras = os.path.join(output_gdb, 'totalCostRaster')
        outras.save(total_cost_ras)

        # Create mask to remove areas without cost data
        arcpy.env.extent = total_cost_ras
        cost_mask_ras = os.path.join(output_gdb, 'costMaskRaster')
        cost_thresh = 0
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(total_cost_ras) > float(cost_thresh)), 1)
        out_con.save(cost_mask_ras)
        arcpy.env.extent = res_ras

        # Create mask to remove areas below ag threshold
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(proportion_ag_ras) > float(min_ag_threshold)),
            1)
        ag_mask_ras = os.path.join(output_gdb, 'agMaskRaster')
        out_con.save(ag_mask_ras)

        do_step_1 = 'true'
        do_step_2 = 'true'
        do_step_5 = 'false'
        all_restored_areas_ras = ''

        for cur_iter in range(1, iterations + 1):
            start_time1 = time.clock()

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            lu.dashline(1)
            gprint('Running iteration number ' + str(cur_iter))
            proj_dir = os.path.join(output_dir,
                                    'iter' + str(cur_iter) + 'Proj')
            lu.create_dir(output_dir)
            lu.delete_dir(proj_dir)
            lu.create_dir(proj_dir)
            if cur_iter > 1:  # Copy previous s2 linktable to new project dir
                datapass_dir = os.path.join(proj_dir, 'datapass')
                lu.create_dir(datapass_dir)
                proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                datapass_dir_iter1 = os.path.join(proj_dir1, 'datapass')
                s2_link_tbl_iter1 = os.path.join(datapass_dir_iter1,
                                                 'linkTable_s2.csv')
                s2_link_tbl = os.path.join(datapass_dir, 'linkTable_s2.csv')
                shutil.copyfile(s2_link_tbl_iter1, s2_link_tbl)

            # Run Linkage Mapper

            # Copy distances text file from earlier LM run to the output
            # directory - speeds things up!
            dist_file = os.path.join(output_dir, core_fc + '_dists.txt')
            if not os.path.exists(dist_file):
                if cur_iter == 1:
                    gprint('Will calculate distance file.')
                    dist_file = '#'
                else:
                    proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                    dist_file1 = os.path.join(proj_dir1,
                                              core_fc + '_dists.txt')
                    # Put a copy here for future runs
                    shutil.copyfile(dist_file1, dist_file)

            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            argv = ('lm_master.py', proj_dir, core_fc_path, core_fn, res_ras,
                    do_step_1, do_step_2, 'Cost-Weighted & Euclidean',
                    dist_file, 'true', 'true', 'false', '4', 'Cost-Weighted',
                    'true', do_step_5, 'true', '200000', '10000', '#', '#',
                    '#', '#')
            gprint('Running ' + str(argv))
            lm_master.lm_master(argv)

            do_step_1 = 'false'  # Can skip for future iterations
            do_step_2 = 'false'  # Can skip for future iterations
            do_step_5 = 'false'  # Skipping for future iterations

            start_radius = str(radius)
            end_radius = str(radius)
            radius_step = '0'
            save_radius_ras = 'false'
            write_pct_ras = 'false'
            argv = ('barrier_master.py', proj_dir, res_ras, start_radius,
                    end_radius, radius_step, barrier_combine_method,
                    save_radius_ras, write_pct_ras, cwd_thresh)
            gprint('Running ' + str(argv))
            barrier_master.bar_master(argv)

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            gprint('Finding restoration circles with max barrier score / '
                   'ROI')
            # Find points with max ROI
            prefix = os.path.basename(proj_dir)
            if barrier_combine_method == 'Sum':
                sum_suffix = 'Sum'
            else:
                sum_suffix = ''
            barrier_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad" +
                          str(radius))
            barrier_ras = os.path.join(proj_dir, 'output', 'barriers.gdb',
                                       barrier_fn)
            if not arcpy.Exists(barrier_ras):
                msg = ('Error: cannot find barrier output: ' + barrier_ras)
                lu.raise_error(msg)

            if cur_iter > 1:
                gprint('Creating mask for previously restored areas')
                in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
                arcpy.env.extent = all_restored_areas_ras
                out_focal_stats = arcpy.sa.FocalStatistics(
                    all_restored_areas_ras, in_neighborhood, "MEAN", "DATA")
                all_restored_focal_ras = os.path.join(
                    output_gdb, 'allRestFocRas_iter' + str(cur_iter))
                # Anything > 0 would include a restored area
                out_focal_stats.save(all_restored_focal_ras)
                arcpy.env.extent = res_ras
                rest_mask_ras = os.path.join(
                    output_gdb, 'restMaskRaster_iter' + str(cur_iter))
                minval = 0
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(all_restored_focal_ras) ==
                     float(minval)), 1)
                out_con.save(rest_mask_ras)

            # Candidate areas have not been restored, have cost data, meet
            # minimum improvement score criteria, and have enough ag in them
            candidate_barrier_ras = os.path.join(
                output_gdb,
                'candidateBarrierRaster' + '_iter' + str(cur_iter))
            if cur_iter > 1:
                gprint('Creating candidate restoration raster using barrier '
                       'results, previous restorations, and selection '
                       'criteria')
                # ROI scores will be in terms of total improvement
                # (= score * diameter)
                out_calc = (arcpy.sa.Raster(cost_mask_ras) *
                            arcpy.sa.Raster(ag_mask_ras) *
                            arcpy.sa.Raster(barrier_ras) *
                            arcpy.sa.Raster(rest_mask_ras) * (radius * 2))
            else:
                out_calc = (arcpy.sa.Raster(cost_mask_ras) *
                            arcpy.sa.Raster(ag_mask_ras) *
                            arcpy.sa.Raster(barrier_ras) * radius * 2)

            min_barrier_score = min_improvement_val * radius * 2
            if restored_resistance_val != 1:
                out_calc_2 = (out_calc -
                              (2 * radius * (restored_resistance_val - 1)))
                out_con = arcpy.sa.Con(
                    (out_calc_2 >= float(min_barrier_score)), out_calc_2)
            else:
                out_con = arcpy.sa.Con(
                    (out_calc >= float(min_barrier_score)), out_calc)
            out_con.save(candidate_barrier_ras)
            lu.build_stats(candidate_barrier_ras)

            purchase_roi_ras = os.path.join(
                output_gdb, 'purchaseRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras) /
                        arcpy.sa.Raster(purch_cost_ras))
            out_calc.save(purchase_roi_ras)
            lu.build_stats(purchase_roi_ras)

            total_roi_ras = os.path.join(
                output_gdb,
                'purchaseRestRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras) /
                        arcpy.sa.Raster(total_cost_ras))
            out_calc.save(total_roi_ras)
            lu.build_stats(total_roi_ras)

            max_barrier = float(
                arcpy.GetRasterProperties_management(candidate_barrier_ras,
                                                     "MAXIMUM").getOutput(0))
            gprint('Maximum barrier improvement score: ' + str(max_barrier))
            if max_barrier < 0:
                arcpy.AddWarning("\nNo barriers found that meet CWD or Ag "
                                 "threshold criteria.")

            max_purch_roi = arcpy.GetRasterProperties_management(
                purchase_roi_ras, "MAXIMUM")
            gprint('Maximum purchase ROI score: ' +
                   str(max_purch_roi.getOutput(0)))

            max_roi = arcpy.GetRasterProperties_management(
                total_roi_ras, "MAXIMUM")
            gprint('Maximum total ROI score: ' + str(max_roi.getOutput(0)))

            if restore_max_roi:
                out_point = os.path.join(
                    output_gdb, 'maxRoiPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum ROI to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(total_roi_ras) >= float(
                        max_roi.getOutput(0))), total_roi_ras)
                max_roi_ras = os.path.join(output_gdb, 'max_roi_ras')
                out_con.save(max_roi_ras)
                # Save max ROI to point
                try:
                    arcpy.RasterToPoint_conversion(max_roi_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)
            else:  # Restoring strongest barrier instead
                out_point = os.path.join(
                    output_gdb, 'maxBarrierPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum BARRIER IMPROVEMENT '
                       'SCORE to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(candidate_barrier_ras) >= max_barrier),
                    candidate_barrier_ras)
                max_barrier_ras = os.path.join(output_gdb,
                                               'maxBarrierRaster')
                out_con.save(max_barrier_ras)
                # Save max barrier to point
                try:
                    arcpy.RasterToPoint_conversion(max_barrier_ras,
                                                   out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)

            gprint('Done evaluating candidate restorations')
            result = int(arcpy.GetCount_management(out_point).getOutput(0))
            if result > 1:
                # Would be better to retain point with max barrier score
                # when we have multiple points with same ROI
                arcpy.AddWarning('Deleting points with identical '
                                 'ROI/improvement score values')
                arcpy.DeleteIdentical_management(out_point, "grid_code",
                                                 0.1, 0.1)

            arcpy.sa.ExtractMultiValuesToPoints(
                out_point,
                [[candidate_barrier_ras, "barrierScore"],
                 [purch_cost_ras, "purchCost"],
                 [total_cost_ras, "totalCost"],
                 [purchase_roi_ras, "purchaseROI"],
                 [total_roi_ras, "totalROI"]], "NONE")
            arcpy.AddField_management(out_point, "restorationNumber",
                                      "SHORT")
            arcpy.CalculateField_management(out_point, "restorationNumber",
                                            cur_iter, "PYTHON_9.3")
            arcpy.AddField_management(out_point, "radius", "DOUBLE")
            arcpy.CalculateField_management(out_point, "radius", radius,
                                            "PYTHON_9.3")
            arcpy.AddField_management(out_point, "barrierScore_per_m",
                                      "DOUBLE")
            arcpy.CalculateField_management(
                out_point, "barrierScore_per_m",
                "(float(!barrierScore!) / (!radius! * 2))", "PYTHON_9.3")

            gprint('\nCreating restoration circles')
            if restore_max_roi:
                circle_fc = os.path.join(
                    output_gdb, 'maxRoiCircle' + '_iter' + str(cur_iter))
            else:
                circle_fc = os.path.join(
                    output_gdb, 'maxBarrierCircle' + '_iter' + str(cur_iter))
            arcpy.Buffer_analysis(out_point, circle_fc, radius)

            gprint('Rasterizing restoration circles')
            if restore_max_roi:
                circle_ras = os.path.join(
                    output_gdb, 'maxRoicircle_ras' + '_iter' + str(cur_iter))
            else:
                circle_ras = os.path.join(
                    output_gdb,
                    'maxBarrierCircleRas' + '_iter' + str(cur_iter))
            arcpy.FeatureToRaster_conversion(circle_fc, 'totalROI',
                                             circle_ras, arcpy.env.cellSize)

            # Restore raster
            gprint('Digitally restoring resistance raster')
            res_ras_restored = os.path.join(
                output_gdb, 'resRastRestored' + '_iter' + str(cur_iter))
            out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), res_ras,
                                   restored_resistance_val)
            out_con.save(res_ras_restored)

            all_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter))
            prev_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter - 1))
            if cur_iter == 1:
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), 0, 1)
            else:
                # Add this restoration to areas restored
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras),
                                       prev_restored_areas_ras, 1)
            out_con.save(all_restored_areas_ras)

            lu.delete_data(circle_ras)

            # Use for next iteration resistance raster
            res_ras = res_ras_restored

            # Add circle into feature class with all circles
            if restore_max_roi:
                all_circles_fc = os.path.join(output_gdb, "allCirclesMaxROI")
            else:
                all_circles_fc = os.path.join(output_gdb,
                                              "allCirclesMaxBarriers")
            if cur_iter == 1:
                arcpy.CopyFeatures_management(circle_fc, all_circles_fc)
            else:
                arcpy.Append_management(circle_fc, all_circles_fc, "TEST")
            gprint('Finished iteration #' + str(cur_iter))
            start_time1 = lu.elapsed_time(start_time1)

        gprint('\nDone with iterations.')
        start_time = lu.elapsed_time(start_time)
        gprint('Outputs saved in: ' + output_gdb)
        gprint('Back up your project directories if you want to save '
               'corridor/barrier results.')

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
def main():
    """Iterate over LM, BM, and restoration tasks."""
    # USER SETTINGS ######################################################

    # Restoration Settings
    # ALL input data must be in the same projection
    start_time = time.clock()

    # Set to True to restore highest ROI. Set to False to restore strongest
    # barrier
    restoreMaxROI = False

    # Resistance value of restored habitat. Must be 1 or greater.
    restoredResistanceVal = 1

    # No spaces or special chars in paths or gdb names
    restorationDataGDB = (
        "C:\\barrierClassAnalysis\\RestorationINPUTS_July2013.gdb")

    # No spaces in path; avoid using Dropbox or a network drive.
    # Project directories (iter1, iter2...) and an output geodatabase
    # will be created in this directory
    outputDir = "C:\\barrierClassAnalysis\\output"

    # Resistance raster. Should be in input GDB
    resistanceRaster = "URWA_resis"

    # Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant'
    coreFC = "URWA_HCAs_Doug_Grant"
    coreFN = "HCA_ID"  # Core area field name

    radius = 450  # Restoration radius in meters
    iterations = 13  # Number of restorations to perform

    # If less than this proportion of ag in circle, don't consider
    # restoring circle
    minAgThreshold = 0.75

    # Don't consider barriers below this improvement score (average
    # improvement per meter of diameter restored)
    minImprovementVal = 0

    # Average per-m2 parcel cost per pixel. Snapped to resistance raster.
    parcelCostRaster = "DougGrantParcelCost_m2_projected_90m"

    # Right now this is just a raster with all pixels set to 0.113174
    restorationCostRaster = "restCostPer_m2"

    agRaster = "ARESmaskp_projected"  # 1 = Ag, 0 = Not Ag

    # Some restorations benefit multiple corridors.
    # 'Maximum' takes the greatest improvement across core area pairs
    # 'Sum' adds improvement scores across all pairs.
    barrierCombineMethod = "Maximum"

    # Use cwdThresh = None for no threshold. Use cwdThresh = X to not
    # consider restorations more than X map units away from each core area.
    cwdThresh = None

    # END USER SETTINGS ##################################################

    try:
        # Set up paths and create directories
        gprint("Hey! Make sure everything is in the same projection!\n")
        gprint("Setting up paths and creating directories")
        sys.path.append("..\\toolbox\\scripts")
        resRast = os.path.join(restorationDataGDB, resistanceRaster)
        coreFCPath = os.path.join(restorationDataGDB, coreFC)

        # Set up a NEW output gdb (leave previous ones on drive)
        for i in range(1, 200):
            outputGDB = "restorationOutput" + str(i) + ".gdb"
            if not arcpy.Exists(os.path.join(outputDir, outputGDB)):
                break
            gprint("Previous output GDB " + outputGDB + " exists. "
                   "Delete to save disk space.")
        arcpy.CreateFileGDB_management(outputDir, outputGDB)
        outputGDB = os.path.join(outputDir, outputGDB)
        logFile = os.path.join(outputGDB,
                               "Iterate Barriers" + str(i) + ".py")
        # Write a copy of this file to output dir as a record of settings
        shutil.copyfile(__file__, logFile)

        arcpy.env.cellSize = resRast
        arcpy.env.extent = resRast
        arcpy.env.snapRaster = resRast
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = outputGDB
        arcpy.env.workspace = outputGDB

        spatialref = arcpy.Describe(resRast).spatialReference
        mapunits = spatialref.linearUnitName
        gprint("Cell size = " + str(arcpy.env.cellSize) + " " + mapunits +
               "s")

        # Calculate fraction of ag within radius of each pixel
        gprint("Calculating purchase cost, fraction of ag, etc within "
               "radius of each pixel.")
        agRaster = os.path.join(restorationDataGDB, agRaster)
        inNeighborhood = NbrCircle(radius, "MAP")
        arcpy.env.extent = agRaster
        outFocalStats = arcpy.sa.FocalStatistics(agRaster, inNeighborhood,
                                                 "MEAN", "NODATA")
        proportionAgRaster = os.path.join(outputGDB, "proportionAgRas")
        outFocalStats.save(proportionAgRaster)
        arcpy.env.extent = resRast

        # Calculate purchase cost of circles
        parcelCostRaster = os.path.join(restorationDataGDB, parcelCostRaster)
        arcpy.env.extent = parcelCostRaster
        outFocalStats = arcpy.sa.FocalStatistics(parcelCostRaster,
                                                 inNeighborhood, "MEAN",
                                                 "DATA")
        costFocalStatsRaster = os.path.join(outputGDB, "costFocalStatsRaster")
        outFocalStats.save(costFocalStatsRaster)
        arcpy.env.extent = resRast

        circleArea = float(npy.pi * radius * radius)
        outras = Raster(costFocalStatsRaster) * circleArea
        purchCostRaster = os.path.join(outputGDB, "purchaseCostRaster")
        outras.save(purchCostRaster)
        lu.delete_data(costFocalStatsRaster)

        # restCost = npy.pi * radius * radius * restCostPer_m2
        restorationCostRaster = os.path.join(restorationDataGDB,
                                             restorationCostRaster)
        outras = Raster(purchCostRaster) + (Raster(restorationCostRaster) *
                                            radius * radius * npy.pi)
        totalCostRaster = os.path.join(outputGDB, "totalCostRaster")
        outras.save(totalCostRaster)
        # lu.build_stats(totalCostRaster)

        # Create mask to remove areas without cost data
        arcpy.env.extent = totalCostRaster
        costMaskRaster = os.path.join(outputGDB, "costMaskRaster")
        costThresh = 0
        outCon = arcpy.sa.Con((Raster(totalCostRaster) > float(costThresh)),
                              1)
        outCon.save(costMaskRaster)
        arcpy.env.extent = resRast

        # Create mask to remove areas below ag threshold
        outCon = arcpy.sa.Con(
            (Raster(proportionAgRaster) > float(minAgThreshold)), 1)
        agMaskRaster = os.path.join(outputGDB, "agMaskRaster")
        outCon.save(agMaskRaster)

        doStep1 = "true"
        doStep2 = "true"
        doStep5 = "false"

        for cur_iter in range(1, iterations + 1):
            start_time1 = time.clock()

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = resRast
            arcpy.env.extent = resRast
            arcpy.env.snapRaster = resRast
            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB

            lu.dashline(1)
            gprint("Running iteration number " + str(cur_iter))
            projDir = os.path.join(outputDir,
                                   "iter" + str(cur_iter) + "Proj")
            lu.create_dir(outputDir)
            lu.delete_dir(projDir)
            lu.create_dir(projDir)
            if cur_iter > 1:
                # Copy previous s2 linktable to new project directory
                datapassDir = os.path.join(projDir, "datapass")
                lu.create_dir(datapassDir)
                projDir1 = os.path.join(outputDir, "iter1Proj")
                datapassDirIter1 = os.path.join(projDir1, "datapass")
                s2LinktableIter1 = os.path.join(datapassDirIter1,
                                                "linkTable_s2.csv")
                s2LinkTable = os.path.join(datapassDir, "linkTable_s2.csv")
                shutil.copyfile(s2LinktableIter1, s2LinkTable)

            # Run Linkage Mapper

            # Copy distances text file from earlier LM run to the output
            # directory - speeds things up!
            distFile = os.path.join(outputDir, coreFC + "_dists.txt")
            if not os.path.exists(distFile):
                if cur_iter == 1:
                    gprint("Will calculate distance file.")
                    distFile = "#"
                else:
                    projDir1 = os.path.join(outputDir, "iter1Proj")
                    distFile1 = os.path.join(projDir1, coreFC + "_dists.txt")
                    # Put a copy here for future runs
                    shutil.copyfile(distFile1, distFile)

            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB

            argv = ("lm_master.py", projDir, coreFCPath, coreFN, resRast,
                    doStep1, doStep2, "Cost-Weighted & Euclidean", distFile,
                    "true", "true", "false", "4", "Cost-Weighted", "true",
                    doStep5, "10000", "#", "#")
            gprint("Running " + str(argv))
            import lm_master
            lm_master.lm_master(argv)

            doStep1 = "false"  # Can skip for future iterations
            doStep2 = "false"  # Can skip for future iterations
            doStep5 = "false"  # Skipping for future iterations

            startRadius = str(radius)
            endRadius = str(radius)
            radiusStep = "0"
            saveRadiusRasters = "false"
            writePctRasters = "false"
            argv = ("barrier_master.py", projDir, resRast, startRadius,
                    endRadius, radiusStep, barrierCombineMethod,
                    saveRadiusRasters, writePctRasters, cwdThresh)
            gprint("Running " + str(argv))
            import barrier_master
            barrier_master.bar_master(argv)

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = resRast
            arcpy.env.extent = resRast
            arcpy.env.snapRaster = resRast
            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB

            gprint("Finding restoration circles with max barrier score / "
                   "ROI")
            # Find points with max ROI
            PREFIX = os.path.basename(projDir)
            if barrierCombineMethod == "Sum":
                sumSuffix = "Sum"
            else:
                sumSuffix = ""
            barrierFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" +
                         str(radius))
            barrierRaster = os.path.join(projDir, "output", "barriers.gdb",
                                         barrierFN)
            if not arcpy.Exists(barrierRaster):
                msg = "Error: cannot find barrier output: " + barrierRaster
                lu.raise_error(msg)

            # arcpy.env.cellSize = agMaskRaster
            # arcpy.env.extent = agMaskRaster
            if cur_iter > 1:
                gprint("Creating mask for previously restored areas")
                inNeighborhood = NbrCircle(radius, "MAP")
                arcpy.env.extent = allRestoredAreasRaster
                outFocalStats = arcpy.sa.FocalStatistics(
                    allRestoredAreasRaster, inNeighborhood, "MEAN", "DATA")
                allRestoredFocalRaster = os.path.join(
                    outputGDB, "allRestFocRas_iter" + str(cur_iter))
                # Anything > 0 would include a restored area
                outFocalStats.save(allRestoredFocalRaster)
                arcpy.env.extent = resRast
                restMaskRaster = os.path.join(
                    outputGDB, "restMaskRaster_iter" + str(cur_iter))
                minval = 0
                outCon = arcpy.sa.Con(
                    (Raster(allRestoredFocalRaster) == float(minval)), 1)
                outCon.save(restMaskRaster)

            # Candidate areas have not been restored, have cost data, meet
            # minimum improvement score criteria, and have enough ag in them
            candidateBarrierRaster = os.path.join(
                outputGDB,
                "candidateBarrierRaster" + "_iter" + str(cur_iter))
            if cur_iter > 1:
                gprint("Creating candidate restoration raster using barrier "
                       "results, previous restorations, and selection "
                       "criteria")
                # ROI scores will be in terms of total improvement
                # (= score * diameter)
                outCalc = (Raster(costMaskRaster) * Raster(agMaskRaster) *
                           Raster(barrierRaster) * Raster(restMaskRaster) *
                           (radius * 2))
            else:
                outCalc = (Raster(costMaskRaster) * Raster(agMaskRaster) *
                           Raster(barrierRaster) * radius * 2)

            minBarrierScore = minImprovementVal * radius * 2
            if restoredResistanceVal != 1:
                outCalc2 = outCalc - (2 * radius *
                                      (restoredResistanceVal - 1))
                outCon = arcpy.sa.Con((outCalc2 >= float(minBarrierScore)),
                                      outCalc2)
            else:
                outCon = arcpy.sa.Con((outCalc >= float(minBarrierScore)),
                                      outCalc)
            outCon.save(candidateBarrierRaster)
            lu.build_stats(candidateBarrierRaster)

            purchaseRoiRaster = os.path.join(
                outputGDB, "purchaseRoiRaster" + "_iter" + str(cur_iter))
            outCalc = Raster(candidateBarrierRaster) / Raster(purchCostRaster)
            outCalc.save(purchaseRoiRaster)
            lu.build_stats(purchaseRoiRaster)

            totalRoiRaster = os.path.join(
                outputGDB, "purchaseRestRoiRaster" + "_iter" + str(cur_iter))
            outCalc = Raster(candidateBarrierRaster) / Raster(totalCostRaster)
            outCalc.save(totalRoiRaster)
            lu.build_stats(totalRoiRaster)

            maxBarrier = arcpy.GetRasterProperties_management(
                candidateBarrierRaster, "MAXIMUM")
            gprint("Maximum barrier improvement score: " +
                   str(maxBarrier.getOutput(0)))
            if float(maxBarrier.getOutput(0)) < 0:
                arcpy.AddWarning("\nNo barriers found that meet CWD or Ag "
                                 "threshold criteria.")

            maxPurchROI = arcpy.GetRasterProperties_management(
                purchaseRoiRaster, "MAXIMUM")
            gprint("Maximum purchase ROI score: " +
                   str(maxPurchROI.getOutput(0)))

            maxROI = arcpy.GetRasterProperties_management(totalRoiRaster,
                                                          "MAXIMUM")
            gprint("Maximum total ROI score: " + str(maxROI.getOutput(0)))

            if restoreMaxROI:
                outPoint = os.path.join(
                    outputGDB, "maxRoiPoint" + "_iter" + str(cur_iter))
                gprint("Choosing circle with maximum ROI to restore")
                outCon = arcpy.sa.Con(
                    (Raster(totalRoiRaster) >= float(maxROI.getOutput(0))),
                    totalRoiRaster)
                maxRoiRaster = os.path.join(outputGDB, "maxRoiRaster")
                outCon.save(maxRoiRaster)
                # Save max ROI to point
                try:
                    arcpy.RasterToPoint_conversion(maxRoiRaster, outPoint)
                except:
                    msg = ("Error: it looks like there are no viable "
                           "restoration candidates.")
                    lu.raise_error(msg)
            else:  # Restoring strongest barrier instead
                outPoint = os.path.join(
                    outputGDB, "maxBarrierPoint" + "_iter" + str(cur_iter))
                gprint("Choosing circle with maximum BARRIER IMPROVEMENT "
                       "SCORE to restore")
                outCon = arcpy.sa.Con(
                    (Raster(candidateBarrierRaster) >=
                     float(maxBarrier.getOutput(0))), candidateBarrierRaster)
                maxBarrierRaster = os.path.join(outputGDB, "maxBarrierRaster")
                outCon.save(maxBarrierRaster)
                # Save max barrier to point
                try:
                    arcpy.RasterToPoint_conversion(maxBarrierRaster, outPoint)
                except:
                    msg = ("Error: it looks like there are no viable "
                           "restoration candidates.")
                    lu.raise_error(msg)

            gprint("Done evaluating candidate restorations")
            result = int(arcpy.GetCount_management(outPoint).getOutput(0))
            if result > 1:
                # Would be better to retain point with max barrier score
                # when we have multiple points with same ROI
                arcpy.AddWarning("Deleting points with identical "
                                 "ROI/improvement score values")
                arcpy.DeleteIdentical_management(outPoint, "grid_code",
                                                 0.1, 0.1)

            arcpy.sa.ExtractMultiValuesToPoints(
                outPoint,
                [[candidateBarrierRaster, "barrierScore"],
                 [purchCostRaster, "purchCost"],
                 [totalCostRaster, "totalCost"],
                 [purchaseRoiRaster, "purchaseROI"],
                 [totalRoiRaster, "totalROI"]], "NONE")
            arcpy.AddField_management(outPoint, "restorationNumber", "SHORT")
            arcpy.CalculateField_management(outPoint, "restorationNumber",
                                            cur_iter)
            arcpy.AddField_management(outPoint, "radius", "DOUBLE")
            arcpy.CalculateField_management(outPoint, "radius", radius)
            arcpy.AddField_management(outPoint, "barrierScore_per_m",
                                      "DOUBLE")
            arcpy.CalculateField_management(
                outPoint, "barrierScore_per_m",
                "(float(!barrierScore!) / (!radius! * 2))", "PYTHON")

            gprint("\nCreating restoration circles")
            if restoreMaxROI:
                circleFC = os.path.join(
                    outputGDB, "maxRoiCircle" + "_iter" + str(cur_iter))
            else:
                circleFC = os.path.join(
                    outputGDB, "maxBarrierCircle" + "_iter" + str(cur_iter))
            arcpy.Buffer_analysis(outPoint, circleFC, radius)

            gprint("Rasterizing restoration circles")
            if restoreMaxROI:
                circleRas = os.path.join(
                    outputGDB, "maxRoiCircleRas" + "_iter" + str(cur_iter))
            else:
                circleRas = os.path.join(
                    outputGDB,
                    "maxBarrierCircleRas" + "_iter" + str(cur_iter))
            arcpy.FeatureToRaster_conversion(circleFC, "totalROI", circleRas,
                                             arcpy.env.cellSize)

            # Restore raster
            gprint("Digitally restoring resistance raster")
            resRastRestored = os.path.join(
                outputGDB, "resRastRestored" + "_iter" + str(cur_iter))
            outCon = arcpy.sa.Con(IsNull(circleRas), resRast,
                                  restoredResistanceVal)
            outCon.save(resRastRestored)

            allRestoredAreasRaster = os.path.join(
                outputGDB, "allRestoredAreas_iter" + str(cur_iter))
            PrevRestoredAreasRaster = os.path.join(
                outputGDB, "allRestoredAreas_iter" + str(cur_iter - 1))
            if cur_iter == 1:
                outCon = arcpy.sa.Con(IsNull(circleRas), 0, 1)
            else:
                # Add this restoration to areas restored
                outCon = arcpy.sa.Con(IsNull(circleRas),
                                      PrevRestoredAreasRaster, 1)
            outCon.save(allRestoredAreasRaster)

            lu.delete_data(circleRas)

            # Use for next iteration resistance raster
            resRast = resRastRestored

            # Add circle into feature class with all circles
            if restoreMaxROI:
                allCirclesFC = os.path.join(outputGDB, "allCirclesMaxROI")
            else:
                allCirclesFC = os.path.join(outputGDB,
                                            "allCirclesMaxBarriers")
            if cur_iter == 1:
                arcpy.CopyFeatures_management(circleFC, allCirclesFC)
            else:
                arcpy.Append_management(circleFC, allCirclesFC, "TEST")
            gprint("Finished iteration #" + str(cur_iter))
            start_time1 = lu.elapsed_time(start_time1)

        gprint("\nDone with iterations.")
        start_time = lu.elapsed_time(start_time)
        gprint("Outputs saved in: " + outputGDB)
        gprint("Back up your project directories if you want to save "
               "corridor/barrier results.")

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint("****Iteration script failed. Details follow.****")
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint("****Iteration script failed. Details follow.****")
        lu.exit_with_python_error(_SCRIPT_NAME)
def STEP3_calc_cwds():
    """Calculate cost-weighted distances from each core area.

    Uses bounding circles around source and target cores to limit extent
    of cwd calculations and speed computation.
    """
    try:
        lu.dashline(1)
        gprint('Running script ' + _SCRIPT_NAME)
        lu.dashline(0)

        # Super secret setting to re-start a failed run. Enter 'RESTART' as
        # the name of the pairwise distance table in step 2, and uncheck
        # step 2. We can eventually place this in a .ini file.
        rerun = False
        if cfg.S2EUCDISTFILE is not None:
            if cfg.S2EUCDISTFILE.lower() == "restart":
                rerun = True

        # if cfg.TMAXCWDIST is None:
        #     gprint('NOT using a maximum cost-weighted distance.')
        # else:
        #     gprint('Max cost-weighted distance for CWD calcs set '
        #            'to ' + str(cfg.TMAXCWDIST) + '\n')

        if cfg.BUFFERDIST is not None:
            gprint('Bounding circles plus a buffer of ' +
                   str(float(cfg.BUFFERDIST)) + ' map units will be used \n'
                   'to limit extent of cost distance calculations.')
        elif cfg.TOOL != cfg.TOOL_CC:
            gprint('NOT using bounding circles in cost distance '
                   'calculations.')

        # Set the analysis extent and cell size so we don't extract rasters
        # that go beyond the extent of the original raster
        if arcpy:
            arcpy.env.cellSize = cfg.RESRAST
            arcpy.env.extent = "MINOF"
        else:
            gp.cellSize = gp.Describe(cfg.RESRAST).MeanCellHeight
            gp.Extent = "MINOF"
        gp.mask = cfg.RESRAST
        if arcpy:
            arcpy.env.overwriteOutput = True
            arcpy.env.workspace = cfg.SCRATCHDIR
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        else:
            gp.OverwriteOutput = True
            gp.workspace = cfg.SCRATCHDIR
            gp.scratchWorkspace = cfg.ARCSCRATCHDIR

        # Load linkTable (created in previous script)
        linkTableFile = lu.get_prev_step_link_table(step=3)
        linkTable = lu.load_link_table(linkTableFile)
        lu.report_links(linkTable)

        # Identify cores to map from linkTable
        coresToMap = npy.unique(linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        numCoresToMap = len(coresToMap)
        if numCoresToMap < 3:
            # No need to check for intermediate cores, because there
            # aren't any
            cfg.S3DROPLCCSic = False
        else:
            cfg.S3DROPLCCSic = cfg.S3DROPLCCS
        gprint('\nNumber of core areas to connect: ' + str(numCoresToMap))

        if rerun:
            # If picking up a failed run, make sure needed files are there
            lu.dashline(1)
            gprint('\n****** RESTART MODE ENABLED ******\n')
            gprint('**** NOTE: This mode picks up step 3 where a\n'
                   'previous run left off due to a crash or user\n'
                   'abort. It assumes you are using the same input\n'
                   'data used in the terminated run.\n\n')
            lu.warn('IMPORTANT: Your LCP and stick feature classes\n'
                    'will LOSE LCPs that were already created, but\n'
                    'your final raster corridor map should be complete.\n')
            lu.dashline(0)
            lu.snooze(10)
            savedLinkTableFile = path.join(cfg.DATAPASSDIR,
                                           "temp_linkTable_s3_partial.csv")
            coreListFile = path.join(cfg.DATAPASSDIR,
                                     "temp_cores_to_map.csv")

            if (not path.exists(savedLinkTableFile) or
                    not path.exists(coreListFile)):
                gprint('No partial results file found from previous '
                       'stopped run. Starting run from beginning.\n')
                lu.dashline(0)
                rerun = False

        # If picking up a failed run, use old folders
        if not rerun:
            startIndex = 0
            if cfg.TOOL != cfg.TOOL_CC:
                lu.make_cwd_paths(max(coresToMap))  # Set up cwd directories

        # Make a feature layer for input cores to select from
        gp.MakeFeatureLayer(cfg.COREFC, cfg.FCORES)

        # Drop links that are too long
        gprint('\nChecking for corridors that are too long to map.')
        DISABLE_LEAST_COST_NO_VAL = False
        linkTable, numDroppedLinks = lu.drop_links(
            linkTable, cfg.MAXEUCDIST, 0, cfg.MAXCOSTDIST, 0,
            DISABLE_LEAST_COST_NO_VAL)

        # ------------------------------------------------------------------
        # Bounding boxes
        if cfg.BUFFERDIST is not None:
            # Create bounding boxes around cores
            start_time = time.clock()
            # lu.dashline(1)
            gprint('Calculating bounding boxes for core areas.')
            extentBoxList = npy.zeros((0, 5), dtype='float32')
            for x in range(len(coresToMap)):
                core = coresToMap[x]
                boxCoords = lu.get_extent_box_coords(core)
                extentBoxList = npy.append(extentBoxList, boxCoords, axis=0)
            gprint('\nDone calculating bounding boxes.')
            start_time = lu.elapsed_time(start_time)
            # lu.dashline()

        # Bounding circle code
        if cfg.BUFFERDIST is not None:
            # Make a set of circles encompassing core areas we'll be
            # connecting
            start_time = time.clock()
            gprint('Calculating bounding circles around potential '
                   'corridors.')

            # x y corex corey radius - stores data for bounding circle
            # centroids
            boundingCirclePointArray = npy.zeros((0, 5), dtype='float32')

            circleList = npy.zeros((0, 3), dtype='int32')

            numLinks = linkTable.shape[0]
            for x in range(0, numLinks):
                if ((linkTable[x, cfg.LTB_LINKTYPE] == cfg.LT_CORR) or
                        (linkTable[x, cfg.LTB_LINKTYPE] == cfg.LT_KEEP)):
                    # It's a valid corridor link
                    linkId = int(linkTable[x, cfg.LTB_LINKID])
                    # fixme - this code is clumsy - can trim down
                    cores = npy.zeros((1, 3), dtype='int32')
                    cores[0, :] = npy.sort([0, linkTable[x, cfg.LTB_CORE1],
                                            linkTable[x, cfg.LTB_CORE2]])
                    corex = cores[0, 1]
                    corey = cores[0, 2]
                    cores[0, 0] = linkId

                    foundFlag = False
                    for y in range(0, len(circleList)):  # clumsy
                        if (circleList[y, 1] == corex and
                                circleList[y, 2] == corey):
                            foundFlag = True
                    if not foundFlag:
                        circlePointData = (
                            lu.get_bounding_circle_data(extentBoxList,
                                                        corex, corey,
                                                        cfg.BUFFERDIST))
                        boundingCirclePointArray = (
                            npy.append(boundingCirclePointArray,
                                       circlePointData, axis=0))
                        # Keep track of which cores we draw bounding
                        # circles around
                        circleList = npy.append(circleList, cores, axis=0)

            gprint('\nCreating bounding circles using buffer analysis.')
            dir, BNDCIRCENS = path.split(cfg.BNDCIRCENS)
            lu.make_points(cfg.SCRATCHDIR, boundingCirclePointArray,
                           BNDCIRCENS)
            lu.delete_data(cfg.BNDCIRS)
            gp.buffer_analysis(cfg.BNDCIRCENS, cfg.BNDCIRS, "radius")
            gp.deletefield(cfg.BNDCIRS, "BUFF_DIST")

            gprint('Successfully created bounding circles around '
                   'potential corridors using \na buffer of ' +
                   str(float(cfg.BUFFERDIST)) + ' map units.')
            start_time = lu.elapsed_time(start_time)

            gprint('Reducing global processing area using bounding '
                   'circle plus buffer of ' + str(float(cfg.BUFFERDIST)) +
                   ' map units.\n')

            extentBoxList = npy.zeros((0, 5), dtype='float32')
            boxCoords = lu.get_extent_box_coords()
            extentBoxList = npy.append(extentBoxList, boxCoords, axis=0)
            extentBoxList[0, 0] = 0

            boundingCirclePointArray = npy.zeros((0, 5), dtype='float32')
            circlePointData = lu.get_bounding_circle_data(
                extentBoxList, 0, 0, cfg.BUFFERDIST)

            dir, BNDCIRCEN = path.split(cfg.BNDCIRCEN)
            lu.make_points(cfg.SCRATCHDIR, circlePointData, BNDCIRCEN)
            lu.delete_data(cfg.BNDCIR)
            gp.buffer_analysis(cfg.BNDCIRCEN, cfg.BNDCIR, "radius")

            gprint('Extracting raster....')
            cfg.BOUNDRESIS = cfg.BOUNDRESIS + tif
            lu.delete_data(cfg.BOUNDRESIS)
            count = 0
            statement = ('gp.ExtractByMask_sa(cfg.RESRAST, cfg.BNDCIR, '
                         'cfg.BOUNDRESIS)')
            while True:
                try:
                    exec(statement)
                    # randomerror()  # test hook (not defined here)
                except:
                    count, tryAgain = lu.retry_arc_error(count, statement)
                    if not tryAgain:
                        exec(statement)
                else:
                    break
            gprint('\nReduced resistance raster extracted using '
                   'bounding circle.')

        else:
            # If not using bounding circles, just go with resistance raster.
            cfg.BOUNDRESIS = cfg.RESRAST

        # ---------------------------------------------------------------
        # Rasterize core areas to speed cost distance calcs
        # lu.dashline(1)
        gprint("Creating core area raster.")
        gp.SelectLayerByAttribute(cfg.FCORES, "CLEAR_SELECTION")

        if arcpy:
            arcpy.env.cellSize = cfg.BOUNDRESIS
            arcpy.env.extent = cfg.BOUNDRESIS
        else:
            gp.cellSize = gp.Describe(cfg.BOUNDRESIS).MeanCellHeight
            gp.extent = gp.Describe(cfg.BOUNDRESIS).extent

        if rerun:
            # Saved linktable replaces the one now in memory
            linkTable = lu.load_link_table(savedLinkTableFile)
            coresToMapSaved = npy.loadtxt(coreListFile, dtype='Float64',
                                          comments='#', delimiter=',')
            startIndex = coresToMapSaved[0]  # Index of core where we left off
            del coresToMapSaved
            gprint('\n****** Re-starting run at core area number ' +
                   str(int(coresToMap[startIndex])) + ' ******\n')
            lu.dashline(0)

        if arcpy:
            arcpy.env.extent = "MINOF"
        else:
            gp.extent = "MINOF"

        # ------------------------------------------------------------------
        # Loop through cores, do cwd calcs for each
        if cfg.TOOL == cfg.TOOL_CC:
            gprint("\nMapping least-cost paths.\n")
        else:
            gprint("\nStarting cost distance calculations.\n")
        lcpLoop = 0
        failures = 0
        x = startIndex
        endIndex = len(coresToMap)
        linkTableMod = linkTable.copy()
        while x < endIndex:
            startTime1 = time.clock()

            # Modification of linkTable in function was causing problems,
            # so make a copy:
            linkTablePassed = linkTableMod.copy()

            (linkTableReturned, failures, lcpLoop) = do_cwd_calcs(
                x, linkTablePassed, coresToMap, lcpLoop, failures)

            if failures == 0:
                # If iteration was successful, continue with next core
                linkTableMod = linkTableReturned
                sourceCore = int(coresToMap[x])
                gprint('Done with all calculations for core ID #' +
                       str(sourceCore) + '. ' + str(int(x + 1)) + ' of ' +
                       str(endIndex) + ' cores have been processed.')
                start_time = lu.elapsed_time(startTime1)

                outlinkTableFile = path.join(cfg.DATAPASSDIR,
                                             "temp_linkTable_s3_partial.csv")
                lu.write_link_table(linkTableMod, outlinkTableFile)
                # Increment loop counter
                x = x + 1
            else:
                # If iteration failed, try again after a wait period
                delay_restart(failures)

        # ------------------------------------------------------------------
        linkTable = linkTableMod

        # Reinstate temporarily disabled links
        rows = npy.where(linkTable[:, cfg.LTB_LINKTYPE] > 1000)
        linkTable[rows, cfg.LTB_LINKTYPE] = (
            linkTable[rows, cfg.LTB_LINKTYPE] - 1000)

        # Drop links that are too long
        DISABLE_LEAST_COST_NO_VAL = True
        linkTable, numDroppedLinks = lu.drop_links(
            linkTable, cfg.MAXEUCDIST, cfg.MINEUCDIST, cfg.MAXCOSTDIST,
            cfg.MINCOSTDIST, DISABLE_LEAST_COST_NO_VAL)

        # Write link table file
        outlinkTableFile = lu.get_this_step_link_table(step=3)
        gprint('Updating ' + outlinkTableFile)
        lu.write_link_table(linkTable, outlinkTableFile)
        linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s3.csv")
        lu.write_link_table(linkTable, linkTableLogFile)

        start_time = time.clock()
        gprint('Creating shapefiles with linework for links...')
        try:
            lu.write_link_maps(outlinkTableFile, step=3)
        except:
            lu.write_link_maps(outlinkTableFile, step=3)
        start_time = lu.elapsed_time(start_time)

        gprint('\nIndividual cost-weighted distance layers written '
               'to "cwd" directory. \n')
        gprint(outlinkTableFile +
               '\n updated with cost-weighted distances between core areas.')

        # Clean up temporary files for restart code
        tempFile = path.join(cfg.DATAPASSDIR, "temp_cores_to_map.csv")
        lu.delete_file(tempFile)
        tempFile = path.join(cfg.DATAPASSDIR,
                             "temp_linkTable_s3_partial.csv")
        lu.delete_file(tempFile)

        # Check if climate tool is calling linkage mapper
        if cfg.TOOL == cfg.TOOL_CC:
            coreList = npy.unique(
                linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
            for core in coreList:
                cwdRaster = lu.get_cwd_path(int(core))
                back_rast = cwdRaster.replace("cwd_", "back_")
                lu.delete_data(back_rast)

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 3. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 3. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
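# A self-contained sketch of the bounding-circle idea STEP3 uses to limit
# the processing extent (this is NOT lu.get_bounding_circle_data() itself;
# the helper's signature and internals differ). Given the extent boxes of
# two cores, it returns a circle that encloses both boxes plus a buffer:
import math

def bounding_circle(box_a, box_b, buffer_dist):
    """box_* = (xmin, ymin, xmax, ymax); returns (cx, cy, radius)."""
    xmin = min(box_a[0], box_b[0])
    ymin = min(box_a[1], box_b[1])
    xmax = max(box_a[2], box_b[2])
    ymax = max(box_a[3], box_b[3])
    # Center the circle on the combined extent
    cx, cy = (xmin + xmax) / 2.0, (ymin + ymax) / 2.0
    # Radius reaches the farthest corner of the combined extent, padded by
    # the user's buffer distance
    radius = math.hypot(xmax - cx, ymax - cy) + buffer_dist
    return cx, cy, radius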
def STEP8_calc_pinchpoints():
    """Map pinch points in Linkage Mapper corridors using Circuitscape,
    given CWD calculations from s3_calcCwds.py.
    """
    try:
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)

        restartFlag = False
        if cfg.CWDCUTOFF < 0:
            cfg.CWDCUTOFF = cfg.CWDCUTOFF * -1
            restartFlag = True  # Restart code in progress

        CSPATH = lu.get_cs_path()
        outputGDB = path.join(cfg.OUTPUTDIR, path.basename(cfg.PINCHGDB))

        arcpy.env.overwriteOutput = True
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # Set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST
        arcpy.env.snapRaster = cfg.RESRAST
        resRaster = cfg.RESRAST
        arcpy.env.extent = "MINOF"

        minObject = arcpy.GetRasterProperties_management(resRaster,
                                                         "MINIMUM")
        rasterMin = float(str(minObject.getOutput(0)))
        if rasterMin <= 0:
            msg = ('Error: resistance raster cannot have 0 or negative '
                   'values.')
            lu.raise_error(msg)

        if cfg.DO_ADJACENTPAIRS:
            prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep=8)
            if not arcpy.Exists(prevLcpShapefile):
                msg = ('Cannot find an LCP shapefile from step 5. Please '
                       'rerun that step and any previous ones if necessary.')
                lu.raise_error(msg)

            # Remove lcp shapefile
            lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s8.shp")
            lu.delete_data(lcpShapefile)

        inLinkTableFile = lu.get_prev_step_link_table(step=8)
        linkTable = lu.load_link_table(inLinkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg = '\nThere are no linkages. Bailing.'
            lu.raise_error(msg)

        if linkTable.shape[1] < 16:
            # Linktable has no entries from prior centrality or pinchpoint
            # analyses
            extraCols = npy.zeros((numLinks, 6), dtype="float64")
            linkTable = linkTable[:, 0:10]
            linkTable = npy.append(linkTable, extraCols, axis=1)
            linkTable[:, cfg.LTB_LCPLEN] = -1
            linkTable[:, cfg.LTB_CWDEUCR] = -1
            linkTable[:, cfg.LTB_CWDPATHR] = -1
            linkTable[:, cfg.LTB_EFFRESIST] = -1
            linkTable[:, cfg.LTB_CWDTORR] = -1
            linkTable[:, cfg.LTB_CURRENT] = -1
            del extraCols

        # Set up directories for circuit and circuit mosaic grids
        # Create output geodatabase
        if not arcpy.Exists(cfg.PINCHGDB):
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                           path.basename(cfg.PINCHGDB))

        mosaicRaster = path.join(cfg.CIRCUITBASEDIR, "current_mos" + tif)
        coresToProcess = npy.unique(
            linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)
        del coresToProcess

        lu.dashline(0)

        coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        coreList = npy.sort(coreList)
        # gprint('There are ' + str(len(npy.unique(coreList))) +
        #        ' core areas.')

        INCIRCUITDIR = cfg.CIRCUITBASEDIR
        OUTCIRCUITDIR = path.join(cfg.CIRCUITBASEDIR,
                                  cfg.CIRCUITOUTPUTDIR_NM)
        CONFIGDIR = path.join(INCIRCUITDIR, cfg.CIRCUITCONFIGDIR_NM)

        # Cutoff value text to append to filenames
        cutoffText = str(cfg.CWDCUTOFF)
        if cutoffText[-6:] == '000000':
            cutoffText = cutoffText[0:-6] + 'm'
        elif cutoffText[-3:] == '000':
            cutoffText = cutoffText[0:-3] + 'k'

        if cfg.SQUARERESISTANCES:
            # Square resistance values
            squaredRaster = path.join(cfg.SCRATCHDIR, 'res_sqr')
            arcpy.env.workspace = cfg.SCRATCHDIR
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
            outRas = Raster(resRaster) * Raster(resRaster)
            outRas.save(squaredRaster)
            resRaster = squaredRaster

        if cfg.DO_ADJACENTPAIRS:
            linkLoop = 0
            lu.dashline(1)
            gprint('Mapping pinch points in individual corridors \n'
                   'using Circuitscape.')
            lu.dashline(1)
            gprint('If you try to cancel your run and the Arc dialog '
                   'hangs, ')
            gprint('you can kill Circuitscape by opening Windows Task '
                   'Manager')
            gprint('and ending the cs_run.exe process.')
            lu.dashline(2)
            for x in range(0, numLinks):
                linkId = str(int(linkTable[x, cfg.LTB_LINKID]))
                if not (linkTable[x, cfg.LTB_LINKTYPE] > 0):
                    continue
                linkLoop = linkLoop + 1
                linkDir = path.join(cfg.SCRATCHDIR, 'link' + linkId)
                if restartFlag and path.exists(linkDir):
                    gprint('continuing')
                    continue
                restartFlag = False
                lu.create_dir(linkDir)
                start_time1 = time.clock()

                # Source and target cores
                corex = int(coreList[x, 0])
                corey = int(coreList[x, 1])

                # Get cwd rasters for source and target cores
                cwdRaster1 = lu.get_cwd_path(corex)
                cwdRaster2 = lu.get_cwd_path(corey)

                lccNormRaster = path.join(linkDir, 'lcc_norm')
                arcpy.env.extent = "MINOF"

                link = lu.get_links_from_core_pairs(linkTable, corex, corey)
                lcDist = float(linkTable[link, cfg.LTB_CWDIST])

                # Normalized lcc rasters are created by adding cwd rasters
                # and subtracting the least cost distance between them.
                outRas = Raster(cwdRaster1) + Raster(cwdRaster2) - lcDist
                outRas.save(lccNormRaster)

                # Create raster mask
                resMaskRaster = path.join(linkDir, 'res_mask' + tif)
                outCon = arcpy.sa.Con(
                    Raster(lccNormRaster) <= cfg.CWDCUTOFF, 1)
                outCon.save(resMaskRaster)

                # Convert to poly. Use as mask to clip resistance raster.
                resMaskPoly = path.join(linkDir, 'res_mask_poly.shp')
                arcpy.RasterToPolygon_conversion(resMaskRaster, resMaskPoly,
                                                 "NO_SIMPLIFY")
                arcpy.env.extent = resMaskPoly

                # Includes 0 values in some cases with CP LI model if tif,
                # so using ESRI Grid format
                resClipRasterMasked = path.join(linkDir, 'res_clip_m')
                # Extract masked resistance raster.
                # Needs to be float to get export to npy to work.
                outRas = arcpy.sa.ExtractByMask(resRaster, resMaskPoly) + 0.0
                outRas.save(resClipRasterMasked)

                resNpyFN = 'resistances_link_' + linkId + '.npy'
                resNpyFile = path.join(INCIRCUITDIR, resNpyFN)
                numElements, numResistanceNodes = export_ras_to_npy(
                    resClipRasterMasked, resNpyFile)

                totMem, availMem = lu.get_mem()
                if numResistanceNodes / availMem > 2000000:
                    lu.dashline(1)
                    gwarn('Warning:')
                    gwarn('Circuitscape can only solve 2-3 million nodes')
                    gwarn('per gigabyte of available RAM. \nTotal physical '
                          'RAM on your machine is ~' + str(totMem) +
                          ' GB. \nAvailable memory is ~' + str(availMem) +
                          ' GB. \nYour resistance raster has ' +
                          str(numResistanceNodes) + ' nodes.')
                    lu.dashline(2)

                corePairRaster = path.join(linkDir, 'core_pairs' + tif)
                arcpy.env.extent = resClipRasterMasked

                # Next result needs to be floating pt for numpy export
                outCon = arcpy.sa.Con(
                    Raster(cwdRaster1) == 0, corex,
                    arcpy.sa.Con(Raster(cwdRaster2) == 0, corey + 0.0))
                outCon.save(corePairRaster)

                coreNpyFN = 'cores_link_' + linkId + '.npy'
                coreNpyFile = path.join(INCIRCUITDIR, coreNpyFN)
                numElements, numNodes = export_ras_to_npy(corePairRaster,
                                                          coreNpyFile)

                arcpy.env.extent = "MINOF"

                # Set circuitscape options and call
                options = lu.setCircuitscapeOptions()
                if cfg.WRITE_VOLT_MAPS:
                    options['write_volt_maps'] = True
                options['habitat_file'] = resNpyFile
                # if int(linkId) > 2:
                #     options['habitat_file'] = 'c:\\test.dummy'
                options['point_file'] = coreNpyFile
                options['set_focal_node_currents_to_zero'] = True
                outputFN = 'Circuitscape_link' + linkId + '.out'
                options['output_file'] = path.join(OUTCIRCUITDIR, outputFN)
                if numElements > 250000:
                    options['print_timings'] = True
                configFN = 'pinchpoint_config' + linkId + '.ini'
                outConfigFile = path.join(CONFIGDIR, configFN)
                lu.writeCircuitscapeConfigFile(outConfigFile, options)

                gprint('Processing link ID #' + str(linkId) +
                       '. Resistance map has ' +
                       str(int(numResistanceNodes)) + ' nodes.')

                memFlag = call_circuitscape(CSPATH, outConfigFile)

                currentFN = ('Circuitscape_link' + linkId +
                             '_cum_curmap.npy')
                currentMap = path.join(OUTCIRCUITDIR, currentFN)
                if not arcpy.Exists(currentMap):
                    print_failure(numResistanceNodes, memFlag, 10)
                    numElements, numNodes = export_ras_to_npy(
                        resClipRasterMasked, resNpyFile)
                    memFlag = call_circuitscape(CSPATH, outConfigFile)
                    currentFN = ('Circuitscape_link' + linkId +
                                 '_cum_curmap.npy')
                    currentMap = path.join(OUTCIRCUITDIR, currentFN)

                if not arcpy.Exists(currentMap):
                    msg = ('\nCircuitscape failed. See error information '
                           'above.')
                    arcpy.AddError(msg)
                    lu.write_log(msg)
                    exit(1)

                # Either set core areas to nodata in current map or
                # divide each by its radius
                currentRaster = path.join(linkDir, "current" + tif)
                import_npy_to_ras(currentMap, corePairRaster, currentRaster)

                if cfg.WRITE_VOLT_MAPS:
                    voltFN = ('Circuitscape_link' + linkId + '_voltmap_' +
                              str(corex) + '_' + str(corey) + '.npy')
                    voltMap = path.join(OUTCIRCUITDIR, voltFN)
                    voltRaster = path.join(
                        outputGDB, cfg.PREFIX + "_voltMap_" + str(corex) +
                        '_' + str(corey))
                    import_npy_to_ras(voltMap, corePairRaster, voltRaster)
                    gprint('Building output statistics and pyramids '
                           'for voltage raster\n')
                    lu.build_stats(voltRaster)

                arcpy.env.extent = currentRaster

                if SETCORESTONULL:
                    # Set core areas to NoData in current map for color
                    # ramping
                    currentRaster2 = currentRaster + '2' + tif
                    outCon = arcpy.sa.Con(
                        arcpy.sa.IsNull(Raster(corePairRaster)),
                        Raster(currentRaster))
                    outCon.save(currentRaster2)
                    currentRaster = currentRaster2

                arcpy.env.extent = "MAXOF"
                if linkLoop == 1:
                    lu.delete_data(mosaicRaster)

                    @retry(10)
                    def copyRas2():
                        arcpy.CopyRaster_management(currentRaster,
                                                    mosaicRaster)
                    copyRas2()
                else:
                    @retry(10)
                    def mosaicRas():
                        arcpy.Mosaic_management(currentRaster, mosaicRaster,
                                                "MAXIMUM", "MATCH")
                    mosaicRas()

                resistancesFN = ('Circuitscape_link' + linkId +
                                 '_resistances_3columns.out')
                resistancesFile = path.join(OUTCIRCUITDIR, resistancesFN)
                resistances = npy.loadtxt(resistancesFile, dtype='Float64',
                                          comments='#')

                resistance = (float(str(arcpy.env.cellSize)) *
                              resistances[2])
                linkTable[link, cfg.LTB_EFFRESIST] = resistance

                # Ratio
                if not cfg.SQUARERESISTANCES:
                    linkTable[link, cfg.LTB_CWDTORR] = (
                        linkTable[link, cfg.LTB_CWDIST] /
                        linkTable[link, cfg.LTB_EFFRESIST])

                # Clean up
                if not cfg.SAVE_TEMP_CIRCUIT_FILES:
                    lu.delete_file(coreNpyFile)
                    coreNpyBase, extension = path.splitext(coreNpyFile)
                    lu.delete_data(coreNpyBase + '.hdr')
                    lu.delete_file(resNpyFile)
                    resNpyBase, extension = path.splitext(resNpyFile)
                    lu.delete_data(resNpyBase + '.hdr')
                    lu.delete_file(currentMap)
                    curMapBase, extension = path.splitext(currentMap)
                    lu.delete_data(curMapBase + '.hdr')
                    lu.delete_data(currentRaster)
                    lu.clean_out_workspace(linkDir)
                    lu.delete_dir(linkDir)

                gprint('Finished with link ID #' + str(linkId) + '. ' +
                       str(linkLoop) + ' out of ' + str(numCorridorLinks) +
                       ' links have been processed.')
                start_time1 = lu.elapsed_time(start_time1)

            outputRaster = path.join(
                outputGDB,
                cfg.PREFIX + "_current_adjacentPairs_" + cutoffText)
            lu.delete_data(outputRaster)

            @retry(10)
            def copyRas():
                arcpy.CopyRaster_management(mosaicRaster, outputRaster)
            copyRas()

            gprint('Building output statistics and pyramids '
                   'for corridor pinch point raster\n')
            lu.build_stats(outputRaster)

            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=5,
                                                     thisStep=8)

            linkTableFile = path.join(cfg.DATAPASSDIR,
                                      "linkTable_s5_plus.csv")
            lu.write_link_table(finalLinkTable, linkTableFile,
                                inLinkTableFile)
            linkTableFinalFile = path.join(
                cfg.OUTPUTDIR, cfg.PREFIX + "_linkTable_s5_plus.csv")
            lu.write_link_table(finalLinkTable, linkTableFinalFile,
                                inLinkTableFile)
            gprint('Copy of linkTable written to ' + linkTableFinalFile)

            # fixme: update sticks?
            gprint('Creating shapefiles with linework for links.')
            lu.write_link_maps(linkTableFinalFile, step=8)

            # Copy final link maps to gdb.
            lu.copy_final_link_maps(step=8)

            lu.delete_data(mosaicRaster)

        if not cfg.DO_ALLPAIRS:
            # Clean up temporary files
            if not cfg.SAVECURRENTMAPS:
                lu.delete_dir(OUTCIRCUITDIR)
            return

        lu.dashline(1)
        gprint('Mapping global pinch points among all\n'
               'core area pairs using Circuitscape.')
        if cfg.ALL_PAIR_SCENARIO == 'pairwise':
            gprint('Circuitscape will be run in PAIRWISE mode.')
        else:
            gprint('Circuitscape will be run in ALL-TO-ONE mode.')
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST

        S8CORE_RAS = "s8core_ras"
        s8CoreRasPath = path.join(cfg.SCRATCHDIR, S8CORE_RAS)

        arcpy.FeatureToRaster_conversion(cfg.COREFC, cfg.COREFN,
                                         s8CoreRasPath, arcpy.env.cellSize)
        binaryCoreRaster = path.join(cfg.SCRATCHDIR, "core_ras_bin")

        # The following commands cause file lock problems on save. Using gp
        # instead.
        # outCon = arcpy.sa.Con(S8CORE_RAS, 1, "#", "VALUE > 0")
        # outCon.save(binaryCoreRaster)
        # gp.Con_sa(s8CoreRasPath, 1, binaryCoreRaster, "#", "VALUE > 0")
        outCon = arcpy.sa.Con(Raster(s8CoreRasPath) > 0, 1)
        outCon.save(binaryCoreRaster)
        s5corridorRas = path.join(cfg.OUTPUTGDB, cfg.PREFIX + "_corridors")

        if not arcpy.Exists(s5corridorRas):
            s5corridorRas = path.join(cfg.OUTPUTGDB,
                                      cfg.PREFIX + "_lcc_mosaic_int")

        outCon = arcpy.sa.Con(
            Raster(s5corridorRas) <= cfg.CWDCUTOFF, Raster(resRaster),
            arcpy.sa.Con(Raster(binaryCoreRaster) > 0, Raster(resRaster)))

        resRasClipPath = path.join(cfg.SCRATCHDIR, 'res_ras_clip')
        outCon.save(resRasClipPath)

        arcpy.env.cellSize = resRasClipPath
        arcpy.env.extent = resRasClipPath
        s8CoreRasClipped = s8CoreRasPath + '_c'

        # Produce core raster with same extent as clipped resistance raster.
        # Added to ensure correct data type - nodata values were positive
        # for cores otherwise
        outCon = arcpy.sa.Con(arcpy.sa.IsNull(Raster(s8CoreRasPath)), -9999,
                              Raster(s8CoreRasPath))
        outCon.save(s8CoreRasClipped)

        resNpyFN = 'resistances.npy'
        resNpyFile = path.join(INCIRCUITDIR, resNpyFN)
        numElements, numResistanceNodes = export_ras_to_npy(resRasClipPath,
                                                            resNpyFile)

        totMem, availMem = lu.get_mem()
        if numResistanceNodes / availMem > 2000000:
            lu.dashline(1)
            gwarn('Warning:')
            gwarn('Circuitscape can only solve 2-3 million nodes')
            gwarn('per gigabyte of available RAM. \nTotal physical RAM '
                  'on your machine is ~' + str(totMem) +
                  ' GB. \nAvailable memory is ~' + str(availMem) +
                  ' GB. \nYour resistance raster has ' +
                  str(numResistanceNodes) + ' nodes.')
            lu.dashline(0)

        coreNpyFN = 'cores.npy'
        coreNpyFile = path.join(INCIRCUITDIR, coreNpyFN)
        numElements, numNodes = export_ras_to_npy(s8CoreRasClipped,
                                                  coreNpyFile)

        arcpy.env.extent = "MINOF"

        options = lu.setCircuitscapeOptions()
        options['scenario'] = cfg.ALL_PAIR_SCENARIO
        options['habitat_file'] = resNpyFile
        options['point_file'] = coreNpyFile
        options['set_focal_node_currents_to_zero'] = True
        outputFN = 'Circuitscape.out'
        options['output_file'] = path.join(OUTCIRCUITDIR, outputFN)
        options['print_timings'] = True
        configFN = 'pinchpoint_allpair_config.ini'

        outConfigFile = path.join(CONFIGDIR, configFN)
        lu.writeCircuitscapeConfigFile(outConfigFile, options)
        gprint('\nResistance map has ' + str(int(numResistanceNodes)) +
               ' nodes.')
        lu.dashline(1)
        gprint('If you try to cancel your run and the Arc dialog hangs, ')
        gprint('you can kill Circuitscape by opening Windows Task Manager')
        gprint('and ending the cs_run.exe process.')
        lu.dashline(0)

        call_circuitscape(CSPATH, outConfigFile)
        # test = subprocess.call([CSPATH, outConfigFile],
        #                        creationflags=subprocess.CREATE_NEW_CONSOLE)

        if options['scenario'] == 'pairwise':
            rasterSuffix = "_current_allPairs_" + cutoffText
        else:
            rasterSuffix = "_current_allToOne_" + cutoffText

        currentFN = 'Circuitscape_cum_curmap.npy'
        currentMap = path.join(OUTCIRCUITDIR, currentFN)
        outputRaster = path.join(outputGDB, cfg.PREFIX + rasterSuffix)
        currentRaster = path.join(cfg.SCRATCHDIR, "current")

        try:
            import_npy_to_ras(currentMap, resRasClipPath, outputRaster)
        except:
            lu.dashline(1)
            msg = ('ERROR: Circuitscape failed. \n'
                   'Note: Circuitscape can only solve 2-3 million nodes'
                   '\nper gigabyte of available RAM. The resistance '
                   '\nraster for the last corridor had ' +
                   str(numResistanceNodes) + ' nodes.\n\nResistance '
                   'raster values that vary by >6 orders of \nmagnitude'
                   ' can also cause failures, as can a mismatch in '
                   '\ncore area and resistance raster extents.')
            arcpy.AddError(msg)
            lu.write_log(msg)
            exit(1)

        # Set core areas to nodata
        if SETCORESTONULL:
            # Set core areas to NoData in current map for color ramping
            outputRasterND = outputRaster + '_noDataCores'
            outCon = arcpy.sa.SetNull(Raster(s8CoreRasClipped) > 0,
                                      Raster(outputRaster))
            outCon.save(outputRasterND)

        gprint('\nBuilding output statistics and pyramids '
               'for centrality raster.')
        lu.build_stats(outputRaster)
        lu.build_stats(outputRasterND)

        # Clean up temporary files
        if not cfg.SAVECURRENTMAPS:
            lu.delete_dir(OUTCIRCUITDIR)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 8. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 8. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
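# The @retry decorator used in STEP8 is defined elsewhere in this codebase.
# A minimal sketch of the behavior assumed above -- re-call the wrapped
# function up to `attempts` times, pausing between tries and re-raising the
# final failure (the pause length is an assumption):
import time

def retry(attempts):
    def decorator(func):
        def wrapper(*args, **kwargs):
            for attempt in range(attempts):
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if attempt == attempts - 1:
                        raise  # out of retries; propagate the error
                    time.sleep(5)  # brief pause before retrying
        return wrapper
    return decorator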
def generate_distance_file(): """Use ArcGIS to create Conefor distance file Requires ArcInfo license. """ try: #gp.Extent = gp.Describe(cfg.COREFC).Extent gp.CellSize = gp.Describe(cfg.RESRAST).MeanCellHeight S2COREFC = cfg.COREFC if cfg.SIMPLIFY_CORES: try: gprint('Simplifying polygons for core pair distance calculations') COREFC_SIMP = path.join(cfg.SCRATCHDIR, "CoreFC_Simp.shp") tolerance = float(gp.CellSize) / 3 try: import arcpy import arcpy.cartography as CA except: arcpy = False if arcpy: CA.SimplifyPolygon(cfg.COREFC, COREFC_SIMP, "POINT_REMOVE", tolerance, "#", "NO_CHECK") else: gp.SimplifyPolygon(cfg.COREFC, COREFC_SIMP, "POINT_REMOVE", tolerance, "#", "NO_CHECK") S2COREFC = COREFC_SIMP except: pass # In case point geometry is entered for core area FC gp.workspace = cfg.SCRATCHDIR FS2COREFC = "fcores" FS2COREFC2 = "fcores2" gp.MakeFeatureLayer(S2COREFC, FS2COREFC) gp.MakeFeatureLayer(S2COREFC, FS2COREFC2) output = [] csvseparator = "\t" adjList = get_full_adj_list() # sourceCores = npy.unique(adjList[:, 0]) gprint('\nFinding distances between cores using Generate Near Table.') # gp.OutputCoordinateSystem = gp.describe(cfg.COREFC).SpatialReference near_tbl = path.join(cfg.SCRATCHDIR, "neartbl.dbf") # gprint('old method') # start_time = time.clock() # gp.generateneartable(S2COREFC, S2COREFC, near_tbl, "#", # "NO_LOCATION", "NO_ANGLE", "ALL", "0") # start_time = lu.elapsed_time(start_time) gprint('There are ' + str(len(adjList)) + ' adjacent core pairs to ' 'process.') pctDone = 0 start_time = time.clock() for x in range(0, len(adjList)): pctDone = lu.report_pct_done(x, len(adjList), pctDone) sourceCore = adjList[x, 0] targetCore = adjList[x, 1] expression = cfg.COREFN + " = " + str(sourceCore) gp.selectlayerbyattribute(FS2COREFC, "NEW_SELECTION", expression) expression = cfg.COREFN + " = " + str(targetCore) gp.selectlayerbyattribute(FS2COREFC2, "NEW_SELECTION", expression) gp.generateneartable(FS2COREFC, FS2COREFC2, near_tbl, "#", "NO_LOCATION", "NO_ANGLE", "ALL", "0") rows = gp.searchcursor(near_tbl) row = rows.Next() minDist = 1e20 if row: # May be running on selected core areas in step 2 while row: dist = row.getvalue("NEAR_DIST") if dist <= 0: # In case simplified polygons abut one another dist = float(gp.CellSize) if dist < minDist: minDist = dist outputrow = [] outputrow.append(str(sourceCore)) outputrow.append(str(targetCore)) outputrow.append(str(dist)) del row row = rows.Next() del rows output.append(csvseparator.join(outputrow)) start_time = lu.elapsed_time(start_time) # In case coreFC is grouped in TOC, get coreFN for non-Arc statement group,coreFN = path.split(cfg.COREFC) dist_fname = path.join(cfg.PROJECTDIR, (coreFN + "_dists.txt")) dist_file = open(dist_fname, 'w') dist_file.write('\n'.join(output)) dist_file.close() gprint('Distance file ' + dist_fname + ' generated.\n') return dist_fname except arcgisscripting.ExecuteError: lu.dashline(1) gprint('****Failed in step 2. Details follow.****') lu.exit_with_geoproc_error(_SCRIPT_NAME) # Return any PYTHON or system specific errors except: lu.dashline(1) gprint('****Failed in step 2. Details follow.****') lu.exit_with_python_error(_SCRIPT_NAME)
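# --- Editor's note (illustrative sketch, not part of the toolbox) ----------
# generate_distance_file() above writes one tab-separated row per adjacent
# core pair: source core ID, target core ID, and the Generate Near Table
# distance in map units. A minimal stand-alone reader for that layout might
# look like the following; the function name and the returned dict structure
# are assumptions for illustration, not part of the Linkage Mapper API.
import csv

def read_core_distances(dist_fname):
    """Load a source<TAB>target<TAB>distance file into a pair-keyed dict."""
    dists = {}
    with open(dist_fname) as in_file:
        for row in csv.reader(in_file, delimiter='\t'):
            source, target, dist = row
            dists[(int(float(source)), int(float(target)))] = float(dist)
    return dists
# ----------------------------------------------------------------------------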
def cwadjacency(): """Calculate cost-weighted adjacency Inputs: gp - geoprocessing object """ try: ALLOC_RASFN = "CWD_alloc_ras" gprint('\nCalculating cost-weighted distance adjacency') outcsvfile = cfg.CWDADJFILE outcsvLogfile = path.join(cfg.LOGDIR, "cwdAdj_STEP1.csv") PREFIX = cfg.PREFIX # May need to set extent prior to core poly to raster conversion... # ---------------------------------------------- # Cost-weighted allocation code gp.cellSize = gp.Describe(cfg.RESRAST).MeanCellHeight gp.extent = gp.Describe(cfg.RESRAST).extent if cfg.BUFFERDIST is not None: # Clip resistance raster using bounding circle start_time = time.clock() gp.cellSize = gp.Describe(cfg.RESRAST).MeanCellHeight#xxx gp.extent = gp.Describe(cfg.RESRAST).Extent#xxx bResistance = path.join(cfg.SCRATCHDIR, "bResistance") gp.ExtractByMask_sa(cfg.RESRAST, cfg.BNDCIR, bResistance) gprint('\nReduced resistance raster extracted using ' 'bounding circle.') start_time = lu.elapsed_time(start_time) else: bResistance = cfg.RESRAST start_time = time.clock() gprint('Starting cost-weighted distance allocation...') # core_rastmp = 'core_rastmp' if cfg.TMAXCWDIST is not None: gprint('Maximum cost-weighted distance set to ' + str(cfg.TMAXCWDIST)) gp.CellSize = gp.Describe(bResistance).MeanCellHeight gp.extent = "MAXOF" gprint('Processing cell size: ' + gp.CellSize) gp.workspace = cfg.ADJACENCYDIR gp.scratchworkspace = cfg.ARCSCRATCHDIR lu.delete_data(cfg.CWDGDB) if not gp.exists(cfg.CWDGDB): gp.createfilegdb(cfg.OUTPUTDIR, path.basename(cfg.CWDGDB)) outDistanceRaster = path.join(cfg.CWDGDB, PREFIX + "_cwd") alloc_ras = path.join(cfg.ADJACENCYDIR, ALLOC_RASFN) lu.delete_data(alloc_ras) lu.delete_data(outDistanceRaster) count = 0 if arcpy: statement = ('costAllocOut = CostAllocation(cfg.CORERAS, ' 'bResistance, cfg.TMAXCWDIST, cfg.CORERAS,"VALUE", ' 'outDistanceRaster);' 'costAllocOut.save(alloc_ras)') else: statement = ('gp.Costallocation_sa(cfg.CORERAS, bResistance, ' 'alloc_ras, cfg.TMAXCWDIST, cfg.CORERAS, "VALUE", ' 'outDistanceRaster, "")') while True: try: exec statement except: count, tryAgain = lu.retry_arc_error(count, statement) if not tryAgain: exec statement else: break gprint('\nBuilding output statistics and pyramids for CWD raster.') lu.build_stats(outDistanceRaster) gp.scratchworkspace = cfg.ARCSCRATCHDIR gprint('Cost-weighted distance allocation done.') start_time = lu.elapsed_time(start_time) adjshiftwrite(alloc_ras, outcsvfile, outcsvLogfile) # Return GEOPROCESSING specific errors except arcgisscripting.ExecuteError: lu.dashline(1) gprint('****Failed in step 1. Details follow.****') lu.exit_with_geoproc_error(_SCRIPT_NAME) # Return any PYTHON or system specific errors except: lu.dashline(1) gprint('****Failed in step 1. Details follow.****') lu.exit_with_python_error(_SCRIPT_NAME)
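# --- Editor's note (illustrative sketch) ------------------------------------
# cwadjacency() above wraps its cost-allocation call in a while/try loop that
# re-executes the statement after transient ArcGIS failures (via
# lu.retry_arc_error). The same pattern, stripped down to a generic,
# self-contained helper -- the name, retry count, and delay below are
# hypothetical, not the lu.retry_arc_error API:
import time

def retry_geoprocessing(func, max_tries=3, delay_secs=30):
    """Run func(); on an exception, wait and retry, re-raising on the last try."""
    for attempt in range(1, max_tries + 1):
        try:
            return func()
        except Exception:
            if attempt == max_tries:
                raise  # give up; let the step's error handler report it
            time.sleep(delay_secs)

# Example usage (hypothetical call):
# retry_geoprocessing(lambda: gp.ExtractByMask_sa(src, mask, out))
# ----------------------------------------------------------------------------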
def euadjacency(): """Calculate Euclidean adjacency Inputs: gp - geoprocessing object """ try: ALLOC_RASFN = "Euc_alloc_ras" lu.dashline() gprint('Calculating Euclidean adjacency') outcsvfile = cfg.EUCADJFILE outcsvLogfile = path.join(cfg.LOGDIR, "eucAdj_STEP1.csv") # ---------------------------------------------- # Euclidean allocation code gp.workspace = cfg.ADJACENCYDIR gprint('Starting Euclidean adjacency processing...') # Euclidean cell size cellSizeEuclidean = gp.Describe(cfg.RESRAST).MeanCellHeight oldextent = gp.extent if cfg.BUFFERDIST is not None: gp.extent = gp.Describe(cfg.BNDCIR).extent start_time = time.clock() gp.scratchworkspace = cfg.ARCSCRATCHDIR outDistanceRaster = path.join(cfg.ADJACENCYDIR, "euc") alloc_ras = path.join(cfg.ADJACENCYDIR, ALLOC_RASFN) lu.delete_data(alloc_ras) lu.delete_data(outDistanceRaster) count = 0 statement = ('gp.EucAllocation_sa(cfg.CORERAS, alloc_ras, "","", ' 'cellSizeEuclidean, "", outDistanceRaster, "")') while True: try: exec statement except: count, tryAgain = lu.retry_arc_error(count, statement) if not tryAgain: exec statement else: break gp.scratchworkspace = cfg.ARCSCRATCHDIR gprint('\nEuclidean distance allocation done.') start_time = lu.elapsed_time(start_time) gp.extent = oldextent adjshiftwrite(alloc_ras, outcsvfile, outcsvLogfile) # Clean up lu.delete_data(outDistanceRaster) # Return GEOPROCESSING specific errors except arcgisscripting.ExecuteError: lu.dashline(1) gprint('****Failed in step 1. Details follow.****') lu.exit_with_geoproc_error(_SCRIPT_NAME) # Return any PYTHON or system specific errors except: lu.dashline(1) gprint('****Failed in step 1. Details follow.****') lu.exit_with_python_error(_SCRIPT_NAME)
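# --- Editor's note (illustrative sketch) ------------------------------------
# Both adjacency routines above build an allocation raster (every cell
# labelled with the ID of its nearest core, by cost-weighted or Euclidean
# distance) and hand it to adjshiftwrite(), which appears to detect
# neighboring allocations by comparing the grid against one-cell-shifted
# copies of itself. A minimal numpy version of that idea is sketched below
# under that assumption; the function name and the -9999 NoData value are
# placeholders.
import numpy as npy

def adjacent_core_pairs(alloc, nodata=-9999):
    """Return sorted (coreA, coreB) pairs whose allocation zones touch."""
    pairs = set()
    rows, cols = alloc.shape
    # Right, down, and the two diagonal offsets cover all 8-neighbor contacts
    for dr, dc in [(0, 1), (1, 0), (1, 1), (1, -1)]:
        a = alloc[max(dr, 0):rows + min(dr, 0), max(dc, 0):cols + min(dc, 0)]
        b = alloc[max(-dr, 0):rows + min(-dr, 0), max(-dc, 0):cols + min(-dc, 0)]
        touching = (a != b) & (a != nodata) & (b != nodata)
        for core_a, core_b in zip(a[touching], b[touching]):
            pairs.add((min(core_a, core_b), max(core_a, core_b)))
    return sorted(pairs)
# ----------------------------------------------------------------------------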
def doRadiusLoop(): linkTable = linkTableTemp.copy() startTime = time.clock() randomerror() linkLoop = 0 pctDone = 0 gprint('\nMapping barriers at a radius of ' + str(radius) + ' ' + str(mapUnits)) if cfg.SUM_BARRIERS: gprint('using SUM method') else: gprint('using MAXIMUM method') if numCorridorLinks > 1: gprint('0 percent done') lastMosaicRaster = None lastMosaicRasterPct = None for x in range(0, numLinks): pctDone = lu.report_pct_done(linkLoop, numCorridorLinks, pctDone) linkId = str(int(linkTable[x, cfg.LTB_LINKID])) if ((linkTable[x, cfg.LTB_LINKTYPE] > 0) and (linkTable[x, cfg.LTB_LINKTYPE] < 1000)): linkLoop = linkLoop + 1 # source and target cores corex = int(coreList[x, 0]) corey = int(coreList[x, 1]) # Get cwd rasters for source and target cores cwdRaster1 = lu.get_cwd_path(corex) cwdRaster2 = lu.get_cwd_path(corey) # Mask out areas above CWD threshold cwdTemp1 = None cwdTemp2 = None if cfg.BARRIER_CWD_THRESH is not None: if x == 1: lu.dashline(1) gprint(' Using CWD threshold of ' + str(cfg.BARRIER_CWD_THRESH) + ' map units.') arcpy.env.extent = cfg.RESRAST arcpy.env.cellSize = cfg.RESRAST arcpy.env.snapRaster = cfg.RESRAST cwdTemp1 = path.join(cfg.SCRATCHDIR, "tmp" + str(corex)) outCon = arcpy.sa.Con( cwdRaster1 < float(cfg.BARRIER_CWD_THRESH), cwdRaster1) outCon.save(cwdTemp1) cwdRaster1 = cwdTemp1 cwdTemp2 = path.join(cfg.SCRATCHDIR, "tmp" + str(corey)) outCon = arcpy.sa.Con( cwdRaster2 < float(cfg.BARRIER_CWD_THRESH), cwdRaster2) outCon.save(cwdTemp2) cwdRaster2 = cwdTemp2 focalRaster1 = lu.get_focal_path(corex, radius) focalRaster2 = lu.get_focal_path(corey, radius) link = lu.get_links_from_core_pairs( linkTable, corex, corey) lcDist = float(linkTable[link, cfg.LTB_CWDIST]) # Detect barriers at radius using neighborhood stats # Create the Neighborhood Object innerRadius = radius - 1 outerRadius = radius dia = 2 * radius InNeighborhood = ("ANNULUS " + str(innerRadius) + " " + str(outerRadius) + " MAP") @retry(10) def execFocal(): randomerror() # Execute FocalStatistics if not path.exists(focalRaster1): arcpy.env.extent = cwdRaster1 outFocalStats = arcpy.sa.FocalStatistics( cwdRaster1, InNeighborhood, "MINIMUM", "DATA") if setCoresToNull: outFocalStats2 = arcpy.sa.Con( outFocalStats > 0, outFocalStats ) # Set areas overlapping cores to NoData xxx outFocalStats2.save(focalRaster1) #xxx else: outFocalStats.save(focalRaster1) #xxx arcpy.env.extent = cfg.RESRAST if not path.exists(focalRaster2): arcpy.env.extent = cwdRaster2 outFocalStats = arcpy.sa.FocalStatistics( cwdRaster2, InNeighborhood, "MINIMUM", "DATA") if setCoresToNull: outFocalStats2 = arcpy.sa.Con( outFocalStats > 0, outFocalStats ) # Set areas overlapping cores to NoData xxx outFocalStats2.save(focalRaster2) #xxx else: outFocalStats.save(focalRaster2) #xxx arcpy.env.extent = cfg.RESRAST execFocal() lu.delete_data(cwdTemp1) lu.delete_data(cwdTemp2) barrierRaster = path.join( cbarrierdir, "b" + str(radius) + "_" + str(corex) + "_" + str(corey) + '.tif') if cfg.SUM_BARRIERS: # Need to set nulls to 0, also # create trim rasters as we go outRas = ((lcDist - Raster(focalRaster1) - Raster(focalRaster2) - dia) / dia) outCon = arcpy.sa.Con(IsNull(outRas), 0, outRas) outCon2 = arcpy.sa.Con(outCon < 0, 0, outCon) outCon2.save(barrierRaster) # Execute FocalStatistics to fill out search radii InNeighborhood = "CIRCLE " + str( outerRadius) + " MAP" fillRaster = path.join( cbarrierdir, "b" + str(radius) + "_" + str(corex) + "_" + str(corey) + "_fill.tif") outFocalStats = arcpy.sa.FocalStatistics( barrierRaster, InNeighborhood, 
"MAXIMUM", "DATA") outFocalStats.save(fillRaster) if cfg.WRITE_TRIM_RASTERS: trmRaster = path.join( cbarrierdir, "b" + str(radius) + "_" + str(corex) + "_" + str(corey) + "_trim.tif") rasterList = [fillRaster, resistFillRaster] outCellStatistics = arcpy.sa.CellStatistics( rasterList, "MINIMUM") outCellStatistics.save(trmRaster) else: #Calculate potential benefit per map unit restored @retry(10) def calcBen(): randomerror() outRas = ((lcDist - Raster(focalRaster1) - Raster(focalRaster2) - dia) / dia) outRas.save(barrierRaster) calcBen() if cfg.WRITE_PCT_RASTERS: #Calculate PERCENT potential benefit per unit restored barrierRasterPct = path.join( cbarrierdir, "b" + str(radius) + "_" + str(corex) + "_" + str(corey) + '_pct.tif') @retry(10) def calcBenPct(): randomerror() outras = (100 * (Raster(barrierRaster) / lcDist)) outras.save(barrierRasterPct) calcBenPct() # Mosaic barrier results across core area pairs mosaicDir = path.join( cfg.SCRATCHDIR, 'mos' + str(radId) + '_' + str(x + 1)) lu.create_dir(mosaicDir) mosFN = 'mos_temp' tempMosaicRaster = path.join(mosaicDir, mosFN) tempMosaicRasterTrim = path.join( mosaicDir, 'mos_temp_trm') arcpy.env.workspace = mosaicDir if linkLoop == 1: #If this is the first grid then copy rather than mosaic arcpy.CopyRaster_management( barrierRaster, tempMosaicRaster) if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS: arcpy.CopyRaster_management( trmRaster, tempMosaicRasterTrim) else: if cfg.SUM_BARRIERS: outCon = arcpy.sa.Con( Raster(barrierRaster) < 0, lastMosaicRaster, Raster(barrierRaster) + Raster(lastMosaicRaster)) outCon.save(tempMosaicRaster) if cfg.WRITE_TRIM_RASTERS: outCon = arcpy.sa.Con( Raster(trmRaster) < 0, lastMosaicRasterTrim, Raster(trmRaster) + Raster(lastMosaicRasterTrim)) outCon.save(tempMosaicRasterTrim) else: rasterString = ('"' + barrierRaster + ";" + lastMosaicRaster + '"') @retry(10) def mosaicToNew(): randomerror() arcpy.MosaicToNewRaster_management( rasterString, mosaicDir, mosFN, "", "32_BIT_FLOAT", arcpy.env.cellSize, "1", "MAXIMUM", "MATCH") mosaicToNew() # gprint(str(corex)+'0'+str(corey)) if linkLoop > 1: #Clean up from previous loop lu.delete_data(lastMosaicRaster) lastMosaicDir = path.dirname(lastMosaicRaster) lu.clean_out_workspace(lastMosaicDir) lu.delete_dir(lastMosaicDir) lastMosaicRaster = tempMosaicRaster if cfg.WRITE_TRIM_RASTERS: lastMosaicRasterTrim = tempMosaicRasterTrim if cfg.WRITE_PCT_RASTERS: mosPctFN = 'mos_temp_pct' mosaicDirPct = path.join( cfg.SCRATCHDIR, 'mosP' + str(radId) + '_' + str(x + 1)) lu.create_dir(mosaicDirPct) tempMosaicRasterPct = path.join( mosaicDirPct, mosPctFN) if linkLoop == 1: # If this is the first grid then copy # rather than mosaic if cfg.SUM_BARRIERS: outCon = arcpy.sa.Con( Raster(barrierRasterPct) < 0, 0, arcpy.sa.Con(IsNull(barrierRasterPct), 0, barrierRasterPct)) outCon.save(tempMosaicRasterPct) else: arcpy.CopyRaster_management( barrierRasterPct, tempMosaicRasterPct) else: if cfg.SUM_BARRIERS: @retry(10) def sumBarriers(): randomerror() outCon = arcpy.sa.Con( Raster(barrierRasterPct) < 0, lastMosaicRasterPct, Raster(barrierRasterPct) + Raster(lastMosaicRasterPct)) outCon.save(tempMosaicRasterPct) sumBarriers() else: rasterString = ('"' + barrierRasterPct + ";" + lastMosaicRasterPct + '"') @retry(10) def maxBarriers(): randomerror() arcpy.MosaicToNewRaster_management( rasterString, mosaicDirPct, mosPctFN, "", "32_BIT_FLOAT", arcpy.env.cellSize, "1", "MAXIMUM", "MATCH") maxBarriers() if linkLoop > 1: #Clean up from previous loop lu.delete_data(lastMosaicRasterPct) lastMosaicDirPct = 
path.dirname(lastMosaicRasterPct) lu.clean_out_workspace(lastMosaicDirPct) lu.delete_dir(lastMosaicDirPct) # lu.delete_data(lastMosaicRasterPct) lastMosaicRasterPct = tempMosaicRasterPct if not cfg.SAVEBARRIERRASTERS: lu.delete_data(barrierRaster) if cfg.WRITE_PCT_RASTERS: lu.delete_data(barrierRasterPct) if cfg.WRITE_TRIM_RASTERS: lu.delete_data(trmRaster) # Temporarily disable links in linktable - # don't want to mosaic them twice for y in range(x + 1, numLinks): corex1 = int(coreList[y, 0]) corey1 = int(coreList[y, 1]) if corex1 == corex and corey1 == corey: linkTable[y, cfg.LTB_LINKTYPE] = ( linkTable[y, cfg.LTB_LINKTYPE] + 1000) elif corex1 == corey and corey1 == corex: linkTable[y, cfg.LTB_LINKTYPE] = ( linkTable[y, cfg.LTB_LINKTYPE] + 1000) if numCorridorLinks > 1 and pctDone < 100: gprint('100 percent done') gprint('Summarizing barrier data for search radius.') # Reinstate rows that were temporarily disabled rows = npy.where(linkTable[:, cfg.LTB_LINKTYPE] > 1000) linkTable[rows, cfg.LTB_LINKTYPE] = ( linkTable[rows, cfg.LTB_LINKTYPE] - 1000) # ----------------------------------------------------------------- # Set negative values to null or zero and write geodatabase. mosaicFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" + str(radius)) mosaicRaster = path.join(cfg.BARRIERGDB, mosaicFN) arcpy.env.extent = cfg.RESRAST # if setCoresToNull: # outCon = arcpy.sa.Con(Raster(tempMosaicRaster) < 0, 0, # tempMosaicRaster) # outCon.save(mosaicRaster) # else: outSetNull = arcpy.sa.SetNull(tempMosaicRaster, tempMosaicRaster, "VALUE < 0") outSetNull.save(mosaicRaster) lu.delete_data(tempMosaicRaster) if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS: mosaicFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix + "_Rad" + str(radius)) mosaicRasterTrim = path.join(cfg.BARRIERGDB, mosaicFN) arcpy.CopyRaster_management(tempMosaicRasterTrim, mosaicRasterTrim) lu.delete_data(tempMosaicRasterTrim) if cfg.WRITE_PCT_RASTERS: # Do same for percent raster mosaicPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix + "_Rad" + str(radius)) arcpy.env.extent = cfg.RESRAST outSetNull = arcpy.sa.SetNull(tempMosaicRasterPct, tempMosaicRasterPct, "VALUE < 0") mosaicRasterPct = path.join(cfg.BARRIERGDB, mosaicPctFN) outSetNull.save(mosaicRasterPct) lu.delete_data(tempMosaicRasterPct) # 'Grow out' maximum restoration gain to # neighborhood size for display InNeighborhood = "CIRCLE " + str(outerRadius) + " MAP" # Execute FocalStatistics fillRasterFN = "barriers_fill" + str(outerRadius) + tif fillRaster = path.join(cfg.BARRIERBASEDIR, fillRasterFN) outFocalStats = arcpy.sa.FocalStatistics( mosaicRaster, InNeighborhood, "MAXIMUM", "DATA") outFocalStats.save(fillRaster) if cfg.WRITE_PCT_RASTERS: # Do same for percent raster fillRasterPctFN = "barriers_fill_pct" + str( outerRadius) + tif fillRasterPct = path.join(cfg.BARRIERBASEDIR, fillRasterPctFN) outFocalStats = arcpy.sa.FocalStatistics( mosaicRasterPct, InNeighborhood, "MAXIMUM", "DATA") outFocalStats.save(fillRasterPct) # Place copies of filled rasters in output geodatabase arcpy.env.workspace = cfg.BARRIERGDB fillRasterFN = (PREFIX + "_BarrierCircles" + sumSuffix + "_Rad" + str(outerRadius)) arcpy.CopyRaster_management(fillRaster, fillRasterFN) if cfg.WRITE_PCT_RASTERS: fillRasterPctFN = (PREFIX + "_BarrierCircles_Pct" + sumSuffix + "_Rad" + str(outerRadius)) arcpy.CopyRaster_management(fillRasterPct, fillRasterPctFN) if not cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS: # Create pared-down version of filled raster- remove pixels # that don't need restoring by 
allowing a pixel to only # contribute its resistance value to restoration gain outRasterFN = "barriers_trm" + str(outerRadius) + tif outRaster = path.join(cfg.BARRIERBASEDIR, outRasterFN) rasterList = [fillRaster, resistFillRaster] outCellStatistics = arcpy.sa.CellStatistics( rasterList, "MINIMUM") outCellStatistics.save(outRaster) #SECOND ROUND TO CLIP BY DATA VALUES IN BARRIER RASTER outRaster2FN = ("barriers_trm" + sumSuffix + str(outerRadius) + "_2" + tif) outRaster2 = path.join(cfg.BARRIERBASEDIR, outRaster2FN) output = arcpy.sa.Con(IsNull(fillRaster), fillRaster, outRaster) output.save(outRaster2) outRasterFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix + "_Rad" + str(outerRadius)) outRasterPath = path.join(cfg.BARRIERGDB, outRasterFN) arcpy.CopyRaster_management(outRaster2, outRasterFN) randomerror() startTime = lu.elapsed_time(startTime)
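# --- Editor's note (illustrative sketch) ------------------------------------
# The heart of doRadiusLoop() above is the improvement score assigned to a
# candidate restoration circle:
#
#     (lcDist - focalRaster1 - focalRaster2 - diameter) / diameter
#
# i.e. how much the least-cost distance between two cores would drop if the
# circle were restored, per map unit of diameter restored; focalRaster1/2
# hold the minimum cost-weighted distance to each core on an annulus of the
# search radius. A stand-alone form of that formula (the names below are
# placeholders, and it accepts scalars or aligned numpy arrays alike):

def barrier_improvement(lc_dist, cwd_to_core1, cwd_to_core2, radius):
    """Per-map-unit benefit of restoring a circle; <= 0 means no benefit."""
    diameter = 2.0 * radius
    return (lc_dist - cwd_to_core1 - cwd_to_core2 - diameter) / diameter

# For example, with a 25000-unit corridor cost, 10000 and 12000 cost-weighted
# units to the two cores, and a 450 m radius:
# barrier_improvement(25000.0, 10000.0, 12000.0, 450.0) -> 2100/900 = 2.33...
# ----------------------------------------------------------------------------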
def STEP3_calc_cwds(): """Calculates cost-weighted distances from each core area. Uses bounding circles around source and target cores to limit extent of cwd calculations and speed computation. """ try: lu.dashline(1) gprint('Running script ' + _SCRIPT_NAME) lu.dashline(0) # Super secret setting to re-start failed run. Enter 'RESTART' as the # Name of the pairwise distance table in step 2, and uncheck step 2. # We can eventually place this in a .ini file. rerun = False if cfg.S2EUCDISTFILE != None: if cfg.S2EUCDISTFILE.lower() == "restart": rerun = True # if cfg.TMAXCWDIST is None: # gprint('NOT using a maximum cost-weighted distance.') # else: # gprint('Max cost-weighted distance for CWD calcs set ' # 'to ' + str(cfg.TMAXCWDIST) + '\n') if (cfg.BUFFERDIST) is not None: gprint('Bounding circles plus a buffer of ' + str(float(cfg.BUFFERDIST)) + ' map units will ' 'be used \n to limit extent of cost distance ' 'calculations.') elif cfg.TOOL <> cfg.TOOL_CC: gprint('NOT using bounding circles in cost distance ' 'calculations.') # set the analysis extent and cell size # So we don't extract rasters that go beyond extent of original raster if arcpy: arcpy.env.cellSize = cfg.RESRAST arcpy.env.extent="MINOF" else: gp.cellSize = gp.Describe(cfg.RESRAST).MeanCellHeight gp.Extent = "MINOF" gp.mask = cfg.RESRAST if arcpy: arcpy.env.overwriteOutput = True arcpy.env.workspace = cfg.SCRATCHDIR arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR else: gp.OverwriteOutput = True gp.workspace = cfg.SCRATCHDIR gp.scratchWorkspace = cfg.ARCSCRATCHDIR # Load linkTable (created in previous script) linkTableFile = lu.get_prev_step_link_table(step=3) linkTable = lu.load_link_table(linkTableFile) lu.report_links(linkTable) # Identify cores to map from LinkTable coresToMap = npy.unique(linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]) numCoresToMap = len(coresToMap) if numCoresToMap < 3: # No need to check for intermediate cores, because there aren't any cfg.S3DROPLCCSic = False else: cfg.S3DROPLCCSic = cfg.S3DROPLCCS gprint('\nNumber of core areas to connect: ' + str(numCoresToMap)) if rerun: # If picking up a failed run, make sure needed files are there lu.dashline(1) gprint ('\n****** RESTART MODE ENABLED ******\n') gprint ('**** NOTE: This mode picks up step 3 where a\n' 'previous run left off due to a crash or user\n' 'abort. It assumes you are using the same input\n' 'data used in the terminated run.****\n') lu.dashline(0) lu.snooze(10) savedLinkTableFile = path.join(cfg.DATAPASSDIR, "temp_linkTable_s3_partial.csv") coreListFile = path.join(cfg.DATAPASSDIR, "temp_cores_to_map.csv") if not path.exists(savedLinkTableFile) or not path.exists( coreListFile): gprint('No partial results file found from previous ' 'stopped run. 
Starting run from beginning.\n') lu.dashline(0) rerun = False # If picking up a failed run, use old folders if not rerun: startIndex = 0 if cfg.TOOL <> cfg.TOOL_CC: lu.make_cwd_paths(max(coresToMap)) # Set up cwd directories # make a feature layer for input cores to select from gp.MakeFeatureLayer(cfg.COREFC, cfg.FCORES) # Drop links that are too long gprint('\nChecking for corridors that are too long to map.') DISABLE_LEAST_COST_NO_VAL = False linkTable,numDroppedLinks = lu.drop_links(linkTable, cfg.MAXEUCDIST, 0, cfg.MAXCOSTDIST, 0, DISABLE_LEAST_COST_NO_VAL) # ------------------------------------------------------------------ # Bounding boxes if (cfg.BUFFERDIST) is not None: # create bounding boxes around cores start_time = time.clock() # lu.dashline(1) gprint('Calculating bounding boxes for core areas.') extentBoxList = npy.zeros((0,5), dtype='float32') for x in range(len(coresToMap)): core = coresToMap[x] boxCoords = lu.get_extent_box_coords(core) extentBoxList = npy.append(extentBoxList, boxCoords, axis=0) gprint('\nDone calculating bounding boxes.') start_time = lu.elapsed_time(start_time) # lu.dashline() # Bounding circle code if cfg.BUFFERDIST is not None: # Make a set of circles encompassing core areas we'll be connecting start_time = time.clock() gprint('Calculating bounding circles around potential' ' corridors.') # x y corex corey radius- stores data for bounding circle centroids boundingCirclePointArray = npy.zeros((0,5), dtype='float32') circleList = npy.zeros((0,3), dtype='int32') numLinks = linkTable.shape[0] for x in range(0, numLinks): if ((linkTable[x,cfg.LTB_LINKTYPE] == cfg.LT_CORR) or (linkTable[x,cfg.LTB_LINKTYPE] == cfg.LT_KEEP)): # if it's a valid corridor link linkId = int(linkTable[x,cfg.LTB_LINKID]) # fixme- this code is clumsy- can trim down cores = npy.zeros((1,3), dtype='int32') cores[0,:] = npy.sort([0, linkTable[x,cfg.LTB_CORE1], linkTable[x,cfg.LTB_CORE2]]) corex = cores[0,1] corey = cores[0,2] cores[0,0] = linkId ################### foundFlag = False for y in range(0,len(circleList)): # clumsy if (circleList[y,1] == corex and circleList[y,2] == corey): foundFlag = True if not foundFlag: circlePointData = ( lu.get_bounding_circle_data(extentBoxList, corex, corey, cfg.BUFFERDIST)) boundingCirclePointArray = ( npy.append(boundingCirclePointArray, circlePointData, axis=0)) # keep track of which cores we draw bounding circles # around circleList = npy.append(circleList, cores, axis=0) gprint('\nCreating bounding circles using buffer ' 'analysis.') dir, BNDCIRCENS = path.split(cfg.BNDCIRCENS) lu.make_points(cfg.SCRATCHDIR, boundingCirclePointArray, BNDCIRCENS) lu.delete_data(cfg.BNDCIRS) gp.buffer_analysis(cfg.BNDCIRCENS, cfg.BNDCIRS, "radius") gp.deletefield (cfg.BNDCIRS, "BUFF_DIST") gprint('Successfully created bounding circles around ' 'potential corridors using \na buffer of ' + str(float(cfg.BUFFERDIST)) + ' map units.') start_time = lu.elapsed_time(start_time) gprint('Reducing global processing area using bounding ' 'circle plus buffer of ' + str(float(cfg.BUFFERDIST)) + ' map units.\n') extentBoxList = npy.zeros((0,5),dtype='float32') boxCoords = lu.get_extent_box_coords() extentBoxList = npy.append(extentBoxList,boxCoords,axis=0) extentBoxList[0,0] = 0 boundingCirclePointArray = npy.zeros((0,5),dtype='float32') circlePointData=lu.get_bounding_circle_data(extentBoxList, 0, 0, cfg.BUFFERDIST) dir, BNDCIRCEN = path.split(cfg.BNDCIRCEN) lu.make_points(cfg.SCRATCHDIR, circlePointData, BNDCIRCEN) lu.delete_data(cfg.BNDCIR) gp.buffer_analysis(cfg.BNDCIRCEN, 
cfg.BNDCIR, "radius") gprint('Extracting raster....') cfg.BOUNDRESIS = cfg.BOUNDRESIS + tif lu.delete_data(cfg.BOUNDRESIS) count = 0 statement = ( 'gp.ExtractByMask_sa(cfg.RESRAST, cfg.BNDCIR, cfg.BOUNDRESIS)') while True: try: exec statement randomerror() except: count,tryAgain = lu.retry_arc_error(count,statement) if not tryAgain: exec statement else: break gprint('\nReduced resistance raster extracted using ' 'bounding circle.') else: #if not using bounding circles, just go with resistance raster. cfg.BOUNDRESIS = cfg.RESRAST # --------------------------------------------------------------------- # Rasterize core areas to speed cost distance calcs # lu.dashline(1) gprint("Creating core area raster.") gp.SelectLayerByAttribute(cfg.FCORES, "CLEAR_SELECTION") if arcpy: arcpy.env.cellSize = cfg.BOUNDRESIS arcpy.env.extent = cfg.BOUNDRESIS else: gp.cellSize = gp.Describe(cfg.BOUNDRESIS).MeanCellHeight gp.extent = gp.Describe(cfg.BOUNDRESIS).extent if rerun: # saved linktable replaces the one now in memory linkTable = lu.load_link_table(savedLinkTableFile) coresToMapSaved = npy.loadtxt(coreListFile, dtype='Float64', comments='#', delimiter=',') startIndex = coresToMapSaved[0] # Index of core where we left off del coresToMapSaved gprint ('\n****** Re-starting run at core area number ' + str(int(coresToMap[startIndex]))+ ' ******\n') lu.dashline(0) if arcpy: arcpy.env.extent = "MINOF" else: gp.extent = "MINOF" #---------------------------------------------------------------------- # Loop through cores, do cwd calcs for each if cfg.TOOL == cfg.TOOL_CC: gprint("\nMapping least-cost paths.\n") else: gprint("\nStarting cost distance calculations.\n") lcpLoop = 0 failures = 0 x = startIndex endIndex = len(coresToMap) linkTableMod = linkTable.copy() while x < endIndex: startTime1 = time.clock() # Modification of linkTable in function was causing problems. so # make a copy: linkTablePassed = linkTableMod.copy() (linkTableReturned, failures, lcpLoop) = do_cwd_calcs(x, linkTablePassed, coresToMap, lcpLoop, failures) if failures == 0: # If iteration was successful, continue with next core linkTableMod = linkTableReturned sourceCore = int(coresToMap[x]) gprint('Done with all calculations for core ID #' + str(sourceCore) + '. 
' + str(int(x + 1)) + ' of ' + str(endIndex) + ' cores have been processed.') start_time = lu.elapsed_time(startTime1) outlinkTableFile = path.join(cfg.DATAPASSDIR, "temp_linkTable_s3_partial.csv") lu.write_link_table(linkTableMod, outlinkTableFile) # Increment loop counter x = x + 1 else: # If iteration failed, try again after a wait period delay_restart(failures) #---------------------------------------------------------------------- linkTable = linkTableMod # reinstate temporarily disabled links rows = npy.where(linkTable[:,cfg.LTB_LINKTYPE] > 1000) linkTable[rows,cfg.LTB_LINKTYPE] = (linkTable[rows,cfg.LTB_LINKTYPE] - 1000) # Drop links that are too long DISABLE_LEAST_COST_NO_VAL = True linkTable,numDroppedLinks = lu.drop_links(linkTable, cfg.MAXEUCDIST, cfg.MINEUCDIST, cfg.MAXCOSTDIST, cfg.MINCOSTDIST, DISABLE_LEAST_COST_NO_VAL) # Write link table file outlinkTableFile = lu.get_this_step_link_table(step=3) gprint('Updating ' + outlinkTableFile) lu.write_link_table(linkTable, outlinkTableFile) linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s3.csv") lu.write_link_table(linkTable, linkTableLogFile) start_time = time.clock() gprint('Creating shapefiles with linework for links...') try: lu.write_link_maps(outlinkTableFile, step=3) except: lu.write_link_maps(outlinkTableFile, step=3) start_time = lu.elapsed_time(start_time) gprint('\nIndividual cost-weighted distance layers written ' 'to "cwd" directory. \n') gprint(outlinkTableFile + '\n updated with cost-weighted distances between core areas.') #Clean up temporary files for restart code tempFile = path.join(cfg.DATAPASSDIR, "temp_cores_to_map.csv") lu.delete_file(tempFile) tempFile = path.join(cfg.DATAPASSDIR, "temp_linkTable_s3_partial.csv") lu.delete_file(tempFile) # Check if climate tool is calling linkage mapper if cfg.TOOL == cfg.TOOL_CC: coreList = npy.unique(linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]) for core in coreList: cwdRaster = lu.get_cwd_path(int(core)) back_rast = cwdRaster.replace("cwd_", "back_") lu.delete_data(back_rast) # Return GEOPROCESSING specific errors except arcgisscripting.ExecuteError: lu.dashline(1) gprint('****Failed in step 3. Details follow.****') lu.exit_with_geoproc_error(_SCRIPT_NAME) # Return any PYTHON or system specific errors except: lu.dashline(1) gprint('****Failed in step 3. Details follow.****') lu.exit_with_python_error(_SCRIPT_NAME) return
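# --- Editor's note (illustrative sketch) ------------------------------------
# STEP3_calc_cwds() above implements its RESTART mode by checkpointing two
# files after every core: the partial link table
# (temp_linkTable_s3_partial.csv) and the index of the core to resume at
# (temp_cores_to_map.csv). The checkpoint/resume pattern, minus the ArcGIS
# plumbing, is roughly as follows; the helper names and the exact savetxt
# formatting are assumptions:
import os
import numpy as npy

def save_checkpoint(datapass_dir, link_table, next_index):
    """Persist partial results so a crashed run can resume."""
    npy.savetxt(os.path.join(datapass_dir, 'temp_linkTable_s3_partial.csv'),
                link_table, delimiter=',')
    npy.savetxt(os.path.join(datapass_dir, 'temp_cores_to_map.csv'),
                [next_index], delimiter=',')

def load_checkpoint(datapass_dir):
    """Return (link_table, start_index), or None if there is no partial run."""
    table_file = os.path.join(datapass_dir, 'temp_linkTable_s3_partial.csv')
    index_file = os.path.join(datapass_dir, 'temp_cores_to_map.csv')
    if not (os.path.exists(table_file) and os.path.exists(index_file)):
        return None
    link_table = npy.loadtxt(table_file, delimiter=',', comments='#')
    start_index = int(npy.loadtxt(index_file, delimiter=',', comments='#'))
    return link_table, start_index
# ----------------------------------------------------------------------------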
def STEP8_calc_pinchpoints(): """ Maps pinch points in Linkage Mapper corridors using Circuitscape given CWD calculations from s3_calcCwds.py. """ try: lu.dashline(0) gprint('Running script ' + _SCRIPT_NAME) restartFlag = False if cfg.CWDCUTOFF < 0: cfg.CWDCUTOFF = cfg.CWDCUTOFF * -1 restartFlag = True # Restart code in progress CSPATH = lu.get_cs_path() outputGDB = path.join(cfg.OUTPUTDIR, path.basename(cfg.PINCHGDB)) arcpy.env.overwriteOutput = True arcpy.env.workspace = cfg.SCRATCHDIR arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR arcpy.env.pyramid = "NONE" arcpy.env.rasterStatistics = "NONE" # Set the analysis extent and cell size to that of the resistance # surface arcpy.env.extent = cfg.RESRAST arcpy.env.cellSize = cfg.RESRAST arcpy.env.snapRaster = cfg.RESRAST resRaster = cfg.RESRAST arcpy.env.extent = "MINOF" minObject = arcpy.GetRasterProperties_management(resRaster, "MINIMUM") rasterMin = float(str(minObject.getOutput(0))) if rasterMin <= 0: msg = ('Error: resistance raster cannot have 0 or negative values.') lu.raise_error(msg) if cfg.DO_ADJACENTPAIRS: prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep=8) if not arcpy.Exists(prevLcpShapefile): msg = ('Cannot find an LCP shapefile from step 5. Please ' 'rerun that step and any previous ones if necessary.') lu.raise_error(msg) # Remove lcp shapefile lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s8.shp") lu.delete_data(lcpShapefile) inLinkTableFile = lu.get_prev_step_link_table(step=8) linkTable = lu.load_link_table(inLinkTableFile) numLinks = linkTable.shape[0] numCorridorLinks = lu.report_links(linkTable) if numCorridorLinks == 0: lu.dashline(1) msg = ('\nThere are no linkages. Bailing.') lu.raise_error(msg) if linkTable.shape[1] < 16: # If linktable has no entries from prior # centrality or pinchpoint analyses extraCols = npy.zeros((numLinks, 6), dtype="float64") linkTable = linkTable[:, 0:10] linkTable = npy.append(linkTable, extraCols, axis=1) linkTable[:, cfg.LTB_LCPLEN] = -1 linkTable[:, cfg.LTB_CWDEUCR] = -1 linkTable[:, cfg.LTB_CWDPATHR] = -1 linkTable[:, cfg.LTB_EFFRESIST] = -1 linkTable[:, cfg.LTB_CWDTORR] = -1 linkTable[:, cfg.LTB_CURRENT] = -1 del extraCols # Set up directories for circuit and circuit mosaic grids # Create output geodatabase if not arcpy.Exists(cfg.PINCHGDB): arcpy.CreateFileGDB_management(cfg.OUTPUTDIR, path.basename(cfg.PINCHGDB)) mosaicRaster = path.join(cfg.CIRCUITBASEDIR, "current_mos" + tif) coresToProcess = npy.unique(linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]) maxCoreNum = max(coresToProcess) del coresToProcess lu.dashline(0) coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1] coreList = npy.sort(coreList) # gprint('There are ' + str(len(npy.unique(coreList))) + ' core areas.') INCIRCUITDIR = cfg.CIRCUITBASEDIR OUTCIRCUITDIR = path.join(cfg.CIRCUITBASEDIR, cfg.CIRCUITOUTPUTDIR_NM) CONFIGDIR = path.join(INCIRCUITDIR, cfg.CIRCUITCONFIGDIR_NM) # Cutoff value text to append to filenames cutoffText = str(cfg.CWDCUTOFF) if cutoffText[-6:] == '000000': cutoffText = cutoffText[0:-6] + 'm' elif cutoffText[-3:] == '000': cutoffText = cutoffText[0:-3] + 'k' if cfg.SQUARERESISTANCES: # Square resistance values squaredRaster = path.join(cfg.SCRATCHDIR, 'res_sqr') arcpy.env.workspace = cfg.SCRATCHDIR arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR outRas = Raster(resRaster) * Raster(resRaster) outRas.save(squaredRaster) resRaster = squaredRaster if cfg.DO_ADJACENTPAIRS: linkLoop = 0 lu.dashline(1) gprint('Mapping pinch points in individual corridors \n' 'using Circuitscape.') lu.dashline(1) gprint('If 
you try to cancel your run and the Arc dialog hangs, ') gprint('you can kill Circuitscape by opening Windows Task Manager') gprint('and ending the cs_run.exe process.') lu.dashline(2) for x in range(0, numLinks): linkId = str(int(linkTable[x, cfg.LTB_LINKID])) if not (linkTable[x, cfg.LTB_LINKTYPE] > 0): continue linkLoop = linkLoop + 1 linkDir = path.join(cfg.SCRATCHDIR, 'link' + linkId) if restartFlag == True and path.exists(linkDir): gprint('continuing') continue restartFlag = False lu.create_dir(linkDir) start_time1 = time.clock() # source and target cores corex = int(coreList[x, 0]) corey = int(coreList[x, 1]) # Get cwd rasters for source and target cores cwdRaster1 = lu.get_cwd_path(corex) cwdRaster2 = lu.get_cwd_path(corey) lccNormRaster = path.join(linkDir, 'lcc_norm') arcpy.env.extent = "MINOF" link = lu.get_links_from_core_pairs(linkTable, corex, corey) lcDist = float(linkTable[link, cfg.LTB_CWDIST]) # Normalized lcc rasters are created by adding cwd rasters # and subtracting the least cost distance between them. outRas = Raster(cwdRaster1) + Raster(cwdRaster2) - lcDist outRas.save(lccNormRaster) #create raster mask resMaskRaster = path.join(linkDir, 'res_mask' + tif) #create raster mask outCon = arcpy.sa.Con( Raster(lccNormRaster) <= cfg.CWDCUTOFF, 1) outCon.save(resMaskRaster) # Convert to poly. Use as mask to clip resistance raster. resMaskPoly = path.join(linkDir, 'res_mask_poly.shp') arcpy.RasterToPolygon_conversion(resMaskRaster, resMaskPoly, "NO_SIMPLIFY") arcpy.env.extent = resMaskPoly # Includes 0 values in some cases with CP LI model if tif # so using ESRI Grid format resClipRasterMasked = path.join(linkDir, 'res_clip_m') # Extract masked resistance raster. # Needs to be float to get export to npy to work. outRas = arcpy.sa.ExtractByMask(resRaster, resMaskPoly) + 0.0 outRas.save(resClipRasterMasked) resNpyFN = 'resistances_link_' + linkId + '.npy' resNpyFile = path.join(INCIRCUITDIR, resNpyFN) numElements, numResistanceNodes = export_ras_to_npy( resClipRasterMasked, resNpyFile) totMem, availMem = lu.get_mem() # gprint('Total memory: str(totMem)) if numResistanceNodes / availMem > 2000000: lu.dashline(1) lu.warn('Warning:') lu.warn('Circuitscape can only solve 2-3 million nodes') lu.warn( 'per gigabyte of available RAM. \nTotal physical RAM' ' on your machine is ~' + str(totMem) + ' GB. \nAvailable memory is ~' + str(availMem) + ' GB. 
\nYour resistance raster has ' + str(numResistanceNodes) + ' nodes.') lu.dashline(2) corePairRaster = path.join(linkDir, 'core_pairs' + tif) arcpy.env.extent = resClipRasterMasked # Next result needs to be floating pt for numpy export outCon = arcpy.sa.Con( Raster(cwdRaster1) == 0, corex, arcpy.sa.Con(Raster(cwdRaster2) == 0, corey + 0.0)) outCon.save(corePairRaster) coreNpyFN = 'cores_link_' + linkId + '.npy' coreNpyFile = path.join(INCIRCUITDIR, coreNpyFN) numElements, numNodes = export_ras_to_npy( corePairRaster, coreNpyFile) arcpy.env.extent = "MINOF" # Set circuitscape options and call options = lu.setCircuitscapeOptions() if cfg.WRITE_VOLT_MAPS == True: options['write_volt_maps'] = True options['habitat_file'] = resNpyFile # if int(linkId) > 2: # options['habitat_file'] = 'c:\\test.dummy' options['point_file'] = coreNpyFile options['set_focal_node_currents_to_zero'] = True outputFN = 'Circuitscape_link' + linkId + '.out' options['output_file'] = path.join(OUTCIRCUITDIR, outputFN) if numElements > 250000: options['print_timings'] = True configFN = 'pinchpoint_config' + linkId + '.ini' outConfigFile = path.join(CONFIGDIR, configFN) lu.writeCircuitscapeConfigFile(outConfigFile, options) gprint('Processing link ID #' + str(linkId) + '. Resistance map' ' has ' + str(int(numResistanceNodes)) + ' nodes.') memFlag = call_circuitscape(CSPATH, outConfigFile) currentFN = ('Circuitscape_link' + linkId + '_cum_curmap.npy') currentMap = path.join(OUTCIRCUITDIR, currentFN) if not arcpy.Exists(currentMap): print_failure(numResistanceNodes, memFlag, 10) numElements, numNodes = export_ras_to_npy( resClipRasterMasked, resNpyFile) memFlag = call_circuitscape(CSPATH, outConfigFile) currentFN = ('Circuitscape_link' + linkId + '_cum_curmap.npy') currentMap = path.join(OUTCIRCUITDIR, currentFN) if not arcpy.Exists(currentMap): msg = ( '\nCircuitscape failed. 
See error information above.') arcpy.AddError(msg) lu.write_log(msg) exit(1) # Either set core areas to nodata in current map or # divide each by its radius currentRaster = path.join(linkDir, "current" + tif) import_npy_to_ras(currentMap, corePairRaster, currentRaster) if cfg.WRITE_VOLT_MAPS == True: voltFN = ('Circuitscape_link' + linkId + '_voltmap_' + str(corex) + '_' + str(corey) + '.npy') voltMap = path.join(OUTCIRCUITDIR, voltFN) voltRaster = path.join( outputGDB, cfg.PREFIX + "_voltMap_" + str(corex) + '_' + str(corey)) import_npy_to_ras(voltMap, corePairRaster, voltRaster) gprint('Building output statistics and pyramids ' 'for voltage raster\n') lu.build_stats(voltRaster) arcpy.env.extent = currentRaster if SETCORESTONULL: # Set core areas to NoData in current map for color ramping currentRaster2 = currentRaster + '2' + tif outCon = arcpy.sa.Con( arcpy.sa.IsNull(Raster(corePairRaster)), Raster(currentRaster)) outCon.save(currentRaster2) currentRaster = currentRaster2 arcpy.env.extent = "MAXOF" if linkLoop == 1: lu.delete_data(mosaicRaster) @retry(10) def copyRas2(): arcpy.CopyRaster_management(currentRaster, mosaicRaster) copyRas2() else: @retry(10) def mosaicRas(): arcpy.Mosaic_management(currentRaster, mosaicRaster, "MAXIMUM", "MATCH") mosaicRas() resistancesFN = ('Circuitscape_link' + linkId + '_resistances_3columns.out') resistancesFile = path.join(OUTCIRCUITDIR, resistancesFN) resistances = npy.loadtxt(resistancesFile, dtype='Float64', comments='#') resistance = float(str(arcpy.env.cellSize)) * resistances[2] linkTable[link, cfg.LTB_EFFRESIST] = resistance # Ratio if not cfg.SQUARERESISTANCES: linkTable[link, cfg.LTB_CWDTORR] = ( linkTable[link, cfg.LTB_CWDIST] / linkTable[link, cfg.LTB_EFFRESIST]) # Clean up if cfg.SAVE_TEMP_CIRCUIT_FILES == False: lu.delete_file(coreNpyFile) coreNpyBase, extension = path.splitext(coreNpyFile) lu.delete_data(coreNpyBase + '.hdr') lu.delete_file(resNpyFile) resNpyBase, extension = path.splitext(resNpyFile) lu.delete_data(resNpyBase + '.hdr') lu.delete_file(currentMap) curMapBase, extension = path.splitext(currentMap) lu.delete_data(curMapBase + '.hdr') lu.delete_data(currentRaster) lu.clean_out_workspace(linkDir) lu.delete_dir(linkDir) gprint('Finished with link ID #' + str(linkId) + '. ' + str(linkLoop) + ' out of ' + str(numCorridorLinks) + ' links have been processed.') start_time1 = lu.elapsed_time(start_time1) outputRaster = path.join( outputGDB, cfg.PREFIX + "_current_adjacentPairs_" + cutoffText) lu.delete_data(outputRaster) @retry(10) def copyRas(): arcpy.CopyRaster_management(mosaicRaster, outputRaster) copyRas() gprint('Building output statistics and pyramids ' 'for corridor pinch point raster\n') lu.build_stats(outputRaster) finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=5, thisStep=8) linkTableFile = path.join(cfg.DATAPASSDIR, "linkTable_s5_plus.csv") lu.write_link_table(finalLinkTable, linkTableFile, inLinkTableFile) linkTableFinalFile = path.join( cfg.OUTPUTDIR, cfg.PREFIX + "_linkTable_s5_plus.csv") lu.write_link_table(finalLinkTable, linkTableFinalFile, inLinkTableFile) gprint('Copy of linkTable written to ' + linkTableFinalFile) #fixme: update sticks? gprint('Creating shapefiles with linework for links.') lu.write_link_maps(linkTableFinalFile, step=8) # Copy final link maps to gdb. 
lu.copy_final_link_maps(step=8) lu.delete_data(mosaicRaster) if not cfg.DO_ALLPAIRS: # Clean up temporary files if not cfg.SAVECURRENTMAPS: lu.delete_dir(OUTCIRCUITDIR) return lu.dashline(1) gprint('Mapping global pinch points among all\n' 'core area pairs using Circuitscape.') if cfg.ALL_PAIR_SCENARIO == 'pairwise': gprint('Circuitscape will be run in PAIRWISE mode.') else: gprint('Circuitscape will be run in ALL-TO-ONE mode.') arcpy.env.workspace = cfg.SCRATCHDIR arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR arcpy.env.extent = cfg.RESRAST arcpy.env.cellSize = cfg.RESRAST S8CORE_RAS = "s8core_ras" s8CoreRasPath = path.join(cfg.SCRATCHDIR, S8CORE_RAS) arcpy.FeatureToRaster_conversion(cfg.COREFC, cfg.COREFN, s8CoreRasPath, arcpy.env.cellSize) binaryCoreRaster = path.join(cfg.SCRATCHDIR, "core_ras_bin") # The following commands cause file lock problems on save. using gp # instead. # outCon = arcpy.sa.Con(S8CORE_RAS, 1, "#", "VALUE > 0") # outCon.save(binaryCoreRaster) # gp.Con_sa(s8CoreRasPath, 1, binaryCoreRaster, "#", "VALUE > 0") outCon = arcpy.sa.Con(Raster(s8CoreRasPath) > 0, 1) outCon.save(binaryCoreRaster) s5corridorRas = path.join(cfg.OUTPUTGDB, cfg.PREFIX + "_corridors") if not arcpy.Exists(s5corridorRas): s5corridorRas = path.join(cfg.OUTPUTGDB, cfg.PREFIX + "_lcc_mosaic_int") outCon = arcpy.sa.Con( Raster(s5corridorRas) <= cfg.CWDCUTOFF, Raster(resRaster), arcpy.sa.Con(Raster(binaryCoreRaster) > 0, Raster(resRaster))) resRasClipPath = path.join(cfg.SCRATCHDIR, 'res_ras_clip') outCon.save(resRasClipPath) arcpy.env.cellSize = resRasClipPath arcpy.env.extent = resRasClipPath s8CoreRasClipped = s8CoreRasPath + '_c' # Produce core raster with same extent as clipped resistance raster # added to ensure correct data type- nodata values were positive for # cores otherwise outCon = arcpy.sa.Con(arcpy.sa.IsNull(Raster(s8CoreRasPath)), -9999, Raster(s8CoreRasPath)) outCon.save(s8CoreRasClipped) resNpyFN = 'resistances.npy' resNpyFile = path.join(INCIRCUITDIR, resNpyFN) numElements, numResistanceNodes = export_ras_to_npy( resRasClipPath, resNpyFile) totMem, availMem = lu.get_mem() # gprint('Total memory: str(totMem)) if numResistanceNodes / availMem > 2000000: lu.dashline(1) lu.warn('Warning:') lu.warn('Circuitscape can only solve 2-3 million nodes') lu.warn('per gigabyte of available RAM. \nTotal physical RAM ' 'on your machine is ~' + str(totMem) + ' GB. \nAvailable memory is ~' + str(availMem) + ' GB. 
\nYour resistance raster has ' + str(numResistanceNodes) + ' nodes.') lu.dashline(0) coreNpyFN = 'cores.npy' coreNpyFile = path.join(INCIRCUITDIR, coreNpyFN) numElements, numNodes = export_ras_to_npy(s8CoreRasClipped, coreNpyFile) arcpy.env.extent = "MINOF" options = lu.setCircuitscapeOptions() options['scenario'] = cfg.ALL_PAIR_SCENARIO options['habitat_file'] = resNpyFile options['point_file'] = coreNpyFile options['set_focal_node_currents_to_zero'] = True outputFN = 'Circuitscape.out' options['output_file'] = path.join(OUTCIRCUITDIR, outputFN) options['print_timings'] = True configFN = 'pinchpoint_allpair_config.ini' outConfigFile = path.join(CONFIGDIR, configFN) lu.writeCircuitscapeConfigFile(outConfigFile, options) gprint('\nResistance map has ' + str(int(numResistanceNodes)) + ' nodes.') lu.dashline(1) gprint('If you try to cancel your run and the Arc dialog hangs, ') gprint('you can kill Circuitscape by opening Windows Task Manager') gprint('and ending the cs_run.exe process.') lu.dashline(0) call_circuitscape(CSPATH, outConfigFile) # test = subprocess.call([CSPATH, outConfigFile], # creationflags = subprocess.CREATE_NEW_CONSOLE) if options['scenario'] == 'pairwise': rasterSuffix = "_current_allPairs_" + cutoffText else: rasterSuffix = "_current_allToOne_" + cutoffText currentFN = 'Circuitscape_cum_curmap.npy' currentMap = path.join(OUTCIRCUITDIR, currentFN) outputRaster = path.join(outputGDB, cfg.PREFIX + rasterSuffix) currentRaster = path.join(cfg.SCRATCHDIR, "current") try: import_npy_to_ras(currentMap, resRasClipPath, outputRaster) except: lu.dashline(1) msg = ('ERROR: Circuitscape failed. \n' 'Note: Circuitscape can only solve 2-3 million nodes' '\nper gigabyte of available RAM. The resistance ' '\nraster for the last corridor had ' + str(numResistanceNodes) + ' nodes.\n\nResistance ' 'raster values that vary by >6 orders of \nmagnitude' ' can also cause failures, as can a mismatch in ' '\ncore area and resistance raster extents.') arcpy.AddError(msg) lu.write_log(msg) exit(1) #set core areas to nodata if SETCORESTONULL: # Set core areas to NoData in current map for color ramping outputRasterND = outputRaster + '_noDataCores' outCon = arcpy.sa.SetNull( Raster(s8CoreRasClipped) > 0, Raster(outputRaster)) outCon.save(outputRasterND) gprint('\nBuilding output statistics and pyramids ' 'for centrality raster.') lu.build_stats(outputRaster) lu.build_stats(outputRasterND) # Clean up temporary files if not cfg.SAVECURRENTMAPS: lu.delete_dir(OUTCIRCUITDIR) # Return GEOPROCESSING specific errors except arcpy.ExecuteError: lu.dashline(1) gprint('****Failed in step 8. Details follow.****') lu.exit_with_geoproc_error(_SCRIPT_NAME) # Return any PYTHON or system specific errors except: lu.dashline(1) gprint('****Failed in step 8. Details follow.****') lu.exit_with_python_error(_SCRIPT_NAME)
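# --- Editor's note (illustrative sketch) ------------------------------------
# The per-link loop in STEP8_calc_pinchpoints() above normalizes each
# corridor before running Circuitscape:
#
#     lccNorm = cwdRaster1 + cwdRaster2 - lcDist
#
# which is 0 along the least-cost path and grows with detour cost, and then
# keeps resistance only where lccNorm <= cfg.CWDCUTOFF. The same masking in
# plain numpy, assuming aligned arrays with NaN as NoData (the function name
# is a placeholder):
import numpy as npy

def corridor_resistance_clip(cwd1, cwd2, lc_dist, cwd_cutoff, resistance):
    """Clip resistance to the normalized least-cost corridor."""
    lcc_norm = cwd1 + cwd2 - lc_dist   # 0 on the least-cost path itself
    return npy.where(lcc_norm <= cwd_cutoff, resistance, npy.nan)
# ----------------------------------------------------------------------------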
def main(): """Iterates over LM, BM, and restoration tasks""" ## USER SETTINGS ###################################################### ## Restoration Settings ## ALL input data must be in the same projection start_time = time.clock() restoreMaxROI = False # Set to True to restore highest ROI # Set to False to restore strongest barrier restoredResistanceVal = 1 # Resistance value of restored habitat. Must be 1 or greater. restorationDataGDB = "C:\\barrierClassAnalysis\\RestorationINPUTS_July2013.gdb" # No spaces or special chars in paths or gdb names outputDir = "C:\\barrierClassAnalysis\\output" # No spaces in path, avoid using dropbox or network drive # Project directories will be created in this (iter1, iter2...) # as will an output geodatabase resistanceRaster = "URWA_resis"# Resistance raster. Should be in input GDB coreFC = 'URWA_HCAs_Doug_Grant'# Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant' coreFN = 'HCA_ID' # Core area field name radius = 450 # restoration radius in meters iterations = 13 # number of restorations to perform minAgThreshold = 0.75 # if less than this proportion of ag in circle, don't consider restoring circle minImprovementVal = 0 # Don't consider barriers below this improvement score (average improvement per meter diameter restored) parcelCostRaster = 'DougGrantParcelCost_m2_projected_90m' # Average per-m2 parcel cost per pixel. Snapped to resistance raster. restorationCostRaster = 'restCostPer_m2' # Right now this is just a raster with all pixels set to 0.113174 agRaster = "ARESmaskp_projected" # 1=Ag, 0 = not Ag barrierCombineMethod = 'Maximum' # Some restorations benefit multiple corridors. # 'Maximum' takes the greatest improvement across core area pairs # 'Sum' adds improvement scores acreoss all pairs. cwdThresh = None # Use cwdThresh = None for no threshold. Use cwdThresh = X to not consider # restorations more than X map units away from each core area. ## END USER SETTINGS ###################################################### try: # Setup path and create directories gprint('Hey! Make sure everything is in the same projection!\n') gprint('Setting up paths and creating directories') sys.path.append('..\\toolbox\\scripts') resRast = os.path.join(restorationDataGDB, resistanceRaster) coreFCPath = os.path.join(restorationDataGDB, coreFC) # Set up a NEW output gdb (leave previous ones on drive) for i in range (1,200): outputGDB = 'restorationOutput'+str(i)+'.gdb' if not arcpy.Exists(os.path.join(outputDir,outputGDB)): break gprint('Previous output GDB '+ outputGDB +' exists. 
Delete to save disk space.') arcpy.CreateFileGDB_management(outputDir,outputGDB) outputGDB = os.path.join(outputDir,outputGDB) logFile = os.path.join(outputGDB,'Iterate Barriers'+str(i)+'.py') shutil.copyfile(__file__, logFile) #write a copy of this file to output dir as a record of settings arcpy.env.cellSize = resRast arcpy.env.extent = resRast arcpy.env.snapRaster = resRast arcpy.env.overwriteOutput = True arcpy.env.scratchWorkspace = outputGDB arcpy.env.workspace = outputGDB spatialref = arcpy.Describe(resRast).spatialReference mapunits = spatialref.linearUnitName gprint('Cell size = ' + str(arcpy.env.cellSize) + ' ' + mapunits +'s') # Calculate fraction of ag within radius of each pixel gprint('Calculating purchase cost, fraction of ag, etc within radius of each pixel.') agRaster = os.path.join(restorationDataGDB, agRaster) inNeighborhood = NbrCircle(radius, "MAP") arcpy.env.extent = agRaster outFocalStats = arcpy.sa.FocalStatistics(agRaster, inNeighborhood, "MEAN","NODATA") proportionAgRaster = os.path.join(outputGDB,'proportionAgRas') outFocalStats.save(proportionAgRaster) arcpy.env.extent = resRast # Calculate purchase cost of circles parcelCostRaster = os.path.join(restorationDataGDB, parcelCostRaster) arcpy.env.extent = parcelCostRaster outFocalStats = arcpy.sa.FocalStatistics(parcelCostRaster,inNeighborhood, "MEAN","DATA") costFocalStatsRaster = os.path.join(outputGDB,'costFocalStatsRaster') outFocalStats.save(costFocalStatsRaster) arcpy.env.extent = resRast circleArea = float(npy.pi * radius * radius) outras = (Raster(costFocalStatsRaster) * circleArea) purchCostRaster = os.path.join(outputGDB,'purchaseCostRaster') outras.save(purchCostRaster) lu.delete_data(costFocalStatsRaster) # restCost = npy.pi * radius * radius * restCostPer_m2 restorationCostRaster = os.path.join(restorationDataGDB, restorationCostRaster) outras = Raster(purchCostRaster) + (Raster(restorationCostRaster) * radius * radius * npy.pi) totalCostRaster = os.path.join(outputGDB,'totalCostRaster') outras.save(totalCostRaster) # lu.build_stats(totalCostRaster) # Create mask to remove areas without cost data arcpy.env.extent = totalCostRaster costMaskRaster = os.path.join(outputGDB,'costMaskRaster') costThresh = 0 outCon = arcpy.sa.Con((Raster(totalCostRaster) > float(costThresh)), 1) outCon.save(costMaskRaster) arcpy.env.extent = resRast # Create mask to remove areas below ag threshold outCon = arcpy.sa.Con((Raster(proportionAgRaster) > float(minAgThreshold)), 1) agMaskRaster = os.path.join(outputGDB, 'agMaskRaster') outCon.save(agMaskRaster) doStep1 = 'true' doStep2 = 'true' doStep5 = 'false' for iter in range(1,iterations+1): #xxx start_time1 = time.clock() arcpy.env.cellSize = resRast # Some env settings get changed by linkage mapper and must be reset here arcpy.env.extent = resRast arcpy.env.snapRaster = resRast arcpy.env.overwriteOutput = True arcpy.env.scratchWorkspace = outputGDB arcpy.env.workspace = outputGDB lu.dashline(1) gprint('Running iteration number '+str(iter)) projDir = os.path.join(outputDir,'iter' + str(iter)+'Proj') lu.create_dir(outputDir) lu.delete_dir(projDir) #xxx lu.create_dir(projDir) if iter > 1: # Copy previous s2 linktable to new project directory datapassDir = os.path.join(projDir,'datapass') lu.create_dir(datapassDir) projDir1 = os.path.join(outputDir,'iter1Proj') datapassDirIter1 = os.path.join(projDir1,'datapass') s2LinktableIter1 = os.path.join(datapassDirIter1 ,'linkTable_s2.csv') s2LinkTable = os.path.join(datapassDir ,'linkTable_s2.csv') shutil.copyfile(s2LinktableIter1, 
s2LinkTable) # Run Linkage Mapper distFile = os.path.join(outputDir, coreFC + '_dists.txt') # Copy distances text file from earlier LM run to the output directory- speeds things up! if not os.path.exists(distFile): if iter == 1: gprint('Will calculate distance file.') distFile = '#' else: projDir1 = os.path.join(outputDir,'iter1Proj') distFile1 = os.path.join(projDir1, coreFC + '_dists.txt') shutil.copyfile(distFile1,distFile) # Put a copy here for future runs arcpy.env.overwriteOutput = True arcpy.env.scratchWorkspace = outputGDB arcpy.env.workspace = outputGDB argv = ('lm_master.py', projDir, coreFCPath, coreFN, resRast, doStep1, doStep2, 'Cost-Weighted & Euclidean', distFile, 'true', 'true', 'false', '4', 'Cost-Weighted', 'true', doStep5, 'true', '200000', '10000', '#', '#', '#', '#') gprint('Running ' + str(argv)) cfg.lm_configured = False # Insures lm_master uses current argv lm_master.lm_master(argv) #xxx doStep1 = 'false' # Can skip for future iterations doStep2 = 'false' # Can skip for future iterations doStep5 = 'false' # Skipping for future iterations startRadius = str(radius) endRadius = str(radius) radiusStep = '0' saveRadiusRasters= 'false' writePctRasters = 'false' argv = ('barrier_master.py', projDir, resRast, startRadius, endRadius, radiusStep, barrierCombineMethod, saveRadiusRasters, writePctRasters, cwdThresh) gprint('Running ' + str(argv)) barrier_master.bar_master(argv) #xxx arcpy.env.cellSize = resRast # Some env settings get changed by linkage mapper and must be reset here arcpy.env.extent = resRast arcpy.env.snapRaster = resRast arcpy.env.overwriteOutput = True arcpy.env.scratchWorkspace = outputGDB arcpy.env.workspace = outputGDB gprint('Finding restoration circles with max barrier score / ROI') # Find points with max ROI PREFIX = os.path.basename(projDir) if barrierCombineMethod == 'Sum': sumSuffix = 'Sum' else: sumSuffix = '' barrierFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" + str(radius)) barrierRaster = os.path.join(projDir,'output','barriers.gdb',barrierFN) if not arcpy.Exists(barrierRaster): msg = ('Error: cannot find barrier output: '+barrierRaster) lu.raise_error(msg) # arcpy.env.cellSize = agMaskRaster # arcpy.env.extent = agMaskRaster if iter > 1: gprint('Creating mask for previously restored areas') inNeighborhood = NbrCircle(radius, "MAP") arcpy.env.extent = allRestoredAreasRaster outFocalStats = arcpy.sa.FocalStatistics(allRestoredAreasRaster,inNeighborhood, "MEAN","DATA") allRestoredFocalRaster = os.path.join(outputGDB,'allRestFocRas_iter'+str(iter)) outFocalStats.save(allRestoredFocalRaster) # Anything > 0 would include a restored area and arcpy.env.extent = resRast restMaskRaster = os.path.join(outputGDB,'restMaskRaster_iter'+str(iter)) minval = 0 outCon = arcpy.sa.Con((Raster(allRestoredFocalRaster) == float(minval)), 1) outCon.save(restMaskRaster) # Candidate areas have not been restored, have cost data, meet # minimum improvement score criteria, and have enough ag in them candidateBarrierRaster = os.path.join(outputGDB, 'candidateBarrierRaster' + '_iter'+str(iter)) if iter > 1: gprint('Creating candidate restoration raster using barrier results, previous restorations, and selection criteria') outCalc = (Raster(costMaskRaster) * Raster(agMaskRaster) * Raster(barrierRaster) * Raster(restMaskRaster) * (radius * 2)) # ROI scores will be in terms of total improvement (= score * diameter) else: outCalc = (Raster(costMaskRaster) * Raster(agMaskRaster) * Raster(barrierRaster) * radius * 2) minBarrierScore = minImprovementVal * radius * 2 if 
restoredResistanceVal != 1: outCalc2 = (outCalc - (2 * radius * (restoredResistanceVal - 1))) outCon = arcpy.sa.Con((outCalc2 >= float(minBarrierScore)), outCalc2) else: outCon = arcpy.sa.Con((outCalc >= float(minBarrierScore)), outCalc) outCon.save(candidateBarrierRaster) lu.build_stats(candidateBarrierRaster) purchaseRoiRaster = os.path.join(outputGDB, 'purchaseRoiRaster' + '_iter'+str(iter)) outCalc = Raster(candidateBarrierRaster) / Raster(purchCostRaster) outCalc.save(purchaseRoiRaster) lu.build_stats(purchaseRoiRaster) totalRoiRaster = os.path.join(outputGDB, 'purchaseRestRoiRaster' + '_iter'+str(iter)) outCalc = Raster(candidateBarrierRaster) / Raster(totalCostRaster) outCalc.save(totalRoiRaster) lu.build_stats(totalRoiRaster) maxBarrier = arcpy.GetRasterProperties_management(candidateBarrierRaster,"MAXIMUM") gprint('Maximum barrier improvement score: '+str(maxBarrier.getOutput(0))) if maxBarrier < 0: arcpy.AddWarning("\nNo barriers found that meet CWD or Ag threshold criteria.") maxPurchROI = arcpy.GetRasterProperties_management(purchaseRoiRaster,"MAXIMUM") gprint('Maximum purchase ROI score: '+str(maxPurchROI.getOutput(0))) maxROI = arcpy.GetRasterProperties_management(totalRoiRaster,"MAXIMUM") gprint('Maximum total ROI score: '+str(maxROI.getOutput(0))) if restoreMaxROI: outPoint = os.path.join(outputGDB, 'maxRoiPoint'+'_iter'+str(iter)) gprint('Choosing circle with maximum ROI to restore') outCon = arcpy.sa.Con((Raster(totalRoiRaster) >= float(maxROI.getOutput(0))), totalRoiRaster) maxRoiRaster = os.path.join(outputGDB, 'maxRoiRaster') outCon.save(maxRoiRaster) # Save max ROI to point try: arcpy.RasterToPoint_conversion(maxRoiRaster, outPoint) except: msg = ('Error: it looks like there are no viable restoration candidates.') lu.raise_error(msg) else: #Restoring strongest barrier instead outPoint = os.path.join(outputGDB, 'maxBarrierPoint'+'_iter'+str(iter)) gprint('Choosing circle with maximum BARRIER IMPROVEMENT SCORE to restore') outCon = arcpy.sa.Con((Raster(candidateBarrierRaster) >= float(maxBarrier.getOutput(0))), candidateBarrierRaster) maxBarrierRaster = os.path.join(outputGDB, 'maxBarrierRaster') outCon.save(maxBarrierRaster) # Save max barrier to point try: arcpy.RasterToPoint_conversion(maxBarrierRaster, outPoint) except: msg = ('Error: it looks like there are no viable restoration candidates.') lu.raise_error(msg) gprint('Done evaluating candidate restorations') result = int(arcpy.GetCount_management(outPoint).getOutput(0)) if result > 1: arcpy.AddWarning('Deleting points with identical ROI/improvement score values') # Would be better to retain point with max barrier score when we have multiple points with same ROI arcpy.DeleteIdentical_management(outPoint, "grid_code", 0.1, 0.1) arcpy.sa.ExtractMultiValuesToPoints(outPoint, [[candidateBarrierRaster, "barrierScore"],[purchCostRaster, "purchCost"], [totalCostRaster, "totalCost"],[purchaseRoiRaster, "purchaseROI"], [totalRoiRaster, "totalROI"]], "NONE") arcpy.AddField_management(outPoint, "restorationNumber", "SHORT") arcpy.CalculateField_management(outPoint, "restorationNumber", iter) arcpy.AddField_management(outPoint, "radius", "DOUBLE") arcpy.CalculateField_management(outPoint, "radius", radius) arcpy.AddField_management(outPoint, "barrierScore_per_m", "DOUBLE") arcpy.CalculateField_management(outPoint, "barrierScore_per_m", "(float(!barrierScore!) / (!radius! 
* 2))", "PYTHON") gprint('\nCreating restoration circles') if restoreMaxROI: circleFC = os.path.join(outputGDB, 'maxRoiCircle'+'_iter'+str(iter)) else: circleFC = os.path.join(outputGDB, 'maxBarrierCircle'+'_iter'+str(iter)) arcpy.Buffer_analysis(outPoint, circleFC, radius) gprint('Rasterizing restoration circles') if restoreMaxROI: circleRas = os.path.join(outputGDB, 'maxRoiCircleRas'+'_iter'+str(iter)) else: circleRas = os.path.join(outputGDB, 'maxBarrierCircleRas'+'_iter'+str(iter)) arcpy.FeatureToRaster_conversion(circleFC, 'totalROI', circleRas, arcpy.env.cellSize) # restore raster gprint('Digitally restoring resistance raster') resRastRestored = os.path.join(outputGDB, 'resRastRestored'+'_iter'+str(iter)) outCon = arcpy.sa.Con(IsNull(circleRas), resRast, restoredResistanceVal) outCon.save(resRastRestored) allRestoredAreasRaster = os.path.join(outputGDB, 'allRestoredAreas_iter'+str(iter)) PrevRestoredAreasRaster= os.path.join(outputGDB, 'allRestoredAreas_iter'+str(iter-1)) if iter == 1: outCon = arcpy.sa.Con(IsNull(circleRas), 0, 1) else: outCon = arcpy.sa.Con(IsNull(circleRas), PrevRestoredAreasRaster, 1) # Add this restoration to areas restored outCon.save(allRestoredAreasRaster) lu.delete_data(circleRas) resRast = resRastRestored # Use for next iteration resistance raster #Add circle into feature class with all circles if restoreMaxROI: allCirclesFC = os.path.join(outputGDB,"allCirclesMaxROI") else: allCirclesFC = os.path.join(outputGDB,"allCirclesMaxBarriers") if iter == 1: arcpy.CopyFeatures_management(circleFC, allCirclesFC) else: arcpy.Append_management(circleFC, allCirclesFC, "TEST") gprint('Finished iteration #'+str(iter)) start_time1 = lu.elapsed_time(start_time1) gprint('\nDone with iterations.') start_time = lu.elapsed_time(start_time) gprint('Outputs saved in: '+outputGDB) gprint('Back up your project directories if you want to save corridor/barrier results.') # Return GEOPROCESSING specific errors except arcpy.ExecuteError: lu.dashline(1) gprint('****Iteration script failed. Details follow.****') lu.exit_with_geoproc_error(_SCRIPT_NAME) # Return any PYTHON or system specific errors except: lu.dashline(1) gprint('****Iteration script failed. Details follow.****') lu.exit_with_python_error(_SCRIPT_NAME)
def generate_distance_file(): """Use ArcGIS to create Conefor distance file For ArcGIS Desktop users an Advanced license is required. """ try: arcpy.env.cellSize = arcpy.Raster(cfg.RESRAST).meanCellHeight S2COREFC = cfg.COREFC if cfg.SIMPLIFY_CORES: try: gprint( 'Simplifying polygons for core pair distance calculations') COREFC_SIMP = path.join(cfg.SCRATCHDIR, "CoreFC_Simp.shp") tolerance = float(arcpy.env.cellSize) / 3 arcpy.cartography.SimplifyPolygon(cfg.COREFC, COREFC_SIMP, "POINT_REMOVE", tolerance, "#", "NO_CHECK") S2COREFC = COREFC_SIMP except Exception: pass # In case point geometry is entered for core area FC arcpy.env.workspace = cfg.SCRATCHDIR FS2COREFC = "fscores" FS2COREFC2 = "fscores2" arcpy.MakeFeatureLayer_management(S2COREFC, FS2COREFC) arcpy.MakeFeatureLayer_management(S2COREFC, FS2COREFC2) output = [] csvseparator = "\t" adjList = get_full_adj_list() gprint('\nFinding distances between cores using Generate Near Table.') near_tbl = path.join(cfg.SCRATCHDIR, "neartbl.dbf") gprint('There are ' + str(len(adjList)) + ' adjacent core pairs to ' 'process.') pctDone = 0 start_time = time.clock() for x in range(0, len(adjList)): pctDone = lu.report_pct_done(x, len(adjList), pctDone) sourceCore = adjList[x, 0] targetCore = adjList[x, 1] expression = cfg.COREFN + " = " + str(sourceCore) arcpy.SelectLayerByAttribute_management(FS2COREFC, "NEW_SELECTION", expression) expression = cfg.COREFN + " = " + str(targetCore) arcpy.SelectLayerByAttribute_management(FS2COREFC2, "NEW_SELECTION", expression) arcpy.GenerateNearTable_analysis(FS2COREFC, FS2COREFC2, near_tbl, "#", "NO_LOCATION", "NO_ANGLE", "ALL", "0") rows = arcpy.SearchCursor(near_tbl) row = next(rows) minDist = 1e20 if row: # May be running on selected core areas in step 2 while row: dist = row.getValue("NEAR_DIST") if dist <= 0: # In case simplified polygons abut one another dist = float(arcpy.env.cellSize) if dist < minDist: minDist = dist outputrow = [] outputrow.append(str(sourceCore)) outputrow.append(str(targetCore)) outputrow.append(str(dist)) del row row = next(rows) del rows output.append(csvseparator.join(outputrow)) start_time = lu.elapsed_time(start_time) # In case coreFC is grouped in TOC, get coreFN for non-Arc statement group, coreFN = path.split(cfg.COREFC) dist_fname = path.join(cfg.PROJECTDIR, (coreFN + "_dists.txt")) dist_file = open(dist_fname, 'w') dist_file.write('\n'.join(output)) dist_file.close() gprint('Distance file ' + dist_fname + ' generated.\n') return dist_fname except arcpy.ExecuteError: lu.dashline(1) gprint('****Failed in step 2. Details follow.****') lu.exit_with_geoproc_error(_SCRIPT_NAME) # Return any PYTHON or system specific errors except Exception: lu.dashline(1) gprint('****Failed in step 2. Details follow.****') lu.exit_with_python_error(_SCRIPT_NAME)
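# ---------------------------------------------------------------------------
# Aside: the distance file written above is plain text -- one tab-separated
# "source  target  distance" triple per core pair -- which is the format the
# iteration script hands to lm_master as its optional distances input. A
# minimal arcpy-free sketch of the same output (the pair list is made up):
pairs = [(1, 2, 1500.0), (1, 3, 4200.5), (2, 3, 980.0)]
with open('URWA_HCAs_Doug_Grant_dists.txt', 'w') as dist_file:
    dist_file.write('\n'.join(
        '\t'.join([str(src), str(tgt), str(dist)])
        for (src, tgt, dist) in pairs))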
def STEP4_refine_network(): """Allows user to only connect each core area to its N nearest neighbors, then connect any disjunct clusters ('constellations') of core areas to their nearest neighboring cluster """ try: lu.dashline(1) gprint('Running script ' + _SCRIPT_NAME) cfg.gp.Workspace = cfg.OUTPUTDIR linkTableFile = lu.get_prev_step_link_table(step=4) linkTable = lu.load_link_table(linkTableFile) numLinks = linkTable.shape[0] lu.report_links(linkTable) if not cfg.STEP3: # re-check for links that are too long in case script run out of # sequence with more stringent settings gprint('Double-checking for corridors that are too long' ' or too short to map.') DISABLE_LEAST_COST_NO_VAL = True linkTable, numDroppedLinks = lu.drop_links( linkTable, cfg.MAXEUCDIST, 0, cfg.MAXCOSTDIST, 0, DISABLE_LEAST_COST_NO_VAL) rows, cols = npy.where( linkTable[:, cfg.LTB_LINKTYPE:cfg.LTB_LINKTYPE + 1] > 0) # == cfg.LT_CORR # or # linkTable[:,cfg.LTB_LINKTYPE:cfg.LTB_LINKTYPE + 1] == cfg.LT_KEEP) corridorLinks = linkTable[rows, :] coresToProcess = npy.unique( corridorLinks[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]) if cfg.S4DISTTYPE_EU: distCol = cfg.LTB_EUCDIST else: distCol = cfg.LTB_CWDIST # Flag links that do not connect any core areas to their nearest # N neighbors. (N = cfg.S4MAXNN) lu.dashline(1) # optionally ignore max nearest neighbor setting if cfg.IGNORES4MAXNN: gprint('Connecting each core area to all its neighbors.') else: gprint('Connecting each core area to its nearest ' + str(cfg.S4MAXNN) + ' nearest neighbors.') # Code written assuming NO duplicate core pairs for core in coresToProcess: rows, cols = npy.where(corridorLinks[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1] == core) distsFromCore = corridorLinks[rows, :] # Sort by distance from target core ind = npy.argsort(distsFromCore[:, distCol]) distsFromCore = distsFromCore[ind] # Set N nearest neighbor connections to Nearest Neighbor (NNCT) # optionally ignore max nearest neighbor setting if cfg.IGNORES4MAXNN: maxRange = len(rows) else: maxRange = min(len(rows), cfg.S4MAXNN) for link in range(0, maxRange): linkId = distsFromCore[link, cfg.LTB_LINKID] # assumes linktable sequentially numbered with no gaps linkTable[linkId - 1, cfg.LTB_LINKTYPE] = cfg.LT_NNCT # Connect constellations (aka components or clusters) # Fixme: needs testing. Move to function. if cfg.S4CONNECT: lu.dashline(1) gprint('Connecting constellations') # linkTableComp has 4 extra cols to track COMPONENTS numLinks = linkTable.shape[0] # g1' g2' THEN c1 c2 compCols = npy.zeros((numLinks, 4), dtype="int32") linkTableComp = npy.append(linkTable, compCols, axis=1) del compCols # renumber cores to save memory for this next step. Place in # columns 10 and 11 for coreInd in range(0, len(coresToProcess)): # here, cols are 0 for cfg.LTB_CORE1 and 1 for cfg.LTB_CORE2 rows, cols = npy.where( linkTableComp[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1] == coresToProcess[coreInd]) # want results in cols 10 and 11- These are NEW core numbers # (0 - numcores) linkTableComp[rows, cols + 10] = coreInd rows, cols = npy.where( linkTableComp[:, cfg.LTB_LINKTYPE:cfg.LTB_LINKTYPE + 1] == cfg.LT_NNCT) # The new, improved corridorLinks- only NN links corridorLinksComp = linkTableComp[rows, :] # These are NEW core numbers (range from 0 to numcores) coresToProcess = npy.unique(linkTableComp[:, 10:12]) #Create graph describing connected cores. 
Graph = npy.zeros((len(coresToProcess), len(coresToProcess)), dtype="int32") rows = corridorLinksComp[:, 10].astype('int32') cols = corridorLinksComp[:, 11].astype('int32') vals = npy.where( corridorLinksComp[:, cfg.LTB_LINKTYPE] == cfg.LT_NNCT, cfg.LT_CORR, 0) Graph[rows, cols] = vals Graph = Graph + Graph.T # Use graph to identify components (disconnected sub-groups) in # core area network components = lu.components_no_sparse(Graph) for coreInd in range(0, len(coresToProcess)): # In resulting cols, cols are 0 for LTB_CORE1 and 1 for # LTB_CORE2 rows, cols = (npy.where( linkTableComp[:, 10:12] == coresToProcess[coreInd])) # want results in cols 12 and 13 Note: we've replaced new core # numbers with COMPONENT numbers. linkTableComp[rows, cols + 12] = components[coreInd] # Additional column indexes for linkTableComp component1Col = 12 component2Col = 13 linkTableComp[:, cfg.LTB_CLUST1] = linkTableComp[:, component1Col] linkTableComp[:, cfg.LTB_CLUST2] = linkTableComp[:, component2Col] # Sort by distance ind = npy.argsort(linkTableComp[:, distCol]) linkTableComp = linkTableComp[ind] # Connect constellations via shortest inter-constellation links, # until all constellations connected. for row in range(0, numLinks): if ((linkTableComp[row, distCol] > 0) and ((linkTableComp[row, cfg.LTB_LINKTYPE] == cfg.LT_CORR) or (linkTableComp[row, cfg.LTB_LINKTYPE] == cfg.LT_KEEP)) and (linkTableComp[row, component1Col] != linkTableComp[row, component2Col])): # Make this an inter-component link linkTableComp[row, cfg.LTB_LINKTYPE] = cfg.LT_CLU newComp = min( linkTableComp[row, component1Col:component2Col + 1]) oldComp = max( linkTableComp[row, component1Col:component2Col + 1]) # cols are 0 and 1 rows, cols = npy.where( linkTableComp[:, component1Col:component2Col + 1] == oldComp) # want results in cols 12 and 13 linkTableComp[rows, cols + 12] = newComp # Remove extra columns from link table linkTable = lu.delete_col(linkTableComp, [10, 11, 12, 13]) # Re-sort link table by link ID ind = npy.argsort(linkTable[:, cfg.LTB_LINKID]) linkTable = linkTable[ind] # At end, any non-constellation links that are not NN's get dropped # (too long to be in cfg.S4MAXNN, not a component link) rows = npy.where(linkTable[:, cfg.LTB_LINKTYPE] == cfg.LT_CORR) linkTable[rows, cfg.LTB_LINKTYPE] = cfg.LT_CPLK # set NNCT links to NN corridor links (NNC), get rid # of extra columns, re-sort linktable rows = npy.where(linkTable[:, cfg.LTB_LINKTYPE] == cfg.LT_NNCT) linkTable[rows, cfg.LTB_LINKTYPE] = cfg.LT_NNC # Write linkTable to disk outlinkTableFile = lu.get_this_step_link_table(step=4) # lu.dashline(1) gprint('\nWriting ' + outlinkTableFile) lu.write_link_table(linkTable, outlinkTableFile) linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s4.csv") lu.write_link_table(linkTable, linkTableLogFile) start_time = time.clock() lu.update_lcp_shapefile(linkTable, lastStep=3, thisStep=4) start_time = lu.elapsed_time(start_time) # lu.dashline() gprint('Creating shapefiles with linework for links.') try: lu.write_link_maps(outlinkTableFile, step=4) except: lu.write_link_maps(outlinkTableFile, step=4) # Return GEOPROCESSING specific errors except arcgisscripting.ExecuteError: gprint('****Failed in step 4. Details follow.****') lu.exit_with_geoproc_error(_SCRIPT_NAME) # Return any PYTHON or system specific errors except: gprint('****Failed in step 4. Details follow.****') lu.exit_with_python_error(_SCRIPT_NAME) return
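# ---------------------------------------------------------------------------
# Aside: the constellation step above reduces to (1) build a symmetric
# adjacency matrix of nearest-neighbor links, (2) label its connected
# components, and (3) promote the shortest link joining two different
# components until one component remains. lu.components_no_sparse is the
# toolbox helper for (2); the function below is a plain depth-first re-sketch
# of the same idea, shown only to make the algorithm concrete.
import numpy as npy

def label_components(graph):
    """Label connected components of a symmetric adjacency matrix (1-based)."""
    num_nodes = graph.shape[0]
    labels = npy.zeros(num_nodes, dtype='int32')
    current = 0
    for start in range(num_nodes):
        if labels[start]:
            continue
        current += 1
        stack = [start]
        while stack:
            node = stack.pop()
            if labels[node]:
                continue
            labels[node] = current
            stack.extend(npy.where(graph[node] > 0)[0].tolist())
    return labels

# Two disjunct clusters {0,1} and {2,3} get labels [1 1 2 2]:
adj = npy.array([[0, 1, 0, 0],
                 [1, 0, 0, 0],
                 [0, 0, 0, 1],
                 [0, 0, 1, 0]])
print(label_components(adj))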
def do_radius_loop(): """Do radius loop.""" link_table = link_table_tmp.copy() start_time = time.clock() link_loop = 0 pct_done = 0 gprint('\nMapping barriers at a radius of ' + str(radius) + ' ' + str(map_units)) if cfg.SUM_BARRIERS: gprint('using SUM method') else: gprint('using MAXIMUM method') if num_corridor_links > 1: gprint('0 percent done') last_mosaic_ras = None last_mosaic_ras_pct = None for x in range(0, num_links): pct_done = lu.report_pct_done( link_loop, num_corridor_links, pct_done) if ((link_table[x, cfg.LTB_LINKTYPE] > 0) and (link_table[x, cfg.LTB_LINKTYPE] < 1000)): link_loop = link_loop + 1 # source and target cores corex = int(core_list[x, 0]) corey = int(core_list[x, 1]) # Get cwd rasters for source and target cores cwd_ras1 = lu.get_cwd_path(corex) cwd_ras2 = lu.get_cwd_path(corey) # Mask out areas above CWD threshold cwd_tmp1 = None cwd_tmp2 = None if cfg.BARRIER_CWD_THRESH is not None: if x == 1: lu.dashline(1) gprint(' Using CWD threshold of ' + str(cfg.BARRIER_CWD_THRESH) + ' map units.') arcpy.env.extent = cfg.RESRAST arcpy.env.cellSize = cfg.RESRAST arcpy.env.snapRaster = cfg.RESRAST cwd_tmp1 = path.join(cfg.SCRATCHDIR, "tmp" + str(corex)) out_con = arcpy.sa.Con( cwd_ras1 < float(cfg.BARRIER_CWD_THRESH), cwd_ras1) out_con.save(cwd_tmp1) cwd_ras1 = cwd_tmp1 cwd_tmp2 = path.join(cfg.SCRATCHDIR, "tmp" + str(corey)) out_con = arcpy.sa.Con( cwd_ras2 < float(cfg.BARRIER_CWD_THRESH), cwd_ras2) out_con.save(cwd_tmp2) cwd_ras2 = cwd_tmp2 focal_ras1 = lu.get_focal_path(corex, radius) focal_ras2 = lu.get_focal_path(corey, radius) link = lu.get_links_from_core_pairs(link_table, corex, corey) lc_dist = float(link_table[link, cfg.LTB_CWDIST]) # Detect barriers at radius using neighborhood stats # Create the Neighborhood Object inner_radius = radius - 1 outer_radius = radius dia = 2 * radius in_neighborhood = ("ANNULUS " + str(inner_radius) + " " + str(outer_radius) + " MAP") @Retry(10) def exec_focal(): """Execute focal statistics.""" if not path.exists(focal_ras1): arcpy.env.extent = cwd_ras1 out_focal_stats = arcpy.sa.FocalStatistics( cwd_ras1, in_neighborhood, "MINIMUM", "DATA") if SET_CORES_TO_NULL: # Set areas overlapping cores to NoData xxx out_focal_stats2 = arcpy.sa.Con( out_focal_stats > 0, out_focal_stats) out_focal_stats2.save(focal_ras1) else: out_focal_stats.save(focal_ras1) arcpy.env.extent = cfg.RESRAST if not path.exists(focal_ras2): arcpy.env.extent = cwd_ras2 out_focal_stats = arcpy.sa.FocalStatistics( cwd_ras2, in_neighborhood, "MINIMUM", "DATA") if SET_CORES_TO_NULL: # Set areas overlapping cores to NoData xxx out_focal_stats2 = arcpy.sa.Con( out_focal_stats > 0, out_focal_stats) out_focal_stats2.save(focal_ras2) else: out_focal_stats.save(focal_ras2) arcpy.env.extent = cfg.RESRAST exec_focal() lu.delete_data(cwd_tmp1) lu.delete_data(cwd_tmp2) barrier_ras = path.join( cbarrierdir, "b" + str(radius) + "_" + str(corex) + "_" + str(corey)+'.tif') # Need to set nulls to 0, # also create trim rasters as we go if cfg.SUM_BARRIERS: out_ras = ((lc_dist - arcpy.sa.Raster(focal_ras1) - arcpy.sa.Raster(focal_ras2) - dia) / dia) out_con = arcpy.sa.Con(arcpy.sa.IsNull(out_ras), 0, out_ras) out_con2 = arcpy.sa.Con(out_con < 0, 0, out_con) out_con2.save(barrier_ras) # Execute FocalStatistics to fill out search radii in_neighborhood = ("CIRCLE " + str(outer_radius) + " MAP") fill_ras = path.join( cbarrierdir, "b" + str(radius) + "_" + str(corex) + "_" + str(corey) + "_fill.tif") out_focal_stats = arcpy.sa.FocalStatistics( barrier_ras, in_neighborhood, "MAXIMUM", "DATA") 
                out_focal_stats.save(fill_ras)

                if cfg.WRITE_TRIM_RASTERS:
                    trm_ras = path.join(
                        cbarrierdir, "b" + str(radius) + "_" +
                        str(corex) + "_" + str(corey) + "_trim.tif")
                    ras_list = [fill_ras, resist_fill_ras]
                    out_cell_statistics = arcpy.sa.CellStatistics(
                        ras_list, "MINIMUM")
                    out_cell_statistics.save(trm_ras)
            else:
                @Retry(10)
                def calc_ben():
                    """Calculate potential benefit.

                    Calculate potential benefit per map unit restored.
                    """
                    out_ras = (
                        (lc_dist - arcpy.sa.Raster(focal_ras1) -
                         arcpy.sa.Raster(focal_ras2) - dia) / dia)
                    out_ras.save(barrier_ras)
                calc_ben()

            if cfg.WRITE_PCT_RASTERS:
                # Calculate % potential benefit per unit restored
                barrier_ras_pct = path.join(
                    cbarrierdir, "b" + str(radius) + "_" +
                    str(corex) + "_" + str(corey) + '_pct.tif')

                @Retry(10)
                def calc_ben_pct():
                    """Calc benefit percentage."""
                    outras = (100 *
                              (arcpy.sa.Raster(barrier_ras) / lc_dist))
                    outras.save(barrier_ras_pct)
                calc_ben_pct()

            # Mosaic barrier results across core area pairs
            mosaic_dir = path.join(cfg.SCRATCHDIR,
                                   'mos' + str(rad_id) + '_' + str(x + 1))
            lu.create_dir(mosaic_dir)
            mos_fn = 'mos_temp'
            tmp_mosaic_ras = path.join(mosaic_dir, mos_fn)
            tmp_mosaic_ras_trim = path.join(mosaic_dir, 'mos_temp_trm')
            arcpy.env.workspace = mosaic_dir
            if link_loop == 1:
                last_mosaic_ras_trim = None
                # For first grid copy rather than mosaic
                arcpy.CopyRaster_management(barrier_ras, tmp_mosaic_ras)
                if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    arcpy.CopyRaster_management(trm_ras,
                                                tmp_mosaic_ras_trim)
            else:
                if cfg.SUM_BARRIERS:
                    out_con = arcpy.sa.Con(
                        arcpy.sa.Raster(barrier_ras) < 0, last_mosaic_ras,
                        arcpy.sa.Raster(barrier_ras) +
                        arcpy.sa.Raster(last_mosaic_ras))
                    out_con.save(tmp_mosaic_ras)
                    if cfg.WRITE_TRIM_RASTERS:
                        out_con = arcpy.sa.Con(
                            arcpy.sa.Raster(trm_ras) < 0,
                            last_mosaic_ras_trim,
                            arcpy.sa.Raster(trm_ras) +
                            arcpy.sa.Raster(last_mosaic_ras_trim))
                        out_con.save(tmp_mosaic_ras_trim)
                else:
                    in_rasters = (";".join([barrier_ras, last_mosaic_ras]))

                    @Retry(10)
                    def mosaic_to_new():
                        """Mosaic to new raster."""
                        arcpy.MosaicToNewRaster_management(
                            input_rasters=in_rasters,
                            output_location=mosaic_dir,
                            raster_dataset_name_with_extension=mos_fn,
                            pixel_type="32_BIT_FLOAT",
                            cellsize=arcpy.env.cellSize,
                            number_of_bands="1",
                            mosaic_method="MAXIMUM")
                    mosaic_to_new()

            if link_loop > 1:  # Clean up from previous loop
                lu.delete_data(last_mosaic_ras)
                last_mosaic_dir = path.dirname(last_mosaic_ras)
                lu.clean_out_workspace(last_mosaic_dir)
                lu.delete_dir(last_mosaic_dir)
            last_mosaic_ras = tmp_mosaic_ras
            if cfg.WRITE_TRIM_RASTERS:
                last_mosaic_ras_trim = tmp_mosaic_ras_trim

            if cfg.WRITE_PCT_RASTERS:
                mos_pct_fn = 'mos_temp_pct'
                mosaic_dir_pct = path.join(cfg.SCRATCHDIR,
                                           'mosP' + str(rad_id) + '_' +
                                           str(x + 1))
                lu.create_dir(mosaic_dir_pct)
                tmp_mosaic_ras_pct = path.join(mosaic_dir_pct, mos_pct_fn)
                if link_loop == 1:
                    # If this is the first grid then copy
                    # rather than mosaic
                    if cfg.SUM_BARRIERS:
                        out_con = arcpy.sa.Con(
                            arcpy.sa.Raster(barrier_ras_pct) < 0, 0,
                            arcpy.sa.Con(arcpy.sa.IsNull(barrier_ras_pct),
                                         0, barrier_ras_pct))
                        out_con.save(tmp_mosaic_ras_pct)
                    else:
                        arcpy.CopyRaster_management(barrier_ras_pct,
                                                    tmp_mosaic_ras_pct)
                else:
                    if cfg.SUM_BARRIERS:
                        @Retry(10)
                        def sum_barriers():
                            """Sum barriers."""
                            out_con = arcpy.sa.Con(
                                arcpy.sa.Raster(barrier_ras_pct) < 0,
                                last_mosaic_ras_pct,
                                arcpy.sa.Raster(barrier_ras_pct) +
                                arcpy.sa.Raster(last_mosaic_ras_pct))
                            out_con.save(tmp_mosaic_ras_pct)
                        sum_barriers()
                    else:
                        in_rasters = (";".join([barrier_ras_pct,
                                                last_mosaic_ras_pct]))

                        @Retry(10)
                        def max_barriers():
                            """Get max barriers."""
                            arcpy.MosaicToNewRaster_management(
                                input_rasters=in_rasters,
                                output_location=mosaic_dir_pct,
                                raster_dataset_name_with_extension=mos_pct_fn,
                                pixel_type="32_BIT_FLOAT",
                                cellsize=arcpy.env.cellSize,
                                number_of_bands="1",
                                mosaic_method="MAXIMUM")
                        max_barriers()

                if link_loop > 1:  # Clean up from previous loop
                    lu.delete_data(last_mosaic_ras_pct)
                    last_mosaic_dir_pct = path.dirname(last_mosaic_ras_pct)
                    lu.clean_out_workspace(last_mosaic_dir_pct)
                    lu.delete_dir(last_mosaic_dir_pct)
                last_mosaic_ras_pct = tmp_mosaic_ras_pct

            if not cfg.SAVEBARRIERRASTERS:
                lu.delete_data(barrier_ras)
                if cfg.WRITE_PCT_RASTERS:
                    lu.delete_data(barrier_ras_pct)
                if cfg.WRITE_TRIM_RASTERS:
                    lu.delete_data(trm_ras)

            # Temporarily disable links in linktable -
            # don't want to mosaic them twice
            for y in range(x + 1, num_links):
                corex1 = int(core_list[y, 0])
                corey1 = int(core_list[y, 1])
                if corex1 == corex and corey1 == corey:
                    link_table[y, cfg.LTB_LINKTYPE] = (
                        link_table[y, cfg.LTB_LINKTYPE] + 1000)
                elif corex1 == corey and corey1 == corex:
                    link_table[y, cfg.LTB_LINKTYPE] = (
                        link_table[y, cfg.LTB_LINKTYPE] + 1000)

    if num_corridor_links > 1 and pct_done < 100:
        gprint('100 percent done')
    gprint('Summarizing barrier data for search radius.')
    # Rows that were temporarily disabled
    rows = npy.where(link_table[:, cfg.LTB_LINKTYPE] > 1000)
    link_table[rows, cfg.LTB_LINKTYPE] = (
        link_table[rows, cfg.LTB_LINKTYPE] - 1000)

    # -----------------------------------------------------------------
    # Set negative values to null or zero and write geodatabase.
    mosaic_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad" +
                 str(radius))
    mosaic_ras = path.join(cfg.BARRIERGDB, mosaic_fn)
    arcpy.env.extent = cfg.RESRAST
    out_set_null = arcpy.sa.SetNull(tmp_mosaic_ras, tmp_mosaic_ras,
                                    "VALUE < 0")
    out_set_null.save(mosaic_ras)
    lu.delete_data(tmp_mosaic_ras)

    if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
        mosaic_fn = (prefix + "_BarrierCircles_RBMin" + sum_suffix +
                     "_Rad" + str(radius))
        mosaic_ras_trim = path.join(cfg.BARRIERGDB, mosaic_fn)
        arcpy.CopyRaster_management(tmp_mosaic_ras_trim, mosaic_ras_trim)
        lu.delete_data(tmp_mosaic_ras_trim)

    if cfg.WRITE_PCT_RASTERS:
        # Do same for percent raster
        mosaic_pct_fn = (prefix + "_BarrierCenters_Pct" + sum_suffix +
                         "_Rad" + str(radius))
        arcpy.env.extent = cfg.RESRAST
        out_set_null = arcpy.sa.SetNull(tmp_mosaic_ras_pct,
                                        tmp_mosaic_ras_pct, "VALUE < 0")
        mosaic_ras_pct = path.join(cfg.BARRIERGDB, mosaic_pct_fn)
        out_set_null.save(mosaic_ras_pct)
        lu.delete_data(tmp_mosaic_ras_pct)

    # 'Grow out' maximum restoration gain to
    # neighborhood size for display
    in_neighborhood = "CIRCLE " + str(outer_radius) + " MAP"
    # Execute FocalStatistics
    fill_ras_fn = "barriers_fill" + str(outer_radius) + TIF
    fill_ras = path.join(cfg.BARRIERBASEDIR, fill_ras_fn)
    out_focal_stats = arcpy.sa.FocalStatistics(
        mosaic_ras, in_neighborhood, "MAXIMUM", "DATA")
    out_focal_stats.save(fill_ras)

    if cfg.WRITE_PCT_RASTERS:
        # Do same for percent raster
        fill_ras_pct_fn = ("barriers_fill_pct" + str(outer_radius) + TIF)
        fill_ras_pct = path.join(cfg.BARRIERBASEDIR, fill_ras_pct_fn)
        out_focal_stats = arcpy.sa.FocalStatistics(
            mosaic_ras_pct, in_neighborhood, "MAXIMUM", "DATA")
        out_focal_stats.save(fill_ras_pct)

    # Place copies of filled rasters in output geodatabase
    arcpy.env.workspace = cfg.BARRIERGDB
    fill_ras_fn = (prefix + "_BarrierCircles" + sum_suffix + "_Rad" +
                   str(outer_radius))
    arcpy.CopyRaster_management(fill_ras, fill_ras_fn)
    if cfg.WRITE_PCT_RASTERS:
        fill_ras_pct_fn = (prefix + "_BarrierCircles_Pct" + sum_suffix +
                           "_Rad" + str(outer_radius))
        arcpy.CopyRaster_management(fill_ras_pct, fill_ras_pct_fn)

    if not cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
        # Create pared-down version of filled raster- remove pixels
        # that don't need restoring by allowing a pixel to only
        # contribute its resistance value to restoration gain
        out_ras_fn = "barriers_trm" + str(outer_radius) + TIF
        out_ras = path.join(cfg.BARRIERBASEDIR, out_ras_fn)
        ras_list = [fill_ras, resist_fill_ras]
        out_cell_statistics = arcpy.sa.CellStatistics(ras_list, "MINIMUM")
        out_cell_statistics.save(out_ras)

        # Second round to clip by data values in barrier raster
        out_ras_2fn = ("barriers_trm" + sum_suffix + str(outer_radius) +
                       "_2" + TIF)
        out_ras2 = path.join(cfg.BARRIERBASEDIR, out_ras_2fn)
        output = arcpy.sa.Con(arcpy.sa.IsNull(fill_ras), fill_ras, out_ras)
        output.save(out_ras2)
        out_ras_fn = (prefix + "_BarrierCircles_RBMin" + sum_suffix +
                      "_Rad" + str(outer_radius))
        arcpy.CopyRaster_management(out_ras2, out_ras_fn)
    start_time = lu.elapsed_time(start_time)
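# ---------------------------------------------------------------------------
# Aside: the barrier score assembled in the loop above, in scalar form. The
# annulus focal statistic gives the minimum cost-weighted distance (CWD) from
# each core to the edge of a candidate circle, so the improvement per map
# unit of diameter restored is (lcDist - cwdA - cwdB - diameter) / diameter.
# A worked example with made-up numbers follows:
def barrier_improvement(lc_dist, cwd_to_edge_a, cwd_to_edge_b, radius):
    """CWD saved per map unit of diameter restored for one candidate circle."""
    diameter = 2.0 * radius
    return (lc_dist - cwd_to_edge_a - cwd_to_edge_b - diameter) / diameter

# A 450 m circle on a corridor whose least-cost distance is 50000, with
# 12000 and 8000 CWD units from the two cores to the circle edge:
# (50000 - 12000 - 8000 - 900) / 900 = ~32.3 units saved per meter restored.
print(barrier_improvement(50000.0, 12000.0, 8000.0, 450.0))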