def setup_wrkspace(gisdbase, ccr_grassrc, geo_file):
    """Setup GRASS workspace and modify windows path for GRASS GDAL.

    Args:
        gisdbase: Folder in which the GRASS database/location is created.
        ccr_grassrc: Path to the GRASS resource file (exported as GISRC).
        geo_file: Georeferenced file used to define the new location's
            projection.

    Raises:
        Exception: If the GRASS location cannot be created.
    """
    lm_util.gprint("Creating GRASS workspace")
    gisbase = cc_env.gisbase
    location = "gcwd"
    mapset = "PERMANENT"
    # GRASS reads its configuration from these environment variables
    os.environ['GISRC'] = ccr_grassrc
    os.environ['LD_LIBRARY_PATH'] = os.path.join(gisbase, "lib")
    os.environ['GRASS_SH'] = os.path.join(gisbase, "msys", "bin", "sh.exe")
    try:
        grass.create_location(gisdbase, location, filename=geo_file)
    # Was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit
    # still propagate instead of being reported as a GRASS failure.
    except Exception:
        cc_util.gdal_fail_check()
        arcpy.AddWarning("GRASS ERROR. Try rebooting and restarting ArcGIS.")
        arcpy.AddWarning("If that doesn't work you can try using ")
        arcpy.AddWarning("the 'CC Run Script.py' python script in the ")
        arcpy.AddWarning("demo directory where the Linkage Mapper toolbox")
        arcpy.AddWarning("is installed instead of ArcGIS to call the tool")
        arcpy.AddWarning("(see user guide).")
        raise Exception("GRASS ERROR: Cannot create workspace.")
    gsetup.init(gisbase, gisdbase, location, mapset)
    run_grass_cmd("g.gisenv", set="OVERWRITE=1")
    os.environ['GRASS_VERBOSE'] = "0"  # Only errors and warnings are printed
def run_analysis():
    """Run Climate Linkage Mapper analysis.

    Clips inputs, computes zonal climate statistics per core, filters
    core pairs by climate threshold and Euclidean distance, builds CWD
    rasters in GRASS, then hands off to Linkage Mapper.
    """
    import cc_grass_cwd  # Cannot import until configured
    zonal_tbl = "zstats.dbf"
    cc_copy_inputs()  # Clip inputs and create project area raster
    # Get zonal statistics for cores and climate
    lm_util.gprint("\nCALCULATING ZONAL STATISTICS FROM CLIMATE RASTER")
    climate_stats = arcpy.sa.ZonalStatisticsAsTable(
        cc_env.prj_core_fc, cc_env.core_fld, cc_env.prj_climate_rast,
        zonal_tbl, "DATA", "ALL")
    # Create core pairings table and limit based upon climate threshold
    core_pairings = create_pair_tbl(climate_stats)
    # Generate link table, calculate CWD and run Linkage Mapper
    if int(arcpy.GetCount_management(core_pairings).getOutput(0)) == 0:
        arcpy.AddWarning("\nNo core pairs within climate threshold. "
                         "Program will end")
    else:
        # Process pairings and generate link table
        grass_cores = process_pairings(core_pairings)
        if not grass_cores:
            # Fixed garbled message: was "Progam program will end"
            arcpy.AddWarning("\nNo core pairs within Euclidean distances. "
                             "Program will end")
        else:
            # Create CWD using Grass
            cc_grass_cwd.grass_cwd(grass_cores)
            # Run Linkage Mapper
            lm_util.gprint("\nRUNNING LINKAGE MAPPER "
                           "TO CREATE CLIMATE CORRIDORS")
            lm_master.lm_master()
def simplify_corefc():
    """Simplify the project core feature class and return its path.

    Uses POINT_REMOVE simplification with a tolerance of one third of
    the climate raster's cell height; output goes to cc_env.core_simp.
    """
    lm_util.gprint("Simplifying polygons to speed up core pair "
                   "distance calculations")
    simplified_fc = cc_env.core_simp
    # Tolerance derived from the climate raster resolution
    simplification_tol = (
        arcpy.Raster(cc_env.prj_climate_rast).meanCellHeight / 3)
    arcpy.cartography.SimplifyPolygon(
        cc_env.prj_core_fc, simplified_fc, "POINT_REMOVE",
        simplification_tol, "#", "NO_CHECK")
    return simplified_fc
def config_lm():
    """Configure Linkage Mapper for a Climate Corridor run.

    Builds the positional argument tuple expected by lm_env.configure
    and creates the data-pass directory.
    """
    # Literal "true"/"false"/"#" entries pin Linkage Mapper options that
    # this tool does not expose to the user.
    lm_settings = (
        _SCRIPT_NAME,
        cc_env.proj_dir,
        cc_env.prj_core_fc,
        cc_env.core_fld,
        cc_env.prj_resist_rast,
        "false", "false", "#", "#", "true", "false",
        cc_env.prune_network,
        cc_env.max_nn,
        cc_env.nn_unit,
        cc_env.keep_constelations,
        "true", "#", "#", "#")
    lm_env.configure(lm_env.TOOL_CC, lm_settings)
    lm_util.create_dir(lm_env.DATAPASSDIR)
    lm_util.gprint('\nClimate Linkage Mapper Version ' + lm_env.releaseNum)
    lm_util.gprint('NOTE: This tool runs best with BACKGROUND '
                   'PROCESSING (see user guide).')
def process_pairings(pairings):
    """Limit core pairings based on distance inputs and create linkage table

    Requires ArcInfo license.

    Returns a sorted list of the core IDs that survive the Euclidean
    distance filtering.
    """
    lm_util.gprint("\nLIMITING CORE PAIRS BASED ON INPUTED DISTANCES AND "
                   "GENERATING LINK TABLE")
    # Optionally simplify cores first (boolean from config)
    corefc = (simplify_corefc() if cc_env.simplify_cores
              else cc_env.prj_core_fc)
    core_pairs, frm_cores = pairs_from_list(pairings)
    # Build the link table from the surviving pairs
    return sorted(create_lnk_tbl(corefc, core_pairs, frm_cores))
def limit_cores(pair_tbl, stats_tbl):
    """Limit core pairs based upon climate threshold.

    Joins zonal climate statistics onto the pairings table, computes the
    difference of (mean - 2*std) between the two cores of each pair, and
    deletes rows within cc_env.climate_threshold.

    Args:
        pair_tbl: Table of candidate core pairings.
        stats_tbl: Zonal statistics table keyed by core id.
    """
    pair_vw = "dist_tbvw"
    stats_vw = "stats_tbvw"
    core_id = cc_env.core_fld.upper()
    try:
        lm_util.gprint("\nLIMITING CORE PAIRS BASED UPON CLIMATE "
                       "THRESHOLD")
        arcpy.MakeTableView_management(pair_tbl, pair_vw)
        arcpy.MakeTableView_management(stats_tbl, stats_vw)
        # Add basic stats to distance table
        lm_util.gprint("Joining zonal statistics to pairings table")
        # NOTE(review): "fr" stats are joined on TO_COL and "to" stats on
        # FR_COL - looks swapped; confirm against add_stats before changing.
        add_stats(stats_vw, core_id, "fr", pair_vw, TO_COL)
        add_stats(stats_vw, core_id, "to", pair_vw, FR_COL)
        # Calculate difference of 2 std
        lm_util.gprint("Calculating difference of 2 std")
        diffu_2std = "diffu_2std"
        arcpy.AddField_management(pair_vw, diffu_2std, "Float", "", "",
                                  "", "", "NULLABLE")
        arcpy.CalculateField_management(pair_vw, diffu_2std,
                                        "abs(!frumin2std! - !toumin2std!)",
                                        "PYTHON")
        # Filter distance table based on inputted threshold and delete rows
        lm_util.gprint("Filtering table based on threshold")
        diffu2std_fld = arcpy.AddFieldDelimiters(pair_vw, diffu_2std)
        expression = diffu2std_fld + " <= " + str(cc_env.climate_threshold)
        arcpy.SelectLayerByAttribute_management(pair_vw, "NEW_SELECTION",
                                                expression)
        rows_del = int(arcpy.GetCount_management(pair_vw).getOutput(0))
        if rows_del > 0:
            arcpy.DeleteRows_management(pair_vw)
        lm_util.gprint(str(rows_del) + " rows deleted")
    # Removed a no-op "except Exception: raise" clause; errors propagate
    # unchanged while the finally block still cleans up the table views.
    finally:
        cc_util.delete_features([stats_vw, pair_vw])
def pair_cores(cpair_tbl):
    """Create table with all possible core to core combinations.

    Args:
        cpair_tbl: Name of the output pairings table (created in
            cc_env.out_dir with FR_COL/TO_COL long fields).

    Returns:
        The name of the populated pairings table.
    """
    srows, outputrow, irows = None, None, None
    try:
        lm_util.gprint("\nCREATING CORE PAIRINGS TABLE")
        arcpy.CreateTable_management(cc_env.out_dir, cpair_tbl, "", "")
        arcpy.AddField_management(cpair_tbl, FR_COL, "Long", "", "", "",
                                  "", "NON_NULLABLE")
        arcpy.AddField_management(cpair_tbl, TO_COL, "Long", "", "", "",
                                  "", "NON_NULLABLE")
        arcpy.DeleteField_management(cpair_tbl, "Field1")
        # Cores sorted ascending so combinations come out ordered
        srows = arcpy.SearchCursor(cc_env.prj_core_fc, "", "",
                                   cc_env.core_fld, cc_env.core_fld + " A")
        cores_list = [srow.getValue(cc_env.core_fld) for srow in srows]
        # All unordered pairs of distinct cores
        cores_product = list(itertools.combinations(cores_list, 2))
        lm_util.gprint("There are " + str(len(cores_list)) + " unique "
                       "cores and " + str(len(cores_product)) + " pairings")
        irows = arcpy.InsertCursor(cpair_tbl)
        for nrow in cores_product:
            outputrow = irows.newRow()
            outputrow.setValue(FR_COL, int(nrow[0]))
            outputrow.setValue(TO_COL, int(nrow[1]))
            irows.insertRow(outputrow)
        return cpair_tbl
    # Removed a no-op "except Exception: raise"; the finally block still
    # releases the arcpy cursor objects on both success and failure.
    finally:
        if srows:
            del srows
        if outputrow:
            del outputrow
        if irows:
            del irows
def main(argv=None): """Main function for Climate Linkage Mapper tool""" start_time = datetime.now() print "Start time: %s" % start_time.strftime(TFORMAT) if argv is None: argv = sys.argv try: cc_env.configure(argv) cc_util.check_cc_project_dir() grass_dir_setup() cc_util.gdal_fail_check() # Make sure no dll conflict check_out_sa_license() arc_wksp_setup() config_lm() log_setup() run_analysis() except arcpy.ExecuteError: msg = arcpy.GetMessages(2) arcpy.AddError(arcpy.GetMessages(2)) lm_util.write_log(msg) exc_traceback = sys.exc_info()[2] lm_util.gprint("Traceback (most recent call last):\n" + "".join(traceback.format_tb(exc_traceback)[:-1])) except Exception: exc_value, exc_traceback = sys.exc_info()[1:] arcpy.AddError(exc_value) lm_util.gprint("Traceback (most recent call last):\n" + "".join(traceback.format_tb(exc_traceback))) finally: delete_proj_files() arcpy.CheckInExtension("Spatial") print_runtime(start_time)
def gdal_fail_check():
    """Check GDAL dlls and system path for ArcGIS/GRASS conflicts.

    Raises:
        Exception: When an ArcGIS GDAL DLL is found on the PATH (conflict
            with GRASS), or when no GDAL DLL is found at all.
    """
    gdal = subprocess.Popen("where gdal*", stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, shell=True).stdout.read()
    if gdal != '':
        gdal_list = gdal.split('\n')
        # NOTE(review): only the second entry is inspected - presumably the
        # first hit is expected to be GRASS's own DLL; confirm this ordering
        # assumption (an IndexError is possible with a single-line result).
        if 'arcgis' in gdal_list[1].lower():
            lm_util.gprint("\nGDAL DLL/s found at: " + gdal)
            arcpy.AddWarning("It looks like there is a conflict between "
                             "ArcGIS")
            arcpy.AddWarning("and GRASS. This could be the result of a "
                             "previous ")
            # Fixed broken message: implicit concatenation produced
            # "it mightbe"
            arcpy.AddWarning("analysis (like a Linkage Mapper run) or it "
                             "might be")
            arcpy.AddWarning("caused by conflicts with pre-loaded ArcGIS ")
            arcpy.AddWarning("extensions.")
            # Fixed broken message: implicit concatenation produced
            # "the toolin"
            arcpy.AddWarning("\nThis error often goes away if you run the "
                             "tool in")
            arcpy.AddWarning("the background (see user guide). ")
            arcpy.AddWarning("\nIf that doesn't work, try restarting ArcMap.")
            arcpy.AddWarning("\nIf you still get an error, then restart again")
            arcpy.AddWarning(" and disable any extensions you are not using")
            arcpy.AddWarning("(Click on Customize >> Extensions).")
            arcpy.AddWarning("\nAnd if that doesn't work try closing Arc and ")
            arcpy.AddWarning("instead run the tool using the "
                             "'CC Run Script.py' ")
            arcpy.AddWarning("python script. This script can be found in "
                             "the ")
            arcpy.AddWarning("'demo' directory, located where the Linkage")
            arcpy.AddWarning("Mapper toolbox is installed.\n")
            raise Exception("ArcGIS-GRASS GDAL DLL conflict")
    else:
        lm_util.gprint("GDAL DLL/s not found in system PATH")
        arcpy.AddWarning("Check if the appropriate version of GRASS is "
                         "correctly installed.")
        raise Exception("GRASS DLL/s not found")
def grass_cwd(core_list):
    """Create CWD and Back rasters using GRASS r.walk function.

    Args:
        core_list: Iterable of core ids to generate CWD rasters for.

    The temporary GRASS workspace, resource file and intermediate ASCII
    rasters are always cleaned up, and the Windows PATH is restored.
    """
    # Capture the current PATH so it can be restored after GRASS mangles it
    cur_path = subprocess.Popen("echo %PATH%", stdout=subprocess.PIPE,
                                shell=True).stdout.read()
    gisdbase = os.path.join(cc_env.proj_dir, "gwksp")
    ccr_grassrc = os.path.join(cc_env.proj_dir, "ccr_grassrc")
    climate_asc = os.path.join(cc_env.out_dir, "cc_climate.asc")
    resist_asc = os.path.join(cc_env.out_dir, "cc_resist.asc")
    core_asc = os.path.join(cc_env.out_dir, "cc_cores.asc")
    climate_lyr = "climate"
    resist_lyr = "resist"
    core_lyr = "cores"
    try:
        lm_util.gprint("\nRUNNING GRASS TO CREATE COST-WEIGHTED DISTANCE "
                       "RASTERS")
        # Convert input GRID rasters to ASCII
        lm_util.gprint("Converting ARCINFO GRID rasters to ASCII")
        # Note: consider moving these to main:
        arcpy.RasterToASCII_conversion(cc_env.prj_climate_rast, climate_asc)
        arcpy.RasterToASCII_conversion(cc_env.prj_resist_rast, resist_asc)
        arcpy.RasterToASCII_conversion(cc_env.prj_core_rast, core_asc)
        # Create resource file and setup workspace
        write_grassrc(ccr_grassrc, gisdbase)
        setup_wrkspace(gisdbase, ccr_grassrc, climate_asc)
        # Make cwd folder for Linkage Mapper
        lm_util.make_cwd_paths(max(core_list))
        # Import files into GRASS
        lm_util.gprint("Importing raster files into GRASS")
        run_grass_cmd("r.in.arc", input=climate_asc, output=climate_lyr)
        run_grass_cmd("r.in.arc", input=resist_asc, output=resist_lyr)
        run_grass_cmd("r.in.arc", input=core_asc, output=core_lyr)
        # Generate CWD and Back rasters
        gen_cwd_back(core_list, climate_lyr, resist_lyr, core_lyr)
    # Removed a no-op "except Exception: raise"; cleanup below still runs
    # on both success and failure.
    finally:
        os.environ['PATH'] = cur_path  # Revert to original windows path
        if not cc_util.remove_grass_wkspc(gisdbase):
            # Fixed message typo: was "contine"
            arcpy.AddWarning("Unable to delete temporary GRASS folder. "
                             "Program will continue.")
        cc_util.delete_features(
            [climate_asc, resist_asc, core_asc, ccr_grassrc])
def gen_cwd_back(core_list, climate_lyr, resist_lyr, core_lyr):
    """Generate CWD and back rasters using r.walk in GRASS.

    Args:
        core_list: Iterable of core ids to process.
        climate_lyr: Name of the GRASS climate (elevation) layer.
        resist_lyr: Name of the GRASS resistance (friction) layer.
        core_lyr: Name of the GRASS cores layer.

    For each core, r.walk produces a cost-weighted distance raster and a
    back (direction) raster, which are exported to ARCINFO grids.
    """
    slope_factor = "1"
    walk_coeff_flat = "1"
    walk_coeff_uphill = str(cc_env.climate_cost)
    walk_coeff_downhill = str(cc_env.climate_cost * -1)
    walk_coeff = (walk_coeff_flat + "," + walk_coeff_uphill + "," +
                  walk_coeff_downhill + "," + walk_coeff_downhill)
    focal_core_rast = "focal_core_rast"
    gcwd = "gcwd"
    gback = "gback"
    gbackrc = "gbackrc"
    core_points = "corepoints"
    no_cores = str(len(core_list))
    # Map from directional degree output from GRASS to Arc's 1 to 8
    # directions format. See r.walk source code and ArcGIS's
    # 'Understanding cost distance analysis' help page.
    rc_rules = "180=5\n225=4\n270=3\n315=2\n360=1\n45=8\n90=7\n135=6"
    # Removed a no-op "try/except Exception: raise" wrapper; the fixed
    # docstring opener (was four quotes) is the other change.
    for position, core_no in enumerate(core_list):
        core_no_txt = str(core_no)
        lm_util.gprint("Generating CWD and back rasters for Core " +
                       core_no_txt + " (" + str(position + 1) + "/" +
                       no_cores + ")")
        # Pull out focal core for cwd analysis
        write_grass_cmd("r.reclass", input=core_lyr,
                        output=focal_core_rast, overwrite=True, rules="-",
                        stdin=core_no_txt + '=' + core_no_txt)
        # Converting raster core to point feature
        run_grass_cmd("r.to.vect", flags="z", input=focal_core_rast,
                      output=core_points, type="point")
        # Running r.walk to create CWD and back raster
        run_grass_cmd("r.walk", elevation=climate_lyr,
                      friction=resist_lyr, output=gcwd, outdir=gback,
                      start_points=core_points, walk_coeff=walk_coeff,
                      slope_factor=slope_factor)
        # Reclassify back raster directional degree output to ArcGIS format
        write_grass_cmd("r.reclass", input=gback, output=gbackrc,
                        rules="-", stdin=rc_rules)
        # Get spatial reference for defining ARCINFO raster projections
        desc_data = arcpy.Describe(cc_env.prj_core_rast)
        spatial_ref = desc_data.spatialReference
        # Get cwd path (e.g. ..\datapass\cwd\cw\cwd_3)
        cwd_path = lm_util.get_cwd_path(core_no)

        def create_arcgrid(rtype, grass_grid):
            """Export GRASS raster to ASCII grid and then to ARCINFO grid"""
            ascii_grid = os.path.join(cc_env.out_dir,
                                      rtype + core_no_txt + ".asc")
            arc_grid = cwd_path.replace("cwd_", rtype)
            run_grass_cmd("r.out.arc", input=grass_grid, output=ascii_grid)
            arcpy.CopyRaster_management(ascii_grid, arc_grid)
            arcpy.DefineProjection_management(arc_grid, spatial_ref)
            os.remove(ascii_grid)
        create_arcgrid("cwd_", gcwd)  # Export CWD raster
        create_arcgrid("back_", gbackrc)  # Export reclassified back raster
def gen_cwd_back(core_list, climate_lyr, resist_lyr, core_lyr):
    """Generate CWD and back rasters using r.walk in GRASS.

    Args:
        core_list: Iterable of core ids to process.
        climate_lyr: Name of the GRASS climate (elevation) layer.
        resist_lyr: Name of the GRASS resistance (friction) layer.
        core_lyr: Name of the GRASS cores layer.

    For each core, r.walk produces a cost-weighted distance raster and a
    back (direction) raster, which are exported via GDAL AAIGrid to
    ARCINFO grids.
    """
    slope_factor = "1"
    walk_coeff_flat = "1"
    walk_coeff_uphill = str(cc_env.climate_cost)
    walk_coeff_downhill = str(cc_env.climate_cost * -1)
    walk_coeff = (walk_coeff_flat + "," + walk_coeff_uphill + "," +
                  walk_coeff_downhill + "," + walk_coeff_downhill)
    focal_core_rast = "focal_core_rast"
    gcwd = "gcwd"
    gback = "gback"
    gbackrc = "gbackrc"
    core_points = "corepoints"
    no_cores = str(len(core_list))
    # Map from directional degree output from GRASS to Arc's 1 to 8
    # directions format. See r.walk source code and ArcGIS's
    # 'Understanding cost distance analysis' help page.
    rc_rules = "180=5\n225=4\n270=3\n315=2\n360=1\n45=8\n90=7\n135=6"
    # Removed a no-op "try/except Exception: raise" wrapper; the fixed
    # docstring opener (was four quotes) is the other change.
    for position, core_no in enumerate(core_list):
        core_no_txt = str(core_no)
        lm_util.gprint("Generating CWD and back rasters for Core " +
                       core_no_txt + " (" + str(position + 1) + "/" +
                       no_cores + ")")
        # Pull out focal core for cwd analysis
        write_grass_cmd("r.reclass", input=core_lyr,
                        output=focal_core_rast, overwrite=True, rules="-",
                        stdin=core_no_txt + '=' + core_no_txt)
        # Converting raster core to point feature
        run_grass_cmd("r.to.vect", flags="z", input=focal_core_rast,
                      output=core_points, type="point")
        # Running r.walk to create CWD and back raster
        run_grass_cmd("r.walk", elevation=climate_lyr,
                      friction=resist_lyr, output=gcwd, outdir=gback,
                      start_points=core_points, walk_coeff=walk_coeff,
                      slope_factor=slope_factor)
        # Reclassify back raster directional degree output to ArcGIS format
        write_grass_cmd("r.reclass", input=gback, output=gbackrc,
                        rules="-", stdin=rc_rules)
        # Get spatial reference for defining ARCINFO raster projections
        desc_data = arcpy.Describe(cc_env.prj_core_rast)
        spatial_ref = desc_data.spatialReference
        # Get cwd path (e.g. ..\datapass\cwd\cw\cwd_3)
        cwd_path = lm_util.get_cwd_path(core_no)

        def create_arcgrid(rtype, grass_grid):
            """Export GRASS raster to ASCII grid and then to ARCINFO grid"""
            ascii_grid = os.path.join(cc_env.scratch_dir,
                                      rtype + core_no_txt + ".asc")
            arc_grid = cwd_path.replace("cwd_", rtype)
            run_grass_cmd("r.out.gdal", input=grass_grid,
                          output=ascii_grid, format="AAIGrid")
            arcpy.CopyRaster_management(ascii_grid, arc_grid)
            arcpy.DefineProjection_management(arc_grid, spatial_ref)
            cc_util.arc_delete(ascii_grid)
        create_arcgrid("cwd_", gcwd)  # Export CWD raster
        create_arcgrid("back_", gbackrc)  # Export reclassified back raster
def create_lnk_tbl(corefc, core_pairs, frm_cores):
    """Create link table file and limit based on near table results.

    Args:
        corefc: Core polygon feature class (possibly simplified).
        core_pairs: List of (from_core, to_core) id string pairs.
        frm_cores: List of unique 'from' core id strings.

    Returns:
        A set of core ids that have at least one pairing within the
        configured Euclidean distance bounds.
    """
    fcore_vw = "fcore_vw"
    tcore_vw = "tcore_vw"
    jtocore_fn = cc_env.core_fld[:8] + "_1"  # dbf field length
    near_tbl = os.path.join(cc_env.out_dir, "neartbl.dbf")
    link_file = os.path.join(lm_env.DATAPASSDIR, "linkTable_s2.csv")
    link_tbl, srow, srows = None, None, None
    try:
        link_tbl = open(link_file, 'wb')
        writer = csv.writer(link_tbl, delimiter=',')
        headings = ["# link", "coreId1", "coreId2", "cluster1", "cluster2",
                    "linkType", "eucDist", "lcDist", "eucAdj", "cwdAdj"]
        writer.writerow(headings)
        core_list = set()
        no_cores = str(len(frm_cores))
        i = 1
        coreid_fld = arcpy.AddFieldDelimiters(corefc, cc_env.core_fld)
        for core_no, frm_core in enumerate(frm_cores):
            # From cores
            expression = coreid_fld + " = " + frm_core
            arcpy.MakeFeatureLayer_management(corefc, fcore_vw, expression)
            # To cores
            to_cores_lst = [x[1] for x in core_pairs if frm_core == x[0]]
            to_cores = ', '.join(to_cores_lst)
            expression = coreid_fld + " in (" + to_cores + ")"
            arcpy.MakeFeatureLayer_management(corefc, tcore_vw, expression)
            lm_util.gprint("Calculating Euclidean distance/s from Core " +
                           frm_core + " to " + str(len(to_cores_lst)) +
                           " other cores" + " (" + str(core_no + 1) + "/" +
                           no_cores + ")")
            # Generate near table for these core pairings
            arcpy.GenerateNearTable_analysis(
                fcore_vw, tcore_vw, near_tbl, cc_env.max_euc_dist,
                "NO_LOCATION", "NO_ANGLE", "ALL")
            # Join near table to core table
            arcpy.JoinField_management(near_tbl, "IN_FID", corefc, "FID",
                                       cc_env.core_fld)
            arcpy.JoinField_management(near_tbl, "NEAR_FID", corefc, "FID",
                                       cc_env.core_fld)
            # Limit pairings based on inputted Euclidean distances
            srow, srows = None, None
            euc_dist_fld = arcpy.AddFieldDelimiters(near_tbl, "NEAR_DIST")
            expression = (euc_dist_fld + " > " + str(cc_env.min_euc_dist))
            srows = arcpy.SearchCursor(near_tbl, expression, "",
                                       jtocore_fn + "; NEAR_DIST",
                                       jtocore_fn + " A; NEAR_DIST A")
            # Process near table and output into a link table
            srow = srows.next()
            if srow:
                core_list.add(int(frm_core))
                while srow:
                    to_coreid = srow.getValue(jtocore_fn)
                    dist_value = srow.getValue("NEAR_DIST")
                    writer.writerow([i, frm_core, to_coreid, -1, -1, 1,
                                     dist_value, -1, -1, -1])
                    core_list.add(to_coreid)
                    srow = srows.next()
                    i += 1
    # Removed a no-op "except Exception: raise"; errors still propagate
    # and the finally block still releases cursors and temp features.
    finally:
        cc_util.delete_features(
            [near_tbl, os.path.splitext(corefc)[0] + "_Pnt.shp"])
        if link_tbl:
            link_tbl.close()
        if srow:
            del srow
        if srows:
            del srows
    return core_list
def create_lnk_tbl(corefc, core_pairs, frm_cores):
    """Create link table file and limit based on near table results.

    Args:
        corefc: Core polygon feature class (possibly simplified).
        core_pairs: List of (from_core, to_core) id string pairs.
        frm_cores: List of unique 'from' core id strings.

    Returns:
        A set of core ids that have at least one pairing within the
        configured Euclidean distance bounds.
    """
    # Temporary query layers
    fcore_vw = "fcore_vw"
    tcore_vw = "tcore_vw"
    # No output if near table in gdb need to use dbf instead
    near_tbl = os.path.join(cc_env.scratch_dir, "neartbl.dbf")
    jtocore_fn = cc_env.core_fld[:8] + "_1"  # dbf field length
    link_file = os.path.join(lm_env.DATAPASSDIR, "linkTable_s2.csv")
    link_tbl, srow, srows = None, None, None
    try:
        link_tbl = open(link_file, 'wb')
        writer = csv.writer(link_tbl, delimiter=',')
        headings = [
            "# link", "coreId1", "coreId2", "cluster1", "cluster2",
            "linkType", "eucDist", "lcDist", "eucAdj", "cwdAdj"
        ]
        writer.writerow(headings)
        core_list = set()
        no_cores = str(len(frm_cores))
        i = 1
        coreid_fld = arcpy.AddFieldDelimiters(corefc, cc_env.core_fld)
        for core_no, frm_core in enumerate(frm_cores):
            # From cores
            expression = coreid_fld + " = " + frm_core
            arcpy.MakeFeatureLayer_management(corefc, fcore_vw, expression)
            # To cores
            to_cores_lst = [x[1] for x in core_pairs if frm_core == x[0]]
            to_cores = ', '.join(to_cores_lst)
            expression = coreid_fld + " in (" + to_cores + ")"
            arcpy.MakeFeatureLayer_management(corefc, tcore_vw, expression)
            lm_util.gprint("Calculating Euclidean distance/s from Core " +
                           frm_core + " to " + str(len(to_cores_lst)) +
                           " other cores" + " (" + str(core_no + 1) + "/" +
                           no_cores + ")")
            # Generate near table for these core pairings
            arcpy.GenerateNearTable_analysis(fcore_vw, tcore_vw, near_tbl,
                                             cc_env.max_euc_dist,
                                             "NO_LOCATION", "NO_ANGLE",
                                             "ALL")
            # Join near table to core table
            arcpy.JoinField_management(near_tbl, "IN_FID", corefc,
                                       "OBJECTID", cc_env.core_fld)
            arcpy.JoinField_management(near_tbl, "NEAR_FID", corefc,
                                       "OBJECTID", cc_env.core_fld)
            # Limit pairings based on inputted Euclidean distances
            srow, srows = None, None
            euc_dist_fld = arcpy.AddFieldDelimiters(near_tbl, "NEAR_DIST")
            expression = (euc_dist_fld + " > " + str(cc_env.min_euc_dist))
            srows = arcpy.SearchCursor(
                near_tbl,
                where_clause=expression,
                fields=jtocore_fn + "; NEAR_DIST",
                sort_fields=jtocore_fn + " A; NEAR_DIST A")
            # Process near table and output into a link table
            srow = srows.next()
            if srow:
                core_list.add(int(frm_core))
                while srow:
                    to_coreid = srow.getValue(jtocore_fn)
                    dist_value = srow.getValue("NEAR_DIST")
                    writer.writerow([
                        i, frm_core, to_coreid, -1, -1, 1, dist_value, -1,
                        -1, -1
                    ])
                    core_list.add(to_coreid)
                    srow = srows.next()
                    i += 1
    # Removed a no-op "except Exception: raise"; errors still propagate
    # and the finally block still releases cursors and the near table.
    finally:
        cc_util.arc_delete(near_tbl)
        if link_tbl:
            link_tbl.close()
        if srow:
            del srow
        if srows:
            del srows
    return core_list
def do_radius_loop():
    """Do radius loop.

    Detects barriers at one search radius: for every active corridor
    link, masks the two cores' CWD rasters, runs annulus focal
    statistics, computes the per-pair barrier (and optional percent and
    trim) rasters, mosaics results across pairs, then writes the final
    radius-level rasters to the barrier geodatabase.

    NOTE: closure - reads link_table_tmp, radius, map_units,
    num_corridor_links, num_links, core_list, cbarrierdir, rad_id,
    prefix, sum_suffix, resist_fill_ras and cfg from the enclosing
    scope; behavior depends on those being set by the caller.
    """
    link_table = link_table_tmp.copy()
    # NOTE(review): time.clock() is deprecated/removed in Python 3.8+
    start_time = time.clock()
    link_loop = 0
    pct_done = 0
    gprint('\nMapping barriers at a radius of ' + str(radius) +
           ' ' + str(map_units))
    if cfg.SUM_BARRIERS:
        gprint('using SUM method')
    else:
        gprint('using MAXIMUM method')
    if num_corridor_links > 1:
        gprint('0 percent done')
    last_mosaic_ras = None
    last_mosaic_ras_pct = None
    for x in range(0, num_links):
        pct_done = lu.report_pct_done(
            link_loop, num_corridor_links, pct_done)
        # Only process active corridor links (types 1..999); links are
        # temporarily bumped by +1000 below once handled.
        if ((link_table[x, cfg.LTB_LINKTYPE] > 0) and
                (link_table[x, cfg.LTB_LINKTYPE] < 1000)):
            link_loop = link_loop + 1
            # source and target cores
            corex = int(core_list[x, 0])
            corey = int(core_list[x, 1])
            # Get cwd rasters for source and target cores
            cwd_ras1 = lu.get_cwd_path(corex)
            cwd_ras2 = lu.get_cwd_path(corey)
            # Mask out areas above CWD threshold
            cwd_tmp1 = None
            cwd_tmp2 = None
            if cfg.BARRIER_CWD_THRESH is not None:
                if x == 1:
                    lu.dashline(1)
                    gprint(' Using CWD threshold of ' +
                           str(cfg.BARRIER_CWD_THRESH) + ' map units.')
                arcpy.env.extent = cfg.RESRAST
                arcpy.env.cellSize = cfg.RESRAST
                arcpy.env.snapRaster = cfg.RESRAST
                cwd_tmp1 = path.join(cfg.SCRATCHDIR, "tmp" + str(corex))
                out_con = arcpy.sa.Con(
                    cwd_ras1 < float(cfg.BARRIER_CWD_THRESH), cwd_ras1)
                out_con.save(cwd_tmp1)
                cwd_ras1 = cwd_tmp1
                cwd_tmp2 = path.join(cfg.SCRATCHDIR, "tmp" + str(corey))
                out_con = arcpy.sa.Con(
                    cwd_ras2 < float(cfg.BARRIER_CWD_THRESH), cwd_ras2)
                out_con.save(cwd_tmp2)
                cwd_ras2 = cwd_tmp2
            focal_ras1 = lu.get_focal_path(corex, radius)
            focal_ras2 = lu.get_focal_path(corey, radius)
            link = lu.get_links_from_core_pairs(link_table, corex, corey)
            lc_dist = float(link_table[link, cfg.LTB_CWDIST])
            # Detect barriers at radius using neighborhood stats
            # Create the Neighborhood Object
            inner_radius = radius - 1
            outer_radius = radius
            dia = 2 * radius
            in_neighborhood = ("ANNULUS " + str(inner_radius) + " " +
                               str(outer_radius) + " MAP")

            @Retry(10)
            def exec_focal():
                """Execute focal statistics."""
                # Minimum CWD within the annulus around each cell; skipped
                # if the focal raster already exists from a prior run.
                if not path.exists(focal_ras1):
                    arcpy.env.extent = cwd_ras1
                    out_focal_stats = arcpy.sa.FocalStatistics(
                        cwd_ras1, in_neighborhood, "MINIMUM", "DATA")
                    if SET_CORES_TO_NULL:
                        # Set areas overlapping cores to NoData xxx
                        out_focal_stats2 = arcpy.sa.Con(
                            out_focal_stats > 0, out_focal_stats)
                        out_focal_stats2.save(focal_ras1)
                    else:
                        out_focal_stats.save(focal_ras1)
                    arcpy.env.extent = cfg.RESRAST
                if not path.exists(focal_ras2):
                    arcpy.env.extent = cwd_ras2
                    out_focal_stats = arcpy.sa.FocalStatistics(
                        cwd_ras2, in_neighborhood, "MINIMUM", "DATA")
                    if SET_CORES_TO_NULL:
                        # Set areas overlapping cores to NoData xxx
                        out_focal_stats2 = arcpy.sa.Con(
                            out_focal_stats > 0, out_focal_stats)
                        out_focal_stats2.save(focal_ras2)
                    else:
                        out_focal_stats.save(focal_ras2)
                    arcpy.env.extent = cfg.RESRAST
            exec_focal()
            lu.delete_data(cwd_tmp1)
            lu.delete_data(cwd_tmp2)
            barrier_ras = path.join(
                cbarrierdir, "b" + str(radius) + "_" + str(corex) +
                "_" + str(corey) + '.tif')
            # Need to set nulls to 0,
            # also create trim rasters as we go
            if cfg.SUM_BARRIERS:
                # Barrier improvement score per map unit restored
                out_ras = ((lc_dist - arcpy.sa.Raster(focal_ras1) -
                            arcpy.sa.Raster(focal_ras2) - dia) / dia)
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(out_ras), 0, out_ras)
                out_con2 = arcpy.sa.Con(out_con < 0, 0, out_con)
                out_con2.save(barrier_ras)
                # Execute FocalStatistics to fill out search radii
                in_neighborhood = ("CIRCLE " + str(outer_radius) + " MAP")
                fill_ras = path.join(
                    cbarrierdir, "b" + str(radius) + "_" + str(corex) +
                    "_" + str(corey) + "_fill.tif")
                out_focal_stats = arcpy.sa.FocalStatistics(
                    barrier_ras, in_neighborhood, "MAXIMUM", "DATA")
                out_focal_stats.save(fill_ras)
                if cfg.WRITE_TRIM_RASTERS:
                    trm_ras = path.join(
                        cbarrierdir, "b" + str(radius) + "_" + str(corex) +
                        "_" + str(corey) + "_trim.tif")
                    ras_list = [fill_ras, resist_fill_ras]
                    out_cell_statistics = arcpy.sa.CellStatistics(
                        ras_list, "MINIMUM")
                    out_cell_statistics.save(trm_ras)
            else:
                @Retry(10)
                def clac_ben():
                    """Calculate potential benefit.

                    Calculate potential benefit per map unit restored.
                    """
                    out_ras = (
                        (lc_dist - arcpy.sa.Raster(focal_ras1) -
                         arcpy.sa.Raster(focal_ras2) - dia) / dia)
                    out_ras.save(barrier_ras)
                clac_ben()
            if cfg.WRITE_PCT_RASTERS:
                # Calculate % potential benefit per unit restored
                barrier_ras_pct = path.join(
                    cbarrierdir, "b" + str(radius) + "_" + str(corex) +
                    "_" + str(corey) + '_pct.tif')

                @Retry(10)
                def calc_ben_pct():
                    """Calc benefit percentage."""
                    outras = (100 * (arcpy.sa.Raster(barrier_ras) /
                                     lc_dist))
                    outras.save(barrier_ras_pct)
                calc_ben_pct()
            # Mosaic barrier results across core area pairs
            mosaic_dir = path.join(cfg.SCRATCHDIR, 'mos' + str(rad_id) +
                                   '_' + str(x + 1))
            lu.create_dir(mosaic_dir)
            mos_fn = 'mos_temp'
            tmp_mosaic_ras = path.join(mosaic_dir, mos_fn)
            tmp_mosaic_ras_trim = path.join(mosaic_dir, 'mos_temp_trm')
            arcpy.env.workspace = mosaic_dir
            if link_loop == 1:
                last_mosaic_ras_trim = None
                # For first grid copy rather than mosaic
                arcpy.CopyRaster_management(barrier_ras, tmp_mosaic_ras)
                if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    arcpy.CopyRaster_management(
                        trm_ras, tmp_mosaic_ras_trim)
            else:
                if cfg.SUM_BARRIERS:
                    # Running sum with the previous mosaic; negative cells
                    # keep the previous value
                    out_con = arcpy.sa.Con(
                        arcpy.sa.Raster(barrier_ras) < 0,
                        last_mosaic_ras,
                        arcpy.sa.Raster(barrier_ras) +
                        arcpy.sa.Raster(last_mosaic_ras))
                    out_con.save(tmp_mosaic_ras)
                    if cfg.WRITE_TRIM_RASTERS:
                        out_con = arcpy.sa.Con(
                            arcpy.sa.Raster(trm_ras) < 0,
                            last_mosaic_ras_trim,
                            arcpy.sa.Raster(trm_ras) +
                            arcpy.sa.Raster(last_mosaic_ras_trim)
                        )
                        out_con.save(tmp_mosaic_ras_trim)
                else:
                    in_rasters = (";".join([barrier_ras, last_mosaic_ras]))

                    @Retry(10)
                    def mosaic_to_new():
                        """Mosaic to new raster."""
                        arcpy.MosaicToNewRaster_management(
                            input_rasters=in_rasters,
                            output_location=mosaic_dir,
                            raster_dataset_name_with_extension=mos_fn,
                            pixel_type="32_BIT_FLOAT",
                            cellsize=arcpy.env.cellSize,
                            number_of_bands="1",
                            mosaic_method="MAXIMUM")
                    mosaic_to_new()
            if link_loop > 1:
                # Clean up from previous loop
                lu.delete_data(last_mosaic_ras)
                last_mosaic_dir = path.dirname(last_mosaic_ras)
                lu.clean_out_workspace(last_mosaic_dir)
                lu.delete_dir(last_mosaic_dir)
            last_mosaic_ras = tmp_mosaic_ras
            if cfg.WRITE_TRIM_RASTERS:
                last_mosaic_ras_trim = tmp_mosaic_ras_trim
            if cfg.WRITE_PCT_RASTERS:
                # Same mosaic pattern for the percent-benefit rasters
                mos_pct_fn = 'mos_temp_pct'
                mosaic_dir_pct = path.join(cfg.SCRATCHDIR,
                                           'mosP' + str(rad_id) + '_' +
                                           str(x + 1))
                lu.create_dir(mosaic_dir_pct)
                tmp_mosaic_ras_pct = path.join(mosaic_dir_pct, mos_pct_fn)
                if link_loop == 1:
                    # If this is the first grid then copy
                    # rather than mosaic
                    if cfg.SUM_BARRIERS:
                        out_con = arcpy.sa.Con(
                            arcpy.sa.Raster(barrier_ras_pct) < 0, 0,
                            arcpy.sa.Con(arcpy.sa.IsNull
                                         (barrier_ras_pct), 0,
                                         barrier_ras_pct))
                        out_con.save(tmp_mosaic_ras_pct)
                    else:
                        arcpy.CopyRaster_management(
                            barrier_ras_pct, tmp_mosaic_ras_pct)
                else:
                    if cfg.SUM_BARRIERS:
                        @Retry(10)
                        def sum_barriers():
                            """Sum barriers."""
                            out_con = arcpy.sa.Con(
                                arcpy.sa.Raster(barrier_ras_pct) < 0,
                                last_mosaic_ras_pct,
                                arcpy.sa.Raster(barrier_ras_pct) +
                                arcpy.sa.Raster(
                                    last_mosaic_ras_pct))
                            out_con.save(tmp_mosaic_ras_pct)
                        sum_barriers()
                    else:
                        in_rasters = (";".join([barrier_ras_pct,
                                                last_mosaic_ras_pct]))

                        @Retry(10)
                        def max_barriers():
                            """Get max barriers."""
                            arcpy.MosaicToNewRaster_management(
                                input_rasters=in_rasters,
                                output_location=mosaic_dir_pct,
                                raster_dataset_name_with_extension
                                =mos_pct_fn,
                                pixel_type="32_BIT_FLOAT",
                                cellsize=arcpy.env.cellSize,
                                number_of_bands="1",
                                mosaic_method="MAXIMUM")
                        max_barriers()
                if link_loop > 1:
                    # Clean up from previous loop
                    lu.delete_data(last_mosaic_ras_pct)
                    last_mosaic_dir_pct = path.dirname(
                        last_mosaic_ras_pct)
                    lu.clean_out_workspace(last_mosaic_dir_pct)
                    lu.delete_dir(last_mosaic_dir_pct)
                last_mosaic_ras_pct = tmp_mosaic_ras_pct
            if not cfg.SAVEBARRIERRASTERS:
                lu.delete_data(barrier_ras)
                if cfg.WRITE_PCT_RASTERS:
                    lu.delete_data(barrier_ras_pct)
                if cfg.WRITE_TRIM_RASTERS:
                    lu.delete_data(trm_ras)
            # Temporarily disable links in linktable -
            # don't want to mosaic them twice
            for y in range(x + 1, num_links):
                corex1 = int(core_list[y, 0])
                corey1 = int(core_list[y, 1])
                if corex1 == corex and corey1 == corey:
                    link_table[y, cfg.LTB_LINKTYPE] = (
                        link_table[y, cfg.LTB_LINKTYPE] + 1000)
                elif corex1 == corey and corey1 == corex:
                    link_table[y, cfg.LTB_LINKTYPE] = (
                        link_table[y, cfg.LTB_LINKTYPE] + 1000)
    if num_corridor_links > 1 and pct_done < 100:
        gprint('100 percent done')
    gprint('Summarizing barrier data for search radius.')
    # Rows that were temporarily disabled
    rows = npy.where(link_table[:, cfg.LTB_LINKTYPE] > 1000)
    link_table[rows, cfg.LTB_LINKTYPE] = (
        link_table[rows, cfg.LTB_LINKTYPE] - 1000)
    # -----------------------------------------------------------------
    # Set negative values to null or zero and write geodatabase.
    mosaic_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad" +
                 str(radius))
    mosaic_ras = path.join(cfg.BARRIERGDB, mosaic_fn)
    arcpy.env.extent = cfg.RESRAST
    out_set_null = arcpy.sa.SetNull(tmp_mosaic_ras, tmp_mosaic_ras,
                                    "VALUE < 0")  # xxx orig
    out_set_null.save(mosaic_ras)
    lu.delete_data(tmp_mosaic_ras)
    if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
        mosaic_fn = (prefix + "_BarrierCircles_RBMin" + sum_suffix +
                     "_Rad" + str(radius))
        mosaic_ras_trim = path.join(cfg.BARRIERGDB, mosaic_fn)
        arcpy.CopyRaster_management(tmp_mosaic_ras_trim, mosaic_ras_trim)
        # NOTE(review): deletes tmp_mosaic_ras (already deleted above)
        # rather than tmp_mosaic_ras_trim - looks like a copy/paste slip;
        # confirm before changing.
        lu.delete_data(tmp_mosaic_ras)
    if cfg.WRITE_PCT_RASTERS:
        # Do same for percent raster
        mosaic_pct_fn = (prefix + "_BarrierCenters_Pct" + sum_suffix +
                         "_Rad" + str(radius))
        arcpy.env.extent = cfg.RESRAST
        out_set_null = arcpy.sa.SetNull(tmp_mosaic_ras_pct,
                                        tmp_mosaic_ras_pct, "VALUE < 0")
        mosaic_ras_pct = path.join(cfg.BARRIERGDB, mosaic_pct_fn)
        out_set_null.save(mosaic_ras_pct)
        lu.delete_data(tmp_mosaic_ras_pct)
    # 'Grow out' maximum restoration gain to
    # neighborhood size for display
    in_neighborhood = "CIRCLE " + str(outer_radius) + " MAP"
    # Execute FocalStatistics
    fill_ras_fn = "barriers_fill" + str(outer_radius) + TIF
    fill_ras = path.join(cfg.BARRIERBASEDIR, fill_ras_fn)
    out_focal_stats = arcpy.sa.FocalStatistics(
        mosaic_ras, in_neighborhood, "MAXIMUM", "DATA")
    out_focal_stats.save(fill_ras)
    if cfg.WRITE_PCT_RASTERS:
        # Do same for percent raster
        fill_ras_pct_fn = (
            "barriers_fill_pct" + str(outer_radius) + TIF)
        fill_ras_pct = path.join(cfg.BARRIERBASEDIR, fill_ras_pct_fn)
        out_focal_stats = arcpy.sa.FocalStatistics(
            mosaic_ras_pct, in_neighborhood, "MAXIMUM", "DATA")
        out_focal_stats.save(fill_ras_pct)
    # Place copies of filled rasters in output geodatabase
    arcpy.env.workspace = cfg.BARRIERGDB
    fill_ras_fn = (prefix + "_BarrrierCircles" + sum_suffix + "_Rad" +
                   str(outer_radius))
    arcpy.CopyRaster_management(fill_ras, fill_ras_fn)
    if cfg.WRITE_PCT_RASTERS:
        fill_ras_pct_fn = (prefix + "_BarrrierCircles_Pct" + sum_suffix +
                           "_Rad" + str(outer_radius))
        arcpy.CopyRaster_management(fill_ras_pct, fill_ras_pct_fn)
    if not cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
        # Create pared-down version of filled raster- remove pixels
        # that don't need restoring by allowing a pixel to only
        # contribute its resistance value to restoration gain
        out_ras_fn = "barriers_trm" + str(outer_radius) + TIF
        out_ras = path.join(cfg.BARRIERBASEDIR, out_ras_fn)
        ras_list = [fill_ras, resist_fill_ras]
        out_cell_statistics = arcpy.sa.CellStatistics(ras_list,
                                                      "MINIMUM")
        out_cell_statistics.save(out_ras)
        # SECOND ROUND TO CLIP BY DATA VALUES IN BARRIER RASTER
        out_ras_2fn = ("barriers_trm" + sum_suffix + str(outer_radius) +
                       "_2" + TIF)
        out_ras2 = path.join(cfg.BARRIERBASEDIR, out_ras_2fn)
        output = arcpy.sa.Con(arcpy.sa.IsNull(fill_ras), fill_ras,
                              out_ras)
        output.save(out_ras2)
        out_ras_fn = (prefix + "_BarrierCircles_RBMin" + sum_suffix +
                      "_Rad" + str(outer_radius))
        arcpy.CopyRaster_management(out_ras2, out_ras_fn)
    start_time = lu.elapsed_time(start_time)
def step6_calc_barriers():
    """Detect influential barriers given CWD calculations from Step 3.

    For each detection radius in [STARTRADIUS, ENDRADIUS] (stepped by
    RADIUSSTEP), scores each link's potential restoration benefit using
    annulus focal statistics on the CWD rasters of its two cores, mosaics
    the per-link barrier rasters (SUM or MAXIMUM method per cfg.SUM_BARRIERS),
    and writes results to the barrier geodatabase. Optional percent-benefit
    and trimmed rasters are produced per cfg.WRITE_PCT_RASTERS /
    cfg.WRITE_TRIM_RASTERS. Exits via lu.exit_with_*_error on failure.
    """
    try:
        arcpy.CheckOutExtension("spatial")
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)
        if cfg.BARRIER_CWD_THRESH is not None:
            lu.dashline(1)
            gprint('Invoking CWD Threshold of ' + str(cfg.BARRIER_CWD_THRESH)
                   + ' map units.')
        if cfg.SUM_BARRIERS:
            # SUM method writes to its own base dir / gdb (suffix '_Sum')
            # so MAX and SUM runs do not overwrite each other.
            sum_suffix = '_Sum'
            cfg.BARRIERBASEDIR = cfg.BARRIERBASEDIR + sum_suffix
            base_name, extension = path.splitext(cfg.BARRIERGDB)
            cfg.BARRIERGDB = base_name + sum_suffix + extension
            gprint('\nBarrier scores will be SUMMED across core pairs.')
        else:
            sum_suffix = ''
        if not arcpy.Exists(cfg.BARRIERGDB):
            # Create output geodatabase
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                           path.basename(cfg.BARRIERGDB))
        start_radius = int(cfg.STARTRADIUS)
        end_radius = int(cfg.ENDRADIUS)
        radius_step = int(cfg.RADIUSSTEP)
        if radius_step == 0:
            end_radius = start_radius  # Calculate at just one radius value
            radius_step = 1
        link_table_file = lu.get_prev_step_link_table(step=6)
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        prefix = path.basename(cfg.PROJECTDIR)
        # For speed:
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = arcpy.Describe(cfg.RESRAST).MeanCellHeight
        arcpy.env.snapRaster = cfg.RESRAST
        spatialref = arcpy.Describe(cfg.RESRAST).spatialReference
        map_units = (str(spatialref.linearUnitName)).lower()
        # Pluralize unit name for display (e.g. "meter" -> "meters").
        if len(map_units) > 1 and map_units[-1] != 's':
            map_units = map_units + 's'

        if (float(arcpy.env.cellSize) > start_radius
                or start_radius > end_radius):
            msg = ('Error: minimum detection radius must be greater than '
                   'cell size (' + arcpy.env.cellSize
                   + ') \nand less than or equal to maximum detection radius.')
            lu.raise_error(msg)

        link_table = lu.load_link_table(link_table_file)
        num_links = link_table.shape[0]
        num_corridor_links = lu.report_links(link_table)
        if num_corridor_links == 0:
            lu.dashline(1)
            msg = '\nThere are no linkages. Bailing.'
            lu.raise_error(msg)

        # set up directories for barrier and barrier mosaic grids
        gprint("Creating intermediate output folder: " + cfg.BARRIERBASEDIR)
        lu.delete_dir(cfg.BARRIERBASEDIR)
        lu.create_dir(cfg.BARRIERBASEDIR)
        arcpy.CreateFolder_management(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)
        cbarrierdir = path.join(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)

        cores_to_process = npy.unique(
            link_table[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        max_core_num = max(cores_to_process)

        # Set up focal directories.
        # To keep there from being > 100 grids in any one directory,
        # outputs are written to:
        # barrier\focalX_ for cores 1-99 at radius X
        # barrier\focalX_1 for cores 100-199
        # etc.
        lu.dashline(0)
        for radius in range(start_radius, end_radius + 1, radius_step):
            core1path = lu.get_focal_path(1, radius)
            path1 = path.split(core1path)[0]
            path2, dir2 = path.split(path1)
            arcpy.CreateFolder_management(path.dirname(path2),
                                          path.basename(path2))
            arcpy.CreateFolder_management(path.dirname(path1),
                                          path.basename(path1))

            if max_core_num > 99:
                gprint('Creating subdirectories for ' + str(radius) + ' '
                       + str(map_units) + ' radius analysis scale.')
                focal_dir_base_name = dir2
                # One subdirectory per block of 100 core IDs.
                cp100 = cores_to_process.astype('int32') // 100
                ind = npy.where(cp100 > 0)
                dir_nums = npy.unique(cp100[ind])
                for dir_num in dir_nums:
                    focal_dir = focal_dir_base_name + str(dir_num)
                    gprint('...' + focal_dir)
                    arcpy.CreateFolder_management(path2, focal_dir)

        # Create resistance raster with filled-in Nodata values for later use
        arcpy.env.extent = cfg.RESRAST
        resist_fill_ras = path.join(cfg.SCRATCHDIR, "resist_fill")
        output = arcpy.sa.Con(arcpy.sa.IsNull(cfg.RESRAST), 1000000000,
                              arcpy.sa.Raster(cfg.RESRAST) - 1)
        output.save(resist_fill_ras)

        core_list = link_table[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        core_list = npy.sort(core_list)

        # Loop through each search radius to calculate barriers in each link
        rad_id = 0  # Keep track of no of radii processed - used for temp dir
        for radius in range(start_radius, end_radius + 1, radius_step):
            rad_id = rad_id + 1
            link_table_tmp = link_table.copy()

            @Retry(10)  # Can't pass vars in and modify them.
            def do_radius_loop():
                """Do radius loop."""
                # Work on a private copy; link types are temporarily bumped
                # by +1000 below to avoid double-mosaicking duplicate pairs.
                link_table = link_table_tmp.copy()
                # NOTE(review): time.clock() is Python-2 era (removed in
                # Python 3.8) — consistent with this file's Python 2 usage.
                start_time = time.clock()
                link_loop = 0
                pct_done = 0
                gprint('\nMapping barriers at a radius of ' + str(radius)
                       + ' ' + str(map_units))
                if cfg.SUM_BARRIERS:
                    gprint('using SUM method')
                else:
                    gprint('using MAXIMUM method')
                if num_corridor_links > 1:
                    gprint('0 percent done')
                last_mosaic_ras = None
                last_mosaic_ras_pct = None
                for x in range(0, num_links):
                    pct_done = lu.report_pct_done(
                        link_loop, num_corridor_links, pct_done)
                    # Only active links (type in 1..999); >=1000 means
                    # temporarily disabled duplicate (see loop over y below).
                    if ((link_table[x, cfg.LTB_LINKTYPE] > 0) and
                            (link_table[x, cfg.LTB_LINKTYPE] < 1000)):
                        link_loop = link_loop + 1
                        # source and target cores
                        corex = int(core_list[x, 0])
                        corey = int(core_list[x, 1])

                        # Get cwd rasters for source and target cores
                        cwd_ras1 = lu.get_cwd_path(corex)
                        cwd_ras2 = lu.get_cwd_path(corey)

                        # Mask out areas above CWD threshold
                        cwd_tmp1 = None
                        cwd_tmp2 = None
                        if cfg.BARRIER_CWD_THRESH is not None:
                            if x == 1:
                                lu.dashline(1)
                                gprint(' Using CWD threshold of '
                                       + str(cfg.BARRIER_CWD_THRESH)
                                       + ' map units.')
                            arcpy.env.extent = cfg.RESRAST
                            arcpy.env.cellSize = cfg.RESRAST
                            arcpy.env.snapRaster = cfg.RESRAST
                            cwd_tmp1 = path.join(cfg.SCRATCHDIR,
                                                 "tmp" + str(corex))
                            out_con = arcpy.sa.Con(
                                cwd_ras1 < float(cfg.BARRIER_CWD_THRESH),
                                cwd_ras1)
                            out_con.save(cwd_tmp1)
                            cwd_ras1 = cwd_tmp1
                            cwd_tmp2 = path.join(cfg.SCRATCHDIR,
                                                 "tmp" + str(corey))
                            out_con = arcpy.sa.Con(
                                cwd_ras2 < float(cfg.BARRIER_CWD_THRESH),
                                cwd_ras2)
                            out_con.save(cwd_tmp2)
                            cwd_ras2 = cwd_tmp2

                        focal_ras1 = lu.get_focal_path(corex, radius)
                        focal_ras2 = lu.get_focal_path(corey, radius)
                        link = lu.get_links_from_core_pairs(link_table,
                                                            corex, corey)
                        lc_dist = float(link_table[link, cfg.LTB_CWDIST])

                        # Detect barriers at radius using neighborhood stats
                        # Create the Neighborhood Object
                        inner_radius = radius - 1
                        outer_radius = radius
                        dia = 2 * radius
                        in_neighborhood = ("ANNULUS " + str(inner_radius)
                                           + " " + str(outer_radius) + " MAP")

                        @Retry(10)
                        def exec_focal():
                            """Execute focal statistics."""
                            # Focal rasters are cached on disk per
                            # core/radius; skip if already computed.
                            if not path.exists(focal_ras1):
                                arcpy.env.extent = cwd_ras1
                                out_focal_stats = arcpy.sa.FocalStatistics(
                                    cwd_ras1, in_neighborhood,
                                    "MINIMUM", "DATA")
                                if SET_CORES_TO_NULL:
                                    # Set areas overlapping cores to NoData xxx
                                    out_focal_stats2 = arcpy.sa.Con(
                                        out_focal_stats > 0, out_focal_stats)
                                    out_focal_stats2.save(focal_ras1)
                                else:
                                    out_focal_stats.save(focal_ras1)
                                arcpy.env.extent = cfg.RESRAST
                            if not path.exists(focal_ras2):
                                arcpy.env.extent = cwd_ras2
                                out_focal_stats = arcpy.sa.FocalStatistics(
                                    cwd_ras2, in_neighborhood,
                                    "MINIMUM", "DATA")
                                if SET_CORES_TO_NULL:
                                    # Set areas overlapping cores to NoData xxx
                                    out_focal_stats2 = arcpy.sa.Con(
                                        out_focal_stats > 0, out_focal_stats)
                                    out_focal_stats2.save(focal_ras2)
                                else:
                                    out_focal_stats.save(focal_ras2)
                                arcpy.env.extent = cfg.RESRAST
                        exec_focal()

                        lu.delete_data(cwd_tmp1)
                        lu.delete_data(cwd_tmp2)

                        barrier_ras = path.join(
                            cbarrierdir, "b" + str(radius) + "_" + str(corex)
                            + "_" + str(corey)+'.tif')

                        # Need to set nulls to 0,
                        # also create trim rasters as we go
                        if cfg.SUM_BARRIERS:
                            # Improvement score: (CWD saved by restoring this
                            # annulus) per map unit of restoration (dia).
                            out_ras = ((lc_dist - arcpy.sa.Raster(focal_ras1)
                                        - arcpy.sa.Raster(focal_ras2) - dia)
                                       / dia)
                            out_con = arcpy.sa.Con(arcpy.sa.IsNull(out_ras),
                                                   0, out_ras)
                            out_con2 = arcpy.sa.Con(out_con < 0, 0, out_con)
                            out_con2.save(barrier_ras)

                            # Execute FocalStatistics to fill out search radii
                            in_neighborhood = ("CIRCLE " + str(outer_radius)
                                               + " MAP")
                            fill_ras = path.join(
                                cbarrierdir, "b" + str(radius) + "_"
                                + str(corex) + "_" + str(corey) + "_fill.tif")
                            out_focal_stats = arcpy.sa.FocalStatistics(
                                barrier_ras, in_neighborhood,
                                "MAXIMUM", "DATA")
                            out_focal_stats.save(fill_ras)

                            if cfg.WRITE_TRIM_RASTERS:
                                trm_ras = path.join(
                                    cbarrierdir, "b" + str(radius) + "_"
                                    + str(corex) + "_" + str(corey)
                                    + "_trim.tif")
                                ras_list = [fill_ras, resist_fill_ras]
                                out_cell_statistics = arcpy.sa.CellStatistics(
                                    ras_list, "MINIMUM")
                                out_cell_statistics.save(trm_ras)
                        else:
                            @Retry(10)
                            def clac_ben():
                                """Calculate potential benefit.

                                Calculate potential benefit per map unit
                                restored.
                                """
                                out_ras = (
                                    (lc_dist - arcpy.sa.Raster(focal_ras1)
                                     - arcpy.sa.Raster(focal_ras2) - dia)
                                    / dia)
                                out_ras.save(barrier_ras)
                            clac_ben()

                        if cfg.WRITE_PCT_RASTERS:
                            # Calculate % potential benefit per unit restored
                            barrier_ras_pct = path.join(
                                cbarrierdir, "b" + str(radius) + "_"
                                + str(corex) + "_" + str(corey) + '_pct.tif')

                            @Retry(10)
                            def calc_ben_pct():
                                """Calc benefit percentage."""
                                outras = (100 * (arcpy.sa.Raster(barrier_ras)
                                                 / lc_dist))
                                outras.save(barrier_ras_pct)
                            calc_ben_pct()

                        # Mosaic barrier results across core area pairs
                        mosaic_dir = path.join(cfg.SCRATCHDIR, 'mos'
                                               + str(rad_id) + '_'
                                               + str(x + 1))
                        lu.create_dir(mosaic_dir)
                        mos_fn = 'mos_temp'
                        tmp_mosaic_ras = path.join(mosaic_dir, mos_fn)
                        tmp_mosaic_ras_trim = path.join(mosaic_dir,
                                                        'mos_temp_trm')
                        arcpy.env.workspace = mosaic_dir
                        if link_loop == 1:
                            last_mosaic_ras_trim = None
                            # For first grid copy rather than mosaic
                            arcpy.CopyRaster_management(barrier_ras,
                                                        tmp_mosaic_ras)
                            if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                                arcpy.CopyRaster_management(
                                    trm_ras, tmp_mosaic_ras_trim)
                        else:
                            if cfg.SUM_BARRIERS:
                                # SUM: accumulate onto the running mosaic;
                                # negative cells keep the previous value.
                                out_con = arcpy.sa.Con(
                                    arcpy.sa.Raster(barrier_ras) < 0,
                                    last_mosaic_ras,
                                    arcpy.sa.Raster(barrier_ras)
                                    + arcpy.sa.Raster(last_mosaic_ras))
                                out_con.save(tmp_mosaic_ras)
                                if cfg.WRITE_TRIM_RASTERS:
                                    out_con = arcpy.sa.Con(
                                        arcpy.sa.Raster(trm_ras) < 0,
                                        last_mosaic_ras_trim,
                                        arcpy.sa.Raster(trm_ras)
                                        + arcpy.sa.Raster(
                                            last_mosaic_ras_trim))
                                    out_con.save(tmp_mosaic_ras_trim)
                            else:
                                in_rasters = (";".join([barrier_ras,
                                                        last_mosaic_ras]))

                                @Retry(10)
                                def mosaic_to_new():
                                    """Mosaic to new raster."""
                                    arcpy.MosaicToNewRaster_management(
                                        input_rasters=in_rasters,
                                        output_location=mosaic_dir,
                                        raster_dataset_name_with_extension=mos_fn,
                                        pixel_type="32_BIT_FLOAT",
                                        cellsize=arcpy.env.cellSize,
                                        number_of_bands="1",
                                        mosaic_method="MAXIMUM")
                                mosaic_to_new()

                        if link_loop > 1:
                            # Clean up from previous loop
                            lu.delete_data(last_mosaic_ras)
                            last_mosaic_dir = path.dirname(last_mosaic_ras)
                            lu.clean_out_workspace(last_mosaic_dir)
                            lu.delete_dir(last_mosaic_dir)
                        last_mosaic_ras = tmp_mosaic_ras
                        if cfg.WRITE_TRIM_RASTERS:
                            last_mosaic_ras_trim = tmp_mosaic_ras_trim

                        if cfg.WRITE_PCT_RASTERS:
                            mos_pct_fn = 'mos_temp_pct'
                            mosaic_dir_pct = path.join(cfg.SCRATCHDIR, 'mosP'
                                                       + str(rad_id) + '_'
                                                       + str(x+1))
                            lu.create_dir(mosaic_dir_pct)
                            tmp_mosaic_ras_pct = path.join(mosaic_dir_pct,
                                                           mos_pct_fn)
                            if link_loop == 1:
                                # If this is the first grid then copy
                                # rather than mosaic
                                if cfg.SUM_BARRIERS:
                                    out_con = arcpy.sa.Con(
                                        arcpy.sa.Raster(barrier_ras_pct) < 0,
                                        0,
                                        arcpy.sa.Con(arcpy.sa.IsNull
                                                     (barrier_ras_pct),
                                                     0, barrier_ras_pct))
                                    out_con.save(tmp_mosaic_ras_pct)
                                else:
                                    arcpy.CopyRaster_management(
                                        barrier_ras_pct, tmp_mosaic_ras_pct)
                            else:
                                if cfg.SUM_BARRIERS:
                                    @Retry(10)
                                    def sum_barriers():
                                        """Sum barriers."""
                                        out_con = arcpy.sa.Con(
                                            arcpy.sa.Raster(barrier_ras_pct)
                                            < 0,
                                            last_mosaic_ras_pct,
                                            arcpy.sa.Raster(barrier_ras_pct)
                                            + arcpy.sa.Raster(
                                                last_mosaic_ras_pct))
                                        out_con.save(tmp_mosaic_ras_pct)
                                    sum_barriers()
                                else:
                                    in_rasters = (";".join(
                                        [barrier_ras_pct,
                                         last_mosaic_ras_pct]))

                                    @Retry(10)
                                    def max_barriers():
                                        """Get max barriers."""
                                        arcpy.MosaicToNewRaster_management(
                                            input_rasters=in_rasters,
                                            output_location=mosaic_dir_pct,
                                            raster_dataset_name_with_extension=mos_pct_fn,
                                            pixel_type="32_BIT_FLOAT",
                                            cellsize=arcpy.env.cellSize,
                                            number_of_bands="1",
                                            mosaic_method="MAXIMUM")
                                    max_barriers()

                            if link_loop > 1:
                                # Clean up from previous loop
                                lu.delete_data(last_mosaic_ras_pct)
                                last_mosaic_dir_pct = path.dirname(
                                    last_mosaic_ras_pct)
                                lu.clean_out_workspace(last_mosaic_dir_pct)
                                lu.delete_dir(last_mosaic_dir_pct)
                            last_mosaic_ras_pct = tmp_mosaic_ras_pct

                        if not cfg.SAVEBARRIERRASTERS:
                            lu.delete_data(barrier_ras)
                            if cfg.WRITE_PCT_RASTERS:
                                lu.delete_data(barrier_ras_pct)
                            if cfg.WRITE_TRIM_RASTERS:
                                lu.delete_data(trm_ras)

                        # Temporarily disable links in linktable -
                        # don't want to mosaic them twice
                        for y in range(x + 1, num_links):
                            corex1 = int(core_list[y, 0])
                            corey1 = int(core_list[y, 1])
                            if corex1 == corex and corey1 == corey:
                                link_table[y, cfg.LTB_LINKTYPE] = (
                                    link_table[y, cfg.LTB_LINKTYPE] + 1000)
                            elif corex1 == corey and corey1 == corex:
                                link_table[y, cfg.LTB_LINKTYPE] = (
                                    link_table[y, cfg.LTB_LINKTYPE] + 1000)

                if num_corridor_links > 1 and pct_done < 100:
                    gprint('100 percent done')
                gprint('Summarizing barrier data for search radius.')
                # Rows that were temporarily disabled
                rows = npy.where(link_table[:, cfg.LTB_LINKTYPE] > 1000)
                link_table[rows, cfg.LTB_LINKTYPE] = (
                    link_table[rows, cfg.LTB_LINKTYPE] - 1000)

                # -----------------------------------------------------------------
                # Set negative values to null or zero and write geodatabase.
                mosaic_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad"
                             + str(radius))
                mosaic_ras = path.join(cfg.BARRIERGDB, mosaic_fn)
                arcpy.env.extent = cfg.RESRAST
                out_set_null = arcpy.sa.SetNull(tmp_mosaic_ras,
                                                tmp_mosaic_ras,
                                                "VALUE < 0")  # xxx orig
                out_set_null.save(mosaic_ras)
                lu.delete_data(tmp_mosaic_ras)

                if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    mosaic_fn = (prefix + "_BarrierCircles_RBMin" + sum_suffix
                                 + "_Rad" + str(radius))
                    mosaic_ras_trim = path.join(cfg.BARRIERGDB, mosaic_fn)
                    arcpy.CopyRaster_management(tmp_mosaic_ras_trim,
                                                mosaic_ras_trim)
                    lu.delete_data(tmp_mosaic_ras)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    mosaic_pct_fn = (prefix + "_BarrierCenters_Pct"
                                     + sum_suffix + "_Rad" + str(radius))
                    arcpy.env.extent = cfg.RESRAST
                    out_set_null = arcpy.sa.SetNull(tmp_mosaic_ras_pct,
                                                    tmp_mosaic_ras_pct,
                                                    "VALUE < 0")
                    mosaic_ras_pct = path.join(cfg.BARRIERGDB, mosaic_pct_fn)
                    out_set_null.save(mosaic_ras_pct)
                    lu.delete_data(tmp_mosaic_ras_pct)

                # 'Grow out' maximum restoration gain to
                # neighborhood size for display
                in_neighborhood = "CIRCLE " + str(outer_radius) + " MAP"
                # Execute FocalStatistics
                fill_ras_fn = "barriers_fill" + str(outer_radius) + TIF
                fill_ras = path.join(cfg.BARRIERBASEDIR, fill_ras_fn)
                out_focal_stats = arcpy.sa.FocalStatistics(
                    mosaic_ras, in_neighborhood, "MAXIMUM", "DATA")
                out_focal_stats.save(fill_ras)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    fill_ras_pct_fn = (
                        "barriers_fill_pct" + str(outer_radius) + TIF)
                    fill_ras_pct = path.join(cfg.BARRIERBASEDIR,
                                             fill_ras_pct_fn)
                    out_focal_stats = arcpy.sa.FocalStatistics(
                        mosaic_ras_pct, in_neighborhood, "MAXIMUM", "DATA")
                    out_focal_stats.save(fill_ras_pct)

                # Place copies of filled rasters in output geodatabase
                # NOTE(review): "_Barrrier" (triple r) is the historical
                # output name — keep as-is; downstream tools may expect it.
                arcpy.env.workspace = cfg.BARRIERGDB
                fill_ras_fn = (prefix + "_BarrrierCircles" + sum_suffix
                               + "_Rad" + str(outer_radius))
                arcpy.CopyRaster_management(fill_ras, fill_ras_fn)
                if cfg.WRITE_PCT_RASTERS:
                    fill_ras_pct_fn = (prefix + "_BarrrierCircles_Pct"
                                       + sum_suffix + "_Rad"
                                       + str(outer_radius))
                    arcpy.CopyRaster_management(fill_ras_pct, fill_ras_pct_fn)

                if not cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    # Create pared-down version of filled raster- remove pixels
                    # that don't need restoring by allowing a pixel to only
                    # contribute its resistance value to restoration gain
                    out_ras_fn = "barriers_trm" + str(outer_radius) + TIF
                    out_ras = path.join(cfg.BARRIERBASEDIR, out_ras_fn)
                    ras_list = [fill_ras, resist_fill_ras]
                    out_cell_statistics = arcpy.sa.CellStatistics(ras_list,
                                                                  "MINIMUM")
                    out_cell_statistics.save(out_ras)

                    # SECOND ROUND TO CLIP BY DATA VALUES IN BARRIER RASTER
                    out_ras_2fn = ("barriers_trm" + sum_suffix
                                   + str(outer_radius) + "_2" + TIF)
                    out_ras2 = path.join(cfg.BARRIERBASEDIR, out_ras_2fn)
                    output = arcpy.sa.Con(arcpy.sa.IsNull(fill_ras),
                                          fill_ras, out_ras)
                    output.save(out_ras2)
                    out_ras_fn = (prefix + "_BarrierCircles_RBMin"
                                  + sum_suffix + "_Rad" + str(outer_radius))
                    arcpy.CopyRaster_management(out_ras2, out_ras_fn)
                start_time = lu.elapsed_time(start_time)

            # Call the above function
            do_radius_loop()

        # Combine rasters across radii
        gprint('\nCreating summary rasters...')
        if start_radius != end_radius:
            radii_suffix = ('_Rad' + str(int(start_radius)) + 'To'
                            + str(int(end_radius)) + 'Step'
                            + str(int(radius_step)))
            mosaic_fn = "bar_radii"
            mosaic_pct_fn = "bar_radii_pct"
            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range(start_radius, end_radius + 1, radius_step):
                # Fixme: run speed test with gdb mosaicking above and here
                radius_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad"
                             + str(radius))
                radius_ras = path.join(cfg.BARRIERGDB, radius_fn)
                if radius == start_radius:
                    # If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radius_ras, mosaic_fn)
                else:
                    mosaic_ras = path.join(cfg.BARRIERBASEDIR, mosaic_fn)
                    arcpy.Mosaic_management(radius_ras, mosaic_ras,
                                            "MAXIMUM", "MATCH")
                if cfg.WRITE_PCT_RASTERS:
                    radius_pct_fn = (prefix + "_BarrierCenters_Pct"
                                     + sum_suffix + "_Rad" + str(radius))
                    radius_ras_pct = path.join(cfg.BARRIERGDB, radius_pct_fn)
                    if radius == start_radius:
                        # If this is the first grid then copy rather than
                        # mosaic
                        arcpy.CopyRaster_management(radius_ras_pct,
                                                    mosaic_pct_fn)
                    else:
                        mosaic_ras_pct = path.join(cfg.BARRIERBASEDIR,
                                                   mosaic_pct_fn)
                        arcpy.Mosaic_management(radius_ras_pct,
                                                mosaic_ras_pct,
                                                "MAXIMUM", "MATCH")
            # Copy results to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            mosaic_fn = prefix + "_BarrierCenters" + sum_suffix + radii_suffix
            arcpy.CopyRaster_management(mosaic_ras, mosaic_fn)
            if cfg.WRITE_PCT_RASTERS:
                mosaic_pct_fn = (prefix + "_BarrierCenters_Pct" + sum_suffix
                                 + radii_suffix)
                arcpy.CopyRaster_management(mosaic_ras_pct, mosaic_pct_fn)

            # GROWN OUT rasters
            fill_mosaic_fn = "barriers_radii_fill" + TIF
            fill_mosaic_pct_fn = "barriers_radii_fill_pct" + TIF
            fill_mosaic_ras = path.join(cfg.BARRIERBASEDIR, fill_mosaic_fn)
            trim_mosaic_ras_pct = path.join(cfg.BARRIERBASEDIR,
                                            fill_mosaic_pct_fn)
            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range(start_radius, end_radius + 1, radius_step):
                radius_fn = "barriers_fill" + str(radius) + TIF
                # fixme- do this when only a single radius too
                radius_ras = path.join(cfg.BARRIERBASEDIR, radius_fn)
                if radius == start_radius:
                    # If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radius_ras, fill_mosaic_fn)
                else:
                    arcpy.Mosaic_management(radius_ras, fill_mosaic_ras,
                                            "MAXIMUM", "MATCH")
                if cfg.WRITE_PCT_RASTERS:
                    radius_pct_fn = "barriers_fill_pct" + str(radius) + TIF
                    # fixme- do this when only a single radius too
                    radius_ras_pct = path.join(cfg.BARRIERBASEDIR,
                                               radius_pct_fn)
                    if radius == start_radius:
                        # For first grid copy rather than mosaic
                        arcpy.CopyRaster_management(radius_ras_pct,
                                                    fill_mosaic_pct_fn)
                    else:
                        arcpy.Mosaic_management(radius_ras_pct,
                                                trim_mosaic_ras_pct,
                                                "MAXIMUM", "MATCH")
            # Copy result to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            fill_mosaic_fn = (prefix + "_BarrierCircles" + sum_suffix
                              + radii_suffix)
            arcpy.CopyRaster_management(fill_mosaic_ras, fill_mosaic_fn)
            if cfg.WRITE_PCT_RASTERS:
                fill_mosaic_pct_fn = (prefix + "_BarrierCircles_Pct"
                                      + sum_suffix + radii_suffix)
                arcpy.CopyRaster_management(trim_mosaic_ras_pct,
                                            fill_mosaic_pct_fn)

            # GROWN OUT AND TRIMMED rasters (Can't do percent)
            if cfg.WRITE_TRIM_RASTERS:
                trim_mosaic_fn = "bar_radii_trm"
                arcpy.env.workspace = cfg.BARRIERBASEDIR
                trim_mosaic_ras = path.join(cfg.BARRIERBASEDIR,
                                            trim_mosaic_fn)
                for radius in range(start_radius, end_radius + 1,
                                    radius_step):
                    radius_fn = (prefix + "_BarrierCircles_RBMin"
                                 + sum_suffix + "_Rad" + str(radius))
                    # fixme- do this when only a single radius too
                    radius_ras = path.join(cfg.BARRIERGDB, radius_fn)
                    if radius == start_radius:
                        # For first grid copy rather than mosaic
                        arcpy.CopyRaster_management(radius_ras,
                                                    trim_mosaic_fn)
                    else:
                        arcpy.Mosaic_management(radius_ras, trim_mosaic_ras,
                                                "MAXIMUM", "MATCH")
                # Copy result to output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                trim_mosaic_fn = (prefix + "_BarrierCircles_RBMin"
                                  + sum_suffix + radii_suffix)
                arcpy.CopyRaster_management(trim_mosaic_ras, trim_mosaic_fn)

        if not cfg.SAVE_RADIUS_RASTERS:
            # Per-radius outputs are intermediates; keep only summaries.
            arcpy.env.workspace = cfg.BARRIERGDB
            rasters = arcpy.ListRasters()
            for raster in rasters:
                if 'rad' in raster.lower() and 'step' not in raster.lower():
                    lu.delete_data(raster)

        arcpy.env.workspace = cfg.BARRIERGDB
        rasters = arcpy.ListRasters()
        for raster in rasters:
            gprint('\nBuilding output statistics and pyramids\n'
                   'for raster ' + raster)
            lu.build_stats(raster)

        # Clean up temporary files and directories
        if not cfg.SAVEBARRIERRASTERS:
            lu.delete_dir(cbarrierdir)
            lu.delete_dir(cfg.BARRIERBASEDIR)
        if not cfg.SAVEFOCALRASTERS:
            for radius in range(start_radius, end_radius + 1, radius_step):
                core1path = lu.get_focal_path(1, radius)
                path1 = path.split(core1path)[0]
                path2 = path.split(path1)[0]
                lu.delete_dir(path2)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)
    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
    return
def calc_closeness(lcp_lines):
    """Derive a relative-closeness score for every Least Cost Path line.

    Normalizes the LCP_Length field of *lcp_lines* into a Rel_Close field
    using the method configured in lp_env.RELCLOSENORMETH.
    """
    lm_util.gprint("Calculating relative closeness for each LCP line")
    src_field = "LCP_Length"
    dst_field = "Rel_Close"
    # Final True flag inverts the normalization (shorter path -> higher
    # closeness), matching how inverted fields are normalized elsewhere.
    normalize_field(lcp_lines, src_field, dst_field,
                    lp_env.RELCLOSENORMETH, True)
def cc_copy_inputs():
    """Clip Climate Linkage Mapper inputs to smallest extent.

    Copies the climate (and optional resistance) rasters into the project
    inputs geodatabase, aligned to a common analysis extent, rasterizes the
    core feature class at the climate raster's cell size, and clips the core
    features to the shared extent polygon. The temporary extent polygon
    shapefile is always removed, even on failure.
    """
    ext_poly = os.path.join(cc_env.out_dir, "ext_poly.shp")  # Extent polygon
    try:
        lm_util.gprint("\nCOPYING LAYERS AND, IF NECESSARY, REDUCING EXTENT")
        if not arcpy.Exists(cc_env.inputs_gdb):
            arcpy.CreateFileGDB_management(os.path.dirname(cc_env.inputs_gdb),
                                           os.path.basename(cc_env.inputs_gdb))
        climate_extent = arcpy.Raster(cc_env.climate_rast).extent

        if cc_env.resist_rast is not None:
            # Shared extent = intersection of climate and resistance extents.
            resist_extent = arcpy.Raster(cc_env.resist_rast).extent
            xmin = max(climate_extent.XMin, resist_extent.XMin)
            ymin = max(climate_extent.YMin, resist_extent.YMin)
            xmax = min(climate_extent.XMax, resist_extent.XMax)
            ymax = min(climate_extent.YMax, resist_extent.YMax)

            # Set to minimum extent if resistance raster was given
            arcpy.env.extent = arcpy.Extent(xmin, ymin, xmax, ymax)

            # Want climate and resistance rasters in same spatial ref
            # with same nodata cells
            proj_resist_rast = sa.Con(
                sa.IsNull(cc_env.climate_rast),
                sa.Int(cc_env.climate_rast), cc_env.resist_rast)
            proj_resist_rast.save(cc_env.prj_resist_rast)
        else:
            xmin = climate_extent.XMin
            ymin = climate_extent.YMin
            xmax = climate_extent.XMax
            ymax = climate_extent.YMax

            # No resistance input: synthesize a uniform resistance of 1
            # wherever the climate raster has data.
            # Copying to gdb avoids gdal conflict later with ascii conversion
            ones_resist_rast = sa.Con(
                sa.IsNull(cc_env.climate_rast),
                sa.Int(cc_env.climate_rast), 1)
            ones_resist_rast.save(cc_env.prj_resist_rast)

        arcpy.CopyRaster_management(cc_env.climate_rast,
                                    cc_env.prj_climate_rast)

        # Create core raster
        arcpy.env.extent = arcpy.Extent(xmin, ymin, xmax, ymax)
        lm_util.delete_data(cc_env.prj_core_rast)
        arcpy.FeatureToRaster_conversion(
            cc_env.core_fc, cc_env.core_fld,
            cc_env.prj_core_rast,
            arcpy.Describe(cc_env.climate_rast).MeanCellHeight)
        arcpy.env.extent = None

        # Create array of boundary points
        array = arcpy.Array()
        pnt = arcpy.Point(xmin, ymin)
        array.add(pnt)
        pnt = arcpy.Point(xmax, ymin)
        array.add(pnt)
        pnt = arcpy.Point(xmax, ymax)
        array.add(pnt)
        pnt = arcpy.Point(xmin, ymax)
        array.add(pnt)
        # Add in the first point of the array again to close polygon boundary
        array.add(array.getObject(0))
        # Create a polygon geometry object using the array object
        ext_feat = arcpy.Polygon(array)
        arcpy.CopyFeatures_management(ext_feat, ext_poly)
        # Clip core feature class
        arcpy.Clip_analysis(cc_env.core_fc, ext_poly, cc_env.prj_core_fc)
    finally:
        # Removed redundant 'except Exception: raise' clause - a bare
        # re-raise of everything is a no-op; try/finally alone preserves
        # the exact same propagation behavior while guaranteeing cleanup.
        cc_util.delete_features(ext_poly)
def cav():
    """Calculate Core Area Value (CAV) and its components for each core.

    Adds/updates per-core fields on lp_env.COREFC (mean resistance, area,
    perimeter, area/perimeter ratio, ECAV, CFC, optional OCAV), normalizes
    each component, and combines them into a weighted 'cav' score plus a
    score-range-normalized 'norm_cav'.

    Raises:
        Exception: if CFCWEIGHT > 0 but no non-zero CF_Central values exist.
    """
    lm_util.gprint(
        "Calculating Core Area Value (CAV) and its components for each core")
    arcpy.MakeFeatureLayer_management(lp_env.COREFC, "core_lyr")

    # check weights and warn if issues
    # Fixed: replaced Python-2-only '<>' operator with '!=' (identical
    # semantics in Python 2, and valid in Python 3).
    # NOTE(review): exact float comparison to 1.0 — weights that sum to 1.0
    # only within rounding error will still trigger the warning; the warning
    # text is informational only, so behavior is preserved as-is.
    weight_sum = (lp_env.RESWEIGHT + lp_env.SIZEWEIGHT + lp_env.APWEIGHT
                  + lp_env.ECAVWEIGHT + lp_env.CFCWEIGHT)
    if lp_env.OCAVRAST_IN:
        if weight_sum + lp_env.OCAVWEIGHT != 1.0:
            lm_util.gprint(
                "Warning: RESWEIGHT + SIZEWEIGHT + APWEIGHT + ECAVWEIGHT + CFCWEIGHT + OCAVWEIGHT <> 1.0"
            )
    else:
        if weight_sum != 1.0:
            lm_util.gprint(
                "Warning: RESWEIGHT + SIZEWEIGHT + APWEIGHT + ECAVWEIGHT + CFCWEIGHT <> 1.0"
            )
    if lp_env.OCAVWEIGHT > 0 and not lp_env.OCAVRAST_IN:
        lm_util.gprint(
            "Warning: OCAVWEIGHT > 0 but no OCAV raster input provided")
    if lp_env.OCAVWEIGHT == 0 and lp_env.OCAVRAST_IN:
        lm_util.gprint(
            "Warning: OCAV raster input provided, but OCAVWEIGHT = 0")

    # check/add fields
    check_add_field(lp_env.COREFC, "mean_res", "DOUBLE")
    check_add_field(lp_env.COREFC, "norm_res", "DOUBLE")
    check_add_field(lp_env.COREFC, "area", "DOUBLE")
    check_add_field(lp_env.COREFC, "norm_size", "DOUBLE")
    check_add_field(lp_env.COREFC, "perimeter", "DOUBLE")
    check_add_field(lp_env.COREFC, "ap_ratio", "DOUBLE")
    check_add_field(lp_env.COREFC, "norm_ratio", "DOUBLE")
    check_add_field(lp_env.COREFC, "cav", "DOUBLE")
    check_add_field(lp_env.COREFC, "norm_cav", "DOUBLE")
    check_add_field(lp_env.COREFC, "clim_env", "DOUBLE")
    check_add_field(lp_env.COREFC, "nclim_env", "DOUBLE")
    check_add_field(lp_env.COREFC, "fut_clim", "DOUBLE")
    check_add_field(lp_env.COREFC, "nfut_clim", "DOUBLE")
    check_add_field(lp_env.COREFC, "ocav", "DOUBLE")
    check_add_field(lp_env.COREFC, "nocav", "DOUBLE")

    # ecav: if the field was just created (did not pre-exist), default to 0
    # and warn when a non-zero weight expects real values.
    if not check_add_field(lp_env.COREFC, "ecav", "DOUBLE"):
        if lp_env.ECAVWEIGHT > 0:
            lm_util.gprint(
                "Warning: ECAVWEIGHT > 0 but no ecav field in Cores feature class"
            )
        arcpy.CalculateField_management(lp_env.COREFC, "ecav", "0")
    check_add_field(lp_env.COREFC, "necav", "DOUBLE")

    # current flow centrality (CFC, CF_Central) is copied from Centrality Mapper
    if not check_add_field(lp_env.COREFC, "CF_Central", "DOUBLE"):
        # default to 0s
        arcpy.CalculateField_management(lp_env.COREFC, "CF_Central", "0")
    if lp_env.CFCWEIGHT > 0:
        # copy values from Centrality Mapper output
        # (core_centrality.gdb.project_Cores) if available
        centrality_cores = os.path.join(lm_env.CORECENTRALITYGDB,
                                        lm_env.PREFIX + "_Cores")
        if arcpy.Exists(centrality_cores):
            arcpy.AddJoin_management("core_lyr", lp_env.COREFN,
                                     centrality_cores, lp_env.COREFN)
            arcpy.CalculateField_management(
                "core_lyr", lp_env.CORENAME + ".CF_Central",
                "[" + lm_env.PREFIX + "_Cores.CF_Central]")
            arcpy.RemoveJoin_management("core_lyr")
        # ensure cores have at least one non-0 value for CFC (could have been
        # copied above or set earlier)
        # NOTE(review): uses lm_env.COREFC while the rest of this function
        # uses lp_env.COREFC — confirm these refer to the same dataset.
        # "CF_Central D" sorts descending so .next() yields the max value.
        max_val = arcpy.SearchCursor(
            lm_env.COREFC, "", "", "",
            "CF_Central D").next().getValue("CF_Central")
        if max_val is None or max_val == 0:
            msg = (
                "ERROR: A Current Flow Centrality Weight (CFCWEIGHT) was provided but no Current Flow Centrality "
                + "(CF_Central) values are available. Please run Centrality Mapper on this project, then run "
                + "Linkage Priority.")
            raise Exception(msg)
    check_add_field(lp_env.COREFC, "ncfc", "DOUBLE")

    # calc mean resistance
    stats_table = ZonalStatisticsAsTable(
        lp_env.COREFC, lp_env.COREFN, lp_env.RESRAST_IN,
        os.path.join(lm_env.SCRATCHDIR, "scratch.gdb",
                     "core_resistance_stats"))
    arcpy.AddJoin_management("core_lyr", lp_env.COREFN, stats_table,
                             lp_env.COREFN)
    arcpy.CalculateField_management("core_lyr",
                                    lp_env.CORENAME + ".mean_res",
                                    "[core_resistance_stats.MEAN]")
    arcpy.RemoveJoin_management("core_lyr")

    # calc area, perimeter and ratio
    arcpy.CalculateField_management("core_lyr", "area", "!SHAPE.AREA!",
                                    "PYTHON_9.3")
    arcpy.CalculateField_management("core_lyr", "perimeter",
                                    "!SHAPE.LENGTH!", "PYTHON_9.3")
    arcpy.CalculateField_management("core_lyr", "ap_ratio",
                                    "!area! / !perimeter!", "PYTHON_9.3")

    # normalize CAV inputs
    # resistance - invert
    normalize_field("core_lyr", "mean_res", "norm_res", lp_env.RESNORMETH,
                    True)
    # size
    normalize_field("core_lyr", "area", "norm_size", lp_env.SIZENORMETH)
    # area/perimeter ratio
    normalize_field("core_lyr", "ap_ratio", "norm_ratio", lp_env.APNORMETH)
    # ecav
    normalize_field("core_lyr", "ecav", "necav", lp_env.ECAVNORMETH)
    # cfc
    normalize_field("core_lyr", "CF_Central", "ncfc", lp_env.CFCNORMETH)

    # calc OCAV
    if lp_env.OCAVRAST_IN:
        # get max and min
        lm_util.build_stats(lp_env.OCAVRAST_IN)
        result = arcpy.GetRasterProperties_management(lp_env.OCAVRAST_IN,
                                                      "MAXIMUM")
        max_ocav = float(result.getOutput(0))
        result = arcpy.GetRasterProperties_management(lp_env.OCAVRAST_IN,
                                                      "MINIMUM")
        min_ocav = float(result.getOutput(0))
        # calc score range normalization on input
        ocav_raster = ((Raster(lp_env.OCAVRAST_IN) - min_ocav)
                       / (max_ocav - min_ocav))
        # calc aerial mean ocav for each core
        ocav_table = ZonalStatisticsAsTable(
            lp_env.COREFC, lp_env.COREFN, ocav_raster,
            os.path.join(lm_env.SCRATCHDIR, "scratch.gdb",
                         "core_ocav_stats"))
        arcpy.AddJoin_management("core_lyr", lp_env.COREFN, ocav_table,
                                 lp_env.COREFN)
        arcpy.CalculateField_management("core_lyr",
                                        lp_env.CORENAME + ".ocav",
                                        "[core_ocav_stats.MEAN]")
        arcpy.RemoveJoin_management("core_lyr")
        # calc score range normalization on output
        normalize_field("core_lyr", "ocav", "nocav", 0)

        # calc CAV (weighted sum including OCAV term)
        arcpy.CalculateField_management(
            "core_lyr", "cav",
            "(!norm_res! * " + str(lp_env.RESWEIGHT) + ") + (!norm_size! * "
            + str(lp_env.SIZEWEIGHT) + ") + (!norm_ratio! * "
            + str(lp_env.APWEIGHT) + ") + (!necav! * "
            + str(lp_env.ECAVWEIGHT) + ") + (!ncfc! * "
            + str(lp_env.CFCWEIGHT) + ") + (!nocav! * "
            + str(lp_env.OCAVWEIGHT) + ")",
            "PYTHON_9.3")
    else:
        # calc CAV (weighted sum without OCAV term)
        arcpy.CalculateField_management(
            "core_lyr", "cav",
            "(!norm_res! * " + str(lp_env.RESWEIGHT) + ") + (!norm_size! * "
            + str(lp_env.SIZEWEIGHT) + ") + (!norm_ratio! * "
            + str(lp_env.APWEIGHT) + ") + (!necav! * "
            + str(lp_env.ECAVWEIGHT) + ") + (!ncfc! * "
            + str(lp_env.CFCWEIGHT) + ")",
            "PYTHON_9.3")

    # normalize CAV with score range normalization
    normalize_field("core_lyr", "cav", "norm_cav", 0)