def grass_cwd(core_list):
    """Create CWD and back rasters using the GRASS r.walk function."""
    out_fldr = cc_env.scratch_dir
    gisdbase = os.path.join(out_fldr, "cc_grass")
    ccr_grassrc = os.path.join(out_fldr, "cc_grassrc")
    climate_asc = os.path.join(out_fldr, "cc_gclimate.asc")
    resist_asc = os.path.join(out_fldr, "cc_gresist.asc")
    core_asc = os.path.join(out_fldr, "cc_gcores.asc")
    climate_lyr = "climate"
    resist_lyr = "resist"
    core_lyr = "cores"

    # Save the original PATH before entering the try block so the finally
    # clause can always restore it, even if an early statement raises
    start_path = os.environ["PATH"]
    try:
        lm_util.gprint("\nRUNNING GRASS TO CREATE COST-WEIGHTED DISTANCE "
                       "RASTERS")

        # Convert input GRID rasters to ASCII
        lm_util.gprint("Converting ARCINFO GRID rasters to ASCII")
        arcpy.RasterToASCII_conversion(cc_env.prj_climate_rast, climate_asc)
        arcpy.RasterToASCII_conversion(cc_env.prj_resist_rast, resist_asc)
        arcpy.RasterToASCII_conversion(cc_env.prj_core_rast, core_asc)

        # Create resource file and set up workspace
        os.environ["PATH"] = cc_env.gpath
        write_grassrc(ccr_grassrc, gisdbase)
        setup_wrkspace(gisdbase, ccr_grassrc, climate_asc)

        # Make cwd folder for Linkage Mapper
        lm_util.make_cwd_paths(max(core_list))

        # Import files into GRASS
        lm_util.gprint("Importing raster files into GRASS")
        run_grass_cmd("r.in.gdal", input=climate_asc, output=climate_lyr)
        run_grass_cmd("r.in.gdal", input=resist_asc, output=resist_lyr)
        run_grass_cmd("r.in.gdal", input=core_asc, output=core_lyr)

        # Generate CWD and Back rasters
        gen_cwd_back(core_list, climate_lyr, resist_lyr, core_lyr)
    finally:
        os.environ["PATH"] = start_path
        cc_util.arc_delete(gisdbase, ccr_grassrc, climate_asc, resist_asc,
                           core_asc)
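
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): write_grassrc,
# setup_wrkspace, and run_grass_cmd are helpers defined elsewhere in this
# package. The function below is a hypothetical, minimal version of
# run_grass_cmd showing how a GRASS module such as r.in.gdal could be
# invoked, with keyword arguments mapped to "param=value" command-line
# arguments. The real helper may differ in name, signature, and behavior.
import subprocess  # may already be imported at module level


def run_grass_cmd_sketch(command, **kwargs):
    """Hypothetical illustration of shelling out to a GRASS module."""
    # Build e.g. ['r.in.gdal', 'input=cc_gclimate.asc', 'output=climate']
    args = [command] + ["%s=%s" % (key, val) for key, val in kwargs.items()]
    proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        raise Exception("GRASS command failed: %s\n%s" % (command, stderr))
    return stdout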
def STEP3_calc_cwds():
    """Calculate cost-weighted distances from each core area.

    Uses bounding circles around source and target cores to limit the
    extent of cwd calculations and speed computation.

    """
    try:
        lu.dashline(1)
        gprint('Running script ' + _SCRIPT_NAME)
        lu.dashline(0)

        # Super secret setting to re-start a failed run. Enter 'RESTART' as
        # the name of the pairwise distance table in step 2, and uncheck
        # step 2. We can eventually place this in a .ini file.
        rerun = False
        if cfg.S2EUCDISTFILE is not None:
            if cfg.S2EUCDISTFILE.lower() == "restart":
                rerun = True

        # if cfg.TMAXCWDIST is None:
        #     gprint('NOT using a maximum cost-weighted distance.')
        # else:
        #     gprint('Max cost-weighted distance for CWD calcs set '
        #            'to ' + str(cfg.TMAXCWDIST) + '\n')

        if cfg.BUFFERDIST is not None:
            gprint('Bounding circles plus a buffer of ' +
                   str(float(cfg.BUFFERDIST)) + ' map units will be used\n'
                   'to limit the extent of cost distance calculations.')
        elif cfg.TOOL != cfg.TOOL_CC:
            gprint('NOT using bounding circles in cost distance '
                   'calculations.')

        # Set the analysis extent and cell size so we don't extract rasters
        # that go beyond the extent of the original raster
        if arcpy:
            arcpy.env.cellSize = cfg.RESRAST
            arcpy.env.extent = "MINOF"
        else:
            gp.cellSize = gp.Describe(cfg.RESRAST).MeanCellHeight
            gp.Extent = "MINOF"
        gp.mask = cfg.RESRAST
        if arcpy:
            arcpy.env.overwriteOutput = True
            arcpy.env.workspace = cfg.SCRATCHDIR
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        else:
            gp.OverwriteOutput = True
            gp.workspace = cfg.SCRATCHDIR
            gp.scratchWorkspace = cfg.ARCSCRATCHDIR

        # Load linkTable (created in previous script)
        linkTableFile = lu.get_prev_step_link_table(step=3)
        linkTable = lu.load_link_table(linkTableFile)
        lu.report_links(linkTable)

        # Identify cores to map from linkTable
        coresToMap = npy.unique(
            linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        numCoresToMap = len(coresToMap)
        if numCoresToMap < 3:
            # No need to check for intermediate cores: there aren't any
            cfg.S3DROPLCCSic = False
        else:
            cfg.S3DROPLCCSic = cfg.S3DROPLCCS
        gprint('\nNumber of core areas to connect: ' + str(numCoresToMap))

        if rerun:
            # If picking up a failed run, make sure needed files are there
            lu.dashline(1)
            gprint('\n****** RESTART MODE ENABLED ******\n')
            gprint('**** NOTE: This mode picks up step 3 where a\n'
                   'previous run left off due to a crash or user\n'
                   'abort. It assumes you are using the same input\n'
                   'data used in the terminated run.\n\n')
            lu.warn('IMPORTANT: Your LCP and stick feature classes\n'
                    'will LOSE LCPs that were already created, but\n'
                    'your final raster corridor map should be complete.\n')
            lu.dashline(0)
            lu.snooze(10)
            savedLinkTableFile = path.join(
                cfg.DATAPASSDIR, "temp_linkTable_s3_partial.csv")
            coreListFile = path.join(cfg.DATAPASSDIR,
                                     "temp_cores_to_map.csv")
            if not path.exists(savedLinkTableFile) or not path.exists(
                    coreListFile):
                gprint('No partial results file found from previous '
                       'stopped run. Starting run from beginning.\n')
                lu.dashline(0)
                rerun = False

        # If picking up a failed run, use old folders
        if not rerun:
            startIndex = 0
            if cfg.TOOL != cfg.TOOL_CC:
                lu.make_cwd_paths(max(coresToMap))  # Set up cwd directories

        # Make a feature layer for input cores to select from
        gp.MakeFeatureLayer(cfg.COREFC, cfg.FCORES)

        # Drop links that are too long
        gprint('\nChecking for corridors that are too long to map.')
        DISABLE_LEAST_COST_NO_VAL = False
        linkTable, numDroppedLinks = lu.drop_links(
            linkTable, cfg.MAXEUCDIST, 0, cfg.MAXCOSTDIST, 0,
            DISABLE_LEAST_COST_NO_VAL)

        # ------------------------------------------------------------------
        # Bounding boxes
        if cfg.BUFFERDIST is not None:
            # Create bounding boxes around cores
            start_time = time.clock()
            gprint('Calculating bounding boxes for core areas.')
            extentBoxList = npy.zeros((0, 5), dtype='float32')
            for x in range(len(coresToMap)):
                core = coresToMap[x]
                boxCoords = lu.get_extent_box_coords(core)
                extentBoxList = npy.append(extentBoxList, boxCoords, axis=0)
            gprint('\nDone calculating bounding boxes.')
            start_time = lu.elapsed_time(start_time)

        # Bounding circle code
        if cfg.BUFFERDIST is not None:
            # Make a set of circles encompassing the core areas we'll be
            # connecting
            start_time = time.clock()
            gprint('Calculating bounding circles around potential '
                   'corridors.')

            # x y corex corey radius -- stores data for bounding circle
            # centroids
            boundingCirclePointArray = npy.zeros((0, 5), dtype='float32')
            circleList = npy.zeros((0, 3), dtype='int32')

            numLinks = linkTable.shape[0]
            for x in range(0, numLinks):
                if (linkTable[x, cfg.LTB_LINKTYPE] == cfg.LT_CORR or
                        linkTable[x, cfg.LTB_LINKTYPE] == cfg.LT_KEEP):
                    # It's a valid corridor link
                    linkId = int(linkTable[x, cfg.LTB_LINKID])
                    # fixme: this code is clumsy -- can trim down
                    cores = npy.zeros((1, 3), dtype='int32')
                    cores[0, :] = npy.sort([0, linkTable[x, cfg.LTB_CORE1],
                                            linkTable[x, cfg.LTB_CORE2]])
                    corex = cores[0, 1]
                    corey = cores[0, 2]
                    cores[0, 0] = linkId

                    foundFlag = False
                    for y in range(0, len(circleList)):  # clumsy
                        if (circleList[y, 1] == corex and
                                circleList[y, 2] == corey):
                            foundFlag = True
                    if not foundFlag:
                        circlePointData = lu.get_bounding_circle_data(
                            extentBoxList, corex, corey, cfg.BUFFERDIST)
                        boundingCirclePointArray = npy.append(
                            boundingCirclePointArray, circlePointData,
                            axis=0)
                        # Keep track of which cores we draw bounding
                        # circles around
                        circleList = npy.append(circleList, cores, axis=0)

            gprint('\nCreating bounding circles using buffer analysis.')
            _, BNDCIRCENS = path.split(cfg.BNDCIRCENS)
            lu.make_points(cfg.SCRATCHDIR, boundingCirclePointArray,
                           BNDCIRCENS)
            lu.delete_data(cfg.BNDCIRS)
            gp.buffer_analysis(cfg.BNDCIRCENS, cfg.BNDCIRS, "radius")
            gp.deletefield(cfg.BNDCIRS, "BUFF_DIST")

            gprint('Successfully created bounding circles around '
                   'potential corridors using\na buffer of ' +
                   str(float(cfg.BUFFERDIST)) + ' map units.')
            start_time = lu.elapsed_time(start_time)

            gprint('Reducing global processing area using bounding '
                   'circle plus buffer of ' +
                   str(float(cfg.BUFFERDIST)) + ' map units.\n')

            extentBoxList = npy.zeros((0, 5), dtype='float32')
            boxCoords = lu.get_extent_box_coords()
            extentBoxList = npy.append(extentBoxList, boxCoords, axis=0)
            extentBoxList[0, 0] = 0

            circlePointData = lu.get_bounding_circle_data(
                extentBoxList, 0, 0, cfg.BUFFERDIST)
            _, BNDCIRCEN = path.split(cfg.BNDCIRCEN)
            lu.make_points(cfg.SCRATCHDIR, circlePointData, BNDCIRCEN)
            lu.delete_data(cfg.BNDCIR)
            gp.buffer_analysis(cfg.BNDCIRCEN, cfg.BNDCIR, "radius")

            gprint('Extracting raster....')
            # 'tif' is assumed to be a module-level extension string
            # (e.g. ".tif") defined elsewhere in this script
            cfg.BOUNDRESIS = cfg.BOUNDRESIS + tif
            lu.delete_data(cfg.BOUNDRESIS)
            count = 0
            statement = ('gp.ExtractByMask_sa(cfg.RESRAST, cfg.BNDCIR, '
                         'cfg.BOUNDRESIS)')
            while True:
                try:
                    exec(statement)
                    randomerror()  # testing hook to exercise retry logic
                except Exception:
                    count, tryAgain = lu.retry_arc_error(count, statement)
                    if not tryAgain:
                        exec(statement)
                else:
                    break
            gprint('\nReduced resistance raster extracted using '
                   'bounding circle.')
        else:
            # Not using bounding circles; just use the resistance raster
            cfg.BOUNDRESIS = cfg.RESRAST

        # ------------------------------------------------------------------
        # Rasterize core areas to speed cost distance calcs
        gprint("Creating core area raster.")
        gp.SelectLayerByAttribute(cfg.FCORES, "CLEAR_SELECTION")
        if arcpy:
            arcpy.env.cellSize = cfg.BOUNDRESIS
            arcpy.env.extent = cfg.BOUNDRESIS
        else:
            gp.cellSize = gp.Describe(cfg.BOUNDRESIS).MeanCellHeight
            gp.extent = gp.Describe(cfg.BOUNDRESIS).extent

        if rerun:
            # The saved linktable replaces the one now in memory
            linkTable = lu.load_link_table(savedLinkTableFile)
            coresToMapSaved = npy.loadtxt(coreListFile, dtype='float64',
                                          comments='#', delimiter=',')
            # Index of core where we left off
            startIndex = int(coresToMapSaved[0])
            del coresToMapSaved
            gprint('\n****** Re-starting run at core area number ' +
                   str(int(coresToMap[startIndex])) + ' ******\n')
            lu.dashline(0)

        if arcpy:
            arcpy.env.extent = "MINOF"
        else:
            gp.extent = "MINOF"

        # ------------------------------------------------------------------
        # Loop through cores, doing cwd calcs for each
        if cfg.TOOL == cfg.TOOL_CC:
            gprint("\nMapping least-cost paths.\n")
        else:
            gprint("\nStarting cost distance calculations.\n")
        lcpLoop = 0
        failures = 0
        x = startIndex
        endIndex = len(coresToMap)
        linkTableMod = linkTable.copy()
        while x < endIndex:
            startTime1 = time.clock()
            # Modification of linkTable in the function was causing
            # problems, so make a copy:
            linkTablePassed = linkTableMod.copy()

            (linkTableReturned, failures, lcpLoop) = do_cwd_calcs(
                x, linkTablePassed, coresToMap, lcpLoop, failures)

            if failures == 0:
                # Iteration was successful; continue with the next core
                linkTableMod = linkTableReturned
                sourceCore = int(coresToMap[x])
                gprint('Done with all calculations for core ID #' +
                       str(sourceCore) + '. ' + str(int(x + 1)) + ' of ' +
                       str(endIndex) + ' cores have been processed.')
                start_time = lu.elapsed_time(startTime1)

                outlinkTableFile = path.join(
                    cfg.DATAPASSDIR, "temp_linkTable_s3_partial.csv")
                lu.write_link_table(linkTableMod, outlinkTableFile)
                # Increment loop counter
                x = x + 1
            else:
                # Iteration failed; try again after a wait period
                delay_restart(failures)

        # ------------------------------------------------------------------
        linkTable = linkTableMod

        # Reinstate temporarily disabled links
        rows = npy.where(linkTable[:, cfg.LTB_LINKTYPE] > 1000)
        linkTable[rows, cfg.LTB_LINKTYPE] = (
            linkTable[rows, cfg.LTB_LINKTYPE] - 1000)

        # Drop links that are too long
        DISABLE_LEAST_COST_NO_VAL = True
        linkTable, numDroppedLinks = lu.drop_links(
            linkTable, cfg.MAXEUCDIST, cfg.MINEUCDIST, cfg.MAXCOSTDIST,
            cfg.MINCOSTDIST, DISABLE_LEAST_COST_NO_VAL)

        # Write link table file
        outlinkTableFile = lu.get_this_step_link_table(step=3)
        gprint('Updating ' + outlinkTableFile)
        lu.write_link_table(linkTable, outlinkTableFile)
        linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s3.csv")
        lu.write_link_table(linkTable, linkTableLogFile)

        start_time = time.clock()
        gprint('Creating shapefiles with linework for links...')
        try:
            lu.write_link_maps(outlinkTableFile, step=3)
        except Exception:
            # Retry once on transient failure
            lu.write_link_maps(outlinkTableFile, step=3)
        start_time = lu.elapsed_time(start_time)

        gprint('\nIndividual cost-weighted distance layers written '
               'to "cwd" directory.\n')
        gprint(outlinkTableFile +
               '\n updated with cost-weighted distances between core areas.')

        # Clean up temporary files for restart code
        tempFile = path.join(cfg.DATAPASSDIR, "temp_cores_to_map.csv")
        lu.delete_file(tempFile)
        tempFile = path.join(cfg.DATAPASSDIR,
                             "temp_linkTable_s3_partial.csv")
        lu.delete_file(tempFile)

        # Check if the climate tool is calling linkage mapper
        if cfg.TOOL == cfg.TOOL_CC:
            coreList = npy.unique(
                linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
            for core in coreList:
                cwdRaster = lu.get_cwd_path(int(core))
                back_rast = cwdRaster.replace("cwd_", "back_")
                lu.delete_data(back_rast)

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 3. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 3. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
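
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the retry loop
# around ExtractByMask above relies on lu.retry_arc_error to decide whether
# a failed geoprocessing call should be attempted again. The function below
# is a hypothetical, generic version of that pattern; max_tries and
# wait_secs are assumed parameters, not values from the real code.
import time  # may already be imported at module level


def retry_geoprocessing_sketch(func, max_tries=3, wait_secs=10):
    """Hypothetical bounded-retry wrapper for flaky geoprocessing calls."""
    for attempt in range(1, max_tries + 1):
        try:
            return func()
        except Exception:
            if attempt == max_tries:
                raise  # out of retries; surface the original error
            time.sleep(wait_secs * attempt)  # linear backoff between tries

# Example usage (hypothetical):
# retry_geoprocessing_sketch(
#     lambda: gp.ExtractByMask_sa(cfg.RESRAST, cfg.BNDCIR, cfg.BOUNDRESIS))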