def get_full_adj_list():
    """Build the 2-column array of adjacent core-area ID pairs for step 2.

    Returns an int32 array of shape (nPairs, 2) with core1 < core2 in each
    row and duplicate pairs removed.

    Behavior depends on the configured adjacency methods:
      * Neither S2ADJMETH_CW nor S2ADJMETH_EU set: every possible pair of
        core IDs from the core feature class is returned ("keep ALL links").
      * Otherwise the Euclidean adjacency file is loaded, optionally merged
        with the cost-weighted adjacency file, and de-duplicated.

    On error, logs and exits via the lu error handlers (does not return).
    """
    try:
        if not cfg.S2ADJMETH_CW and not cfg.S2ADJMETH_EU:  # Keep ALL links
            coreList = lu.get_core_list(cfg.COREFC, cfg.COREFN)
            coreList = coreList[:, 0]
            numCores = len(coreList)
            # BUGFIX: use floor division.  Plain '/' yields a float under
            # Python 3, and npy.zeros rejects a float array dimension.
            adjList = npy.zeros((numCores * (numCores - 1) // 2, 2),
                                dtype="int32")
            pairIndex = 0
            # Enumerate all unordered pairs (source < target).
            for sourceIndex in range(0, numCores - 1):
                for targetIndex in range(sourceIndex + 1, numCores):
                    adjList[pairIndex, 0] = coreList[sourceIndex]
                    adjList[pairIndex, 1] = coreList[targetIndex]
                    pairIndex = pairIndex + 1
            return adjList

        eucAdjList = get_adj_list(cfg.EUCADJFILE)
        if cfg.S2ADJMETH_CW:
            cwdAdjList = get_adj_list(cfg.CWDADJFILE)
            adjList = npy.append(eucAdjList, cwdAdjList, axis=0)
        else:
            adjList = eucAdjList
        # Sort IDs within each row so (a, b) and (b, a) become identical.
        adjList = npy.sort(adjList)

        # sort by 1st core Id then by 2nd core Id
        ind = npy.lexsort((adjList[:, 1], adjList[:, 0]))
        adjList = adjList[ind]

        # Mark duplicate consecutive pairs for deletion by zeroing core1
        # (core IDs are assumed positive, so 0 is a safe sentinel).
        numDists = len(adjList)
        x = 1
        while x < numDists:
            if (adjList[x, 0] == adjList[x - 1, 0] and
                    adjList[x, 1] == adjList[x - 1, 1]):
                adjList[x - 1, 0] = 0  # mark for deletion
            x = x + 1

        if numDists > 0:
            delRows = npy.asarray(npy.where(adjList[:, 0] == 0))
            delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
            delRowsVector[:] = delRows[0, :]
            adjList = lu.delete_row(adjList, delRowsVector)
            del delRows
            del delRowsVector
        return adjList

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)
    # Return any PYTHON or system specific errors.  'except Exception'
    # (not bare 'except:') so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
def get_full_adj_list():
    """Return adjacent core-area ID pairs as an int32 (nPairs, 2) array.

    With no adjacency method configured, all unordered pairs of core IDs
    are generated.  Otherwise the Euclidean (and optionally cost-weighted)
    adjacency files are loaded, merged, sorted, and de-duplicated.

    On error, logs and exits via the lu error handlers (does not return).
    """
    try:
        if not cfg.S2ADJMETH_CW and not cfg.S2ADJMETH_EU:  # Keep ALL links
            coreList = lu.get_core_list(cfg.COREFC, cfg.COREFN)
            coreList = coreList[:, 0]
            numCores = len(coreList)
            # BUGFIX: floor division ('//').  Under Python 3 '/' produces a
            # float, which npy.zeros rejects as an array dimension.
            adjList = npy.zeros((numCores * (numCores - 1) // 2, 2),
                                dtype="int32")
            pairIndex = 0
            for sourceIndex in range(0, numCores - 1):
                for targetIndex in range(sourceIndex + 1, numCores):
                    adjList[pairIndex, 0] = coreList[sourceIndex]
                    adjList[pairIndex, 1] = coreList[targetIndex]
                    pairIndex = pairIndex + 1
            return adjList

        eucAdjList = get_adj_list(cfg.EUCADJFILE)
        if cfg.S2ADJMETH_CW:
            cwdAdjList = get_adj_list(cfg.CWDADJFILE)
            adjList = npy.append(eucAdjList, cwdAdjList, axis=0)
        else:
            adjList = eucAdjList
        # Order IDs within each row so duplicates line up regardless of
        # original pair orientation.
        adjList = npy.sort(adjList)

        # sort by 1st core Id then by 2nd core Id
        ind = npy.lexsort((adjList[:, 1], adjList[:, 0]))
        adjList = adjList[ind]

        # Zero out core1 of the earlier row of each duplicate pair.
        numDists = len(adjList)
        x = 1
        while x < numDists:
            if (adjList[x, 0] == adjList[x - 1, 0] and
                    adjList[x, 1] == adjList[x - 1, 1]):
                adjList[x - 1, 0] = 0  # mark for deletion
            x = x + 1

        if numDists > 0:
            delRows = npy.asarray(npy.where(adjList[:, 0] == 0))
            delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
            delRowsVector[:] = delRows[0, :]
            adjList = lu.delete_row(adjList, delRowsVector)
            del delRows
            del delRowsVector
        return adjList

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)
    # Return any PYTHON or system specific errors.  Narrowed from a bare
    # 'except:' so KeyboardInterrupt/SystemExit are not swallowed.
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
def STEP7_calc_centrality():
    """Analyze network centrality using Circuitscape given Linkage Mapper
    outputs.

    Loads the link table from step 5, builds a Circuitscape "network" graph
    (core pairs weighted by cost-weighted distance), runs Circuitscape via
    lu.call_circuitscape, and writes branch currents back into the link
    table (LTB_CURRENT) and node currents into a "CF_Central" field on a
    copy of the core feature class.  Results are saved as
    linkTable_s5_plus.csv and to the core centrality geodatabase.
    """
    try:
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)
        arcpy.env.workspace = cfg.SCRATCHDIR

        # Check for valid LCP shapefile
        prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep=7)
        if not arcpy.Exists(prevLcpShapefile):
            msg = ('Cannot find an LCP shapefile from step 5.  Please '
                   'rerun that step and any previous ones if necessary.')
            lu.raise_error(msg)

        # Remove lcp shapefile from this step if run previously
        lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s7.shp")
        lu.delete_data(lcpShapefile)

        # Core field name must not collide with ArcGIS reserved field names.
        invalidFNs = ['fid', 'id', 'oid', 'shape']
        if cfg.COREFN.lower() in invalidFNs:
            #if cfg.COREFN == 'FID' or cfg.COREFN == 'ID':
            lu.dashline(1)
            msg = ('ERROR: Core area field names ID, FID, SHAPE, and OID are'
                   ' reserved for ArcGIS. \nPlease choose another field- must'
                   ' be a positive integer.')
            lu.raise_error(msg)

        lu.dashline(1)
        gprint('Mapping centrality of network cores and links'
               '\nusing Circuitscape....')
        lu.dashline(0)

        # set the analysis extent and cell size to that of the resistance
        # surface
        coreCopy = path.join(cfg.SCRATCHDIR, 'cores.shp')
        arcpy.CopyFeatures_management(cfg.COREFC, coreCopy)
        # Add the output field only if a previous run has not created it.
        if not arcpy.ListFields(coreCopy, "CF_Central"):
            arcpy.AddField_management(coreCopy, "CF_Central", "DOUBLE")

        inLinkTableFile = lu.get_prev_step_link_table(step=7)
        linkTable = lu.load_link_table(inLinkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg = ('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        if linkTable.shape[1] < 16:  # If linktable has no entries from prior
                                     # centrality or pinchpoint analyses
            # Widen the table to 16 columns; -1 marks "not yet computed".
            extraCols = npy.zeros((numLinks, 6), dtype=npy.float64)
            linkTable = linkTable[:, 0:10]
            linkTable = npy.append(linkTable, extraCols, axis=1)
            linkTable[:, cfg.LTB_LCPLEN] = -1
            linkTable[:, cfg.LTB_CWDEUCR] = -1
            linkTable[:, cfg.LTB_CWDPATHR] = -1
            linkTable[:, cfg.LTB_EFFRESIST] = -1
            linkTable[:, cfg.LTB_CWDTORR] = -1
            del extraCols

        linkTable[:, cfg.LTB_CURRENT] = -1
        coresToProcess = npy.unique(
            linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)
        del coresToProcess
        lu.dashline(0)

        coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        coreList = npy.sort(coreList)

        # set up directory for centrality
        INCENTRALITYDIR = cfg.CENTRALITYBASEDIR
        OUTCENTRALITYDIR = path.join(cfg.CENTRALITYBASEDIR,
                                     cfg.CIRCUITOUTPUTDIR_NM)
        CONFIGDIR = path.join(INCENTRALITYDIR, cfg.CIRCUITCONFIGDIR_NM)

        # Set Circuitscape options and write config file
        options = lu.set_cs_options()
        options['data_type'] = 'network'
        options['habitat_file'] = path.join(INCENTRALITYDIR,
                                            'Circuitscape_graph.txt')
        # Setting point file equal to graph to do all pairs in Circuitscape
        options['point_file'] = path.join(INCENTRALITYDIR,
                                          'Circuitscape_graph.txt')
        outputFN = 'Circuitscape_network.out'
        options['output_file'] = path.join(OUTCENTRALITYDIR, outputFN)
        configFN = 'Circuitscape_network.ini'
        outConfigFile = path.join(CONFIGDIR, configFN)
        lu.write_cs_cfg_file(outConfigFile, options)

        # Keep only rows with an active link type (>= 1) for the graph.
        delRows = npy.asarray(npy.where(linkTable[:, cfg.LTB_LINKTYPE] < 1))
        delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
        delRowsVector[:] = delRows[0, :]
        LT = lu.delete_row(linkTable, delRowsVector)
        del delRows
        del delRowsVector
        # Graph rows are (core1, core2, cost-weighted distance).
        graphList = npy.zeros((LT.shape[0], 3), dtype=npy.float64)
        graphList[:, 0] = LT[:, cfg.LTB_CORE1]
        graphList[:, 1] = LT[:, cfg.LTB_CORE2]
        graphList[:, 2] = LT[:, cfg.LTB_CWDIST]

        write_graph(options['habitat_file'], graphList)
        gprint('\nCalculating current flow centrality using Circuitscape...')
        memFlag = lu.call_circuitscape(cfg.CSPATH, outConfigFile)

        outputFN = 'Circuitscape_network_branch_currents_cum.txt'
        currentList = path.join(OUTCENTRALITYDIR, outputFN)
        # Retry once if Circuitscape produced no output on the first run.
        if not arcpy.Exists(currentList):
            write_graph(options['habitat_file'], graphList)
            gprint('\nCalculating current flow centrality using Circuitscape '
                   '(2nd try)...')
            memFlag = lu.call_circuitscape(cfg.CSPATH, outConfigFile)
        if not arcpy.Exists(currentList):
            lu.dashline(1)
            msg = ('ERROR: No Circuitscape output found.\n'
                   'It looks like Circuitscape failed.')
            arcpy.AddError(msg)
            lu.write_log(msg)
            exit(1)

        currents = load_graph(currentList, graphType='graph/network',
                              datatype=npy.float64)

        # Copy each branch current into the matching link-table row(s).
        numLinks = currents.shape[0]
        for x in range(0, numLinks):
            corex = currents[x, 0]
            corey = currents[x, 1]
            #linkId = LT[x,cfg.LTB_LINKID]
            row = lu.get_links_from_core_pairs(linkTable, corex, corey)
            #row = lu.get_linktable_row(linkId, linkTable)
            linkTable[row, cfg.LTB_CURRENT] = currents[x, 2]

        coreCurrentFN = 'Circuitscape_network_node_currents_cum.txt'
        nodeCurrentList = path.join(OUTCENTRALITYDIR, coreCurrentFN)
        nodeCurrents = load_graph(nodeCurrentList, graphType='graph/network',
                                  datatype=npy.float64)

        # Write node currents into the CF_Central field of the core copy.
        numNodeCurrents = nodeCurrents.shape[0]
        rows = arcpy.UpdateCursor(coreCopy)
        # NOTE(review): newRow() looks like InsertCursor API being called on
        # an UpdateCursor, and the result is immediately overwritten by the
        # for-loop variable — confirm this call is needed / does not raise.
        row = rows.newRow()
        for row in rows:
            coreID = row.getValue(cfg.COREFN)
            for i in range(0, numNodeCurrents):
                if coreID == nodeCurrents[i, 0]:
                    row.setValue("CF_Central", nodeCurrents[i, 1])
                    break
            rows.updateRow(row)
            #row = rows.newRow()
        del row, rows

        gprint('Done with centrality calculations.')

        finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=5,
                                                 thisStep=7)
        linkTableFile = path.join(cfg.DATAPASSDIR, "linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFile, inLinkTableFile)
        linkTableFinalFile = path.join(cfg.OUTPUTDIR,
                                       cfg.PREFIX + "_linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFinalFile,
                            inLinkTableFile)
        gprint('Copy of final linkTable written to ' + linkTableFinalFile)

        finalCoreFile = path.join(cfg.CORECENTRALITYGDB,
                                  cfg.PREFIX + '_Cores')
        #copy core area map to gdb.
        if not arcpy.Exists(cfg.CORECENTRALITYGDB):
            arcpy.CreateFileGDB_management(
                cfg.OUTPUTDIR, path.basename(cfg.CORECENTRALITYGDB))
        arcpy.CopyFeatures_management(coreCopy, finalCoreFile)

        gprint('Creating shapefiles with linework for links.')
        lu.write_link_maps(linkTableFinalFile, step=7)

        # Copy final link maps to gdb and clean up.
        lu.copy_final_link_maps(step=7)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)
    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
    return
def STEP2_build_network():
    """Generates initial version of linkTable.csv based on euclidean
    distances and adjacencies of core areas.

    Loads (or generates) the pairwise Euclidean distance table, removes
    duplicate core pairs, marks cost-weighted and/or Euclidean adjacency
    per the cfg.S2ADJMETH_* settings, filters links accordingly, assigns
    link IDs, and writes the step-2 link table and link shapefiles.
    """
    try:
        lu.dashline(1)
        gprint('Running script ' + _SCRIPT_NAME)
        outlinkTableFile = lu.get_this_step_link_table(step=2)

        # Warning flag for missing distances in conefor file
        # dropFlag = False

        # ------------------------------------------------------------------
        # adjacency file created from s1_getAdjacencies.py
        if cfg.S2ADJMETH_EU and not path.exists(cfg.EUCADJFILE):
            msg = ('\nERROR: Euclidean adjacency file required from '
                   'Step 1: ' + cfg.EUCADJFILE)
            lu.raise_error(msg)

        # ------------------------------------------------------------------
        # adjacency file created from s1_getAdjacencies.py
        if cfg.S2ADJMETH_CW and not path.exists(cfg.CWDADJFILE):
            # NOTE(review): implicit concatenation yields "fromStep 1:" —
            # a space is missing at the end of the first fragment.
            msg = ('\nERROR: Cost-weighted adjacency file required from'
                   'Step 1: ' + cfg.CWDADJFILE)
            lu.raise_error(msg)

        #----------------------------------------------------------------------
        # Load eucDists matrix from file and npy.sort
        if cfg.S2EUCDISTFILE is None:
            eucdist_file = generate_distance_file()
        else:
            eucdist_file = cfg.S2EUCDISTFILE

        eucDists_in = npy.loadtxt(eucdist_file, dtype='Float64', comments='#')

        if eucDists_in.size == 3:  # If just one line in file
            # loadtxt returns a 1-D array for a single row; reshape to 2-D.
            eucDists = npy.zeros((1, 3), dtype='Float64')
            eucDists[0, :] = eucDists_in
            numDists = 1
        else:
            eucDists = eucDists_in
            numDists = eucDists.shape[0]
        del eucDists_in

        # Order the two core IDs within each row, then sort rows.
        eucDists[:, 0:2] = npy.sort(eucDists[:, 0:2])
        ind = npy.lexsort((eucDists[:, 2], eucDists[:, 1], eucDists[:, 0]))
        eucDists = eucDists[ind]
        gprint('Core area distance list loaded.')
        gprint('number of pairwise distances = ' + str(numDists))
        # sort eucDists by 1st column then by 2nd then by 3rd

        #----------------------------------------------------------------------
        # Get rid of duplicate pairs of cores, retaining MINIMUM distance
        # between them
        numDistsOld = numDists
        # Walk backwards; zero core1 of the later (larger-distance) duplicate.
        for x in range(numDists - 2, -1, -1):
            if (eucDists[x, 0] == eucDists[x + 1, 0]
                    and (eucDists[x, 1] == eucDists[x + 1, 1])):
                eucDists[x + 1, 0] = 0
        delRows = npy.asarray(npy.where(eucDists[:, 0] == 0))
        delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
        delRowsVector[:] = delRows[0, :]
        eucDists = lu.delete_row(eucDists, delRowsVector)
        del delRows
        del delRowsVector
        numDists = eucDists.shape[0]

        lu.dashline(1)
        gprint('Removed ' + str(numDistsOld - numDists) +
               ' duplicate core pairs in Euclidean distance table.'
               '\n')
        maxEucDistID = max(eucDists[:, 1])
        gprint('After removing duplicates and distances that exceed'
               ' maximum, \nthere are ' + str(numDists) +
               ' pairwise distances. Max core ID number is ' +
               str(int(maxEucDistID)) + '.')

        # Begin creating and manipulating linktables
        # zeros and many other array functions are imported from numpy
        linkTable = npy.zeros((len(eucDists), 10), dtype='int32')
        # Columns 1-2 hold the core pair; LTB_EUCDIST holds the distance.
        linkTable[:, 1:3] = eucDists[:, 0:2]
        linkTable[:, cfg.LTB_EUCDIST] = eucDists[:, 2]

        #----------------------------------------------------------------------
        # Get adjacencies using adj files from step 1.
        if cfg.S2ADJMETH_CW or cfg.S2ADJMETH_EU:  # Keep ALL links
            cwdAdjList = []
            eucAdjList = []
            if cfg.S2ADJMETH_CW:
                cwdAdjTable = get_adj_list(cfg.CWDADJFILE)
                cwdAdjList = []
                # Encode each adjacent pair as "core1_core2" for fast lookup.
                for i in range(0, len(cwdAdjTable)):
                    listEntry = (str(cwdAdjTable[i, 0]) + '_' +
                                 str(cwdAdjTable[i, 1]))
                    cwdAdjList.append(listEntry)
                gprint('Cost-weighted adjacency file loaded.')
                maxCwdAdjCoreID = max(cwdAdjTable[:, 1])
                del cwdAdjTable

            if cfg.S2ADJMETH_EU:
                eucAdjTable = get_adj_list(cfg.EUCADJFILE)
                eucAdjList = []
                for i in range(0, len(eucAdjTable)):
                    listEntry = (str(eucAdjTable[i, 0]) + '_' +
                                 str(eucAdjTable[i, 1]))
                    eucAdjList.append(listEntry)
                maxEucAdjCoreID = max(eucAdjTable[:, 1])
                del eucAdjTable
            # maxCoreId = max(maxEucAdjCoreID, maxCwdAdjCoreID, maxEucDistID)

        del eucDists

        gprint('Creating link table')
        linkTable[:, cfg.LTB_CWDADJ] = -1  # Euc adjacency not evaluated
        linkTable[:, cfg.LTB_EUCADJ] = -1
        if cfg.S2ADJMETH_CW or cfg.S2ADJMETH_EU:
            # Flag each link 1/0 depending on membership in the adj lists.
            for x in range(0, linkTable.shape[0]):
                listEntry = (str(linkTable[x, cfg.LTB_CORE1]) + '_' +
                             str(linkTable[x, cfg.LTB_CORE2]))
                if listEntry in cwdAdjList:
                    linkTable[x, cfg.LTB_CWDADJ] = 1
                else:
                    linkTable[x, cfg.LTB_CWDADJ] = 0
                if listEntry in eucAdjList:
                    linkTable[x, cfg.LTB_EUCADJ] = 1
                else:
                    linkTable[x, cfg.LTB_EUCADJ] = 0

        if cfg.S2ADJMETH_CW and cfg.S2ADJMETH_EU:  # "Keep all adjacent links"
            gprint("\nKeeping all adjacent links\n")
            rows = []
            # Drop links adjacent by NEITHER method.
            for row in range(0, linkTable.shape[0]):
                if (linkTable[row, cfg.LTB_EUCADJ] == 0
                        and linkTable[row, cfg.LTB_CWDADJ] == 0):
                    rows.append(row)
            linkTable = lu.delete_row(linkTable, rows)

        elif cfg.S2ADJMETH_CW:
            gprint("\nKeeping cost-weighted adjacent links\n")
            delRows = npy.asarray(npy.where(linkTable[:, cfg.LTB_CWDADJ] == 0))
            delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
            delRowsVector[:] = delRows[0, :]
            linkTable = lu.delete_row(linkTable, delRowsVector)

        elif cfg.S2ADJMETH_EU:
            gprint("\nKeeping Euclidean adjacent links\n")
            delRows = npy.asarray(npy.where(linkTable[:, cfg.LTB_EUCADJ] == 0))
            delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
            delRowsVector[:] = delRows[0, :]
            linkTable = lu.delete_row(linkTable, delRowsVector)

        else:  # For Climate Corridor tool
            gprint("\nIgnoring adjacency and keeping all links\n")

        # if dropFlag:
        #     lu.dashline(1)
        #     gprint('NOTE: At least one adjacent link was dropped '
        #            'because there was no Euclidean ')
        #     gprint('distance value in the input distance file from '
        #            'Conefor extension.')
        #     lu.dashline(2)

        linkTable[:, cfg.LTB_CLUST1] = -1  # No clusters until later steps
        linkTable[:, cfg.LTB_CLUST2] = -1  # not evaluated yet.

        # May eventually have ability to get lcdistances
        # for adjacent cores from s1_getAdjacencies.py
        linkTable[:, cfg.LTB_CWDIST] = -1

        # Get list of core IDs, based on core area shapefile.
        coreList = lu.get_core_list(cfg.COREFC, cfg.COREFN)
        if len(npy.unique(coreList[:, 1])) < 2:
            lu.dashline(1)
            msg = ('\nERROR: There are less than two core '
                   'areas.\nThis means there is nothing to connect '
                   'with linkages. Bailing.')
            lu.raise_error(msg)

        # Set cfg.LTB_LINKTYPE to valid corridor code
        linkTable[:, cfg.LTB_LINKTYPE] = cfg.LT_CORR
        # Make sure linkTable is sorted
        ind = npy.lexsort((linkTable[:, cfg.LTB_CORE2],
                           linkTable[:, cfg.LTB_CORE1]))
        if len(linkTable) == 0:
            msg = ('\nERROR: There are no valid core area '
                   'pairs. This can happen when core area numbers in '
                   'your Conefor distances text file do not match '
                   'those in your core area feature class.')
            lu.raise_error(msg)
        linkTable = linkTable[ind]

        # Assign link IDs in order
        for x in range(len(linkTable)):
            linkTable[x, cfg.LTB_LINKID] = x + 1

        #----------------------------------------------------------------------
        if cfg.CONNECTFRAGS:
            connect_clusters(linkTable)
        else:
            # Drop links that are too long
            gprint('\nChecking for corridors that are too long to map.')
            DISABLE_LEAST_COST_NO_VAL = False
            linkTable, numDroppedLinks = lu.drop_links(
                linkTable, cfg.MAXEUCDIST, 0, cfg.MINEUCDIST, 0,
                DISABLE_LEAST_COST_NO_VAL)
            if numDroppedLinks > 0:
                lu.dashline(1)
                gprint('Removed ' + str(numDroppedLinks) +
                       ' links that were too long in Euclidean '
                       'distance.')

            # Write linkTable to disk
            gprint('Writing ' + outlinkTableFile)
            lu.write_link_table(linkTable, outlinkTableFile)
            linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s2.csv")
            lu.write_link_table(linkTable, linkTableLogFile)
            lu.report_links(linkTable)

            gprint('Creating shapefiles with linework for links.\n')
            # One blind retry; shapefile writes can fail transiently.
            try:
                lu.write_link_maps(outlinkTableFile, step=2)
            except:
                lu.write_link_maps(outlinkTableFile, step=2)
            gprint('Linework shapefiles written.')

        # if dropFlag:
        #     print_conefor_warning()

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)
    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
    return
def STEP7_calc_centrality():
    """Analyze network centrality using Circuitscape given Linkage Mapper
    outputs.

    Older variant of step 7: resolves the Circuitscape executable via
    lu.get_cs_path(), writes a network-mode config with
    lu.writeCircuitscapeConfigFile, runs it through call_circuitscape,
    then stores branch currents in the link table (LTB_CURRENT) and node
    currents in a "CF_Central" field on a copy of the core feature class.
    """
    try:
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)
        arcpy.env.workspace = cfg.SCRATCHDIR

        # Check for valid LCP shapefile
        prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep = 7)
        if not arcpy.Exists(prevLcpShapefile):
            msg = ('Cannot find an LCP shapefile from step 5.  Please '
                   'rerun that step and any previous ones if necessary.')
            lu.raise_error(msg)

        # Remove lcp shapefile from this step if run previously
        lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s7.shp")
        lu.delete_data(lcpShapefile)

        # Path to the Circuitscape executable.
        csPath = lu.get_cs_path()

        # Core field name must not collide with ArcGIS reserved field names.
        invalidFNs = ['fid', 'id', 'oid', 'shape']
        if cfg.COREFN.lower() in invalidFNs:
            #if cfg.COREFN == 'FID' or cfg.COREFN == 'ID':
            lu.dashline(1)
            msg = ('ERROR: Core area field names ID, FID, SHAPE, and OID are'
                   ' reserved for ArcGIS. \nPlease choose another field- must'
                   ' be a positive integer.')
            lu.raise_error(msg)

        lu.dashline(1)
        gprint('Mapping centrality of network cores and links'
               '\nusing Circuitscape....')
        lu.dashline(0)

        # set the analysis extent and cell size to that of the resistance
        # surface
        coreCopy = path.join(cfg.SCRATCHDIR, 'cores.shp')
        arcpy.CopyFeatures_management(cfg.COREFC, coreCopy)
        # Unconditional add; precision 10, scale 2.
        arcpy.AddField_management(coreCopy, "CF_Central", "DOUBLE", "10", "2")

        inLinkTableFile = lu.get_prev_step_link_table(step=7)
        linkTable = lu.load_link_table(inLinkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg =('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        if linkTable.shape[1] < 16:  # If linktable has no entries from prior
                                     # centrality or pinchpoint analyses
            # Widen the table to 16 columns; -1 marks "not yet computed".
            extraCols = npy.zeros((numLinks, 6), dtype="float64")
            linkTable = linkTable[:,0:10]
            linkTable = npy.append(linkTable, extraCols, axis=1)
            linkTable[:, cfg.LTB_LCPLEN] = -1
            linkTable[:, cfg.LTB_CWDEUCR] = -1
            linkTable[:, cfg.LTB_CWDPATHR] = -1
            linkTable[:, cfg.LTB_EFFRESIST] = -1
            linkTable[:, cfg.LTB_CWDTORR] = -1
            del extraCols

        linkTable[:, cfg.LTB_CURRENT] = -1
        coresToProcess = npy.unique(
            linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)
        del coresToProcess
        lu.dashline(0)

        coreList = linkTable[:,cfg.LTB_CORE1:cfg.LTB_CORE2+1]
        coreList = npy.sort(coreList)
        #gprint('There are ' + str(len(npy.unique(coreList))) ' core areas.')

        # set up directory for centrality
        INCENTRALITYDIR = cfg.CENTRALITYBASEDIR
        OUTCENTRALITYDIR = path.join(cfg.CENTRALITYBASEDIR,
                                     cfg.CIRCUITOUTPUTDIR_NM)
        CONFIGDIR = path.join(INCENTRALITYDIR, cfg.CIRCUITCONFIGDIR_NM)

        # Set Circuitscape options and write config file
        options = lu.setCircuitscapeOptions()
        options['data_type']='network'
        options['habitat_file'] = path.join(INCENTRALITYDIR,
                                            'Circuitscape_graph.txt')
        # Setting point file equal to graph to do all pairs in Circuitscape
        options['point_file'] = path.join(INCENTRALITYDIR,
                                          'Circuitscape_graph.txt')
        outputFN = 'Circuitscape_network.out'
        options['output_file'] = path.join(OUTCENTRALITYDIR, outputFN)
        configFN = 'Circuitscape_network.ini'
        outConfigFile = path.join(CONFIGDIR, configFN)
        lu.writeCircuitscapeConfigFile(outConfigFile, options)

        # Keep only rows with an active link type (>= 1) for the graph.
        delRows = npy.asarray(npy.where(linkTable[:,cfg.LTB_LINKTYPE] < 1))
        delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
        delRowsVector[:] = delRows[0, :]
        LT = lu.delete_row(linkTable, delRowsVector)
        del delRows
        del delRowsVector
        # Graph rows are (core1, core2, cost-weighted distance).
        graphList = npy.zeros((LT.shape[0],3), dtype="float64")
        graphList[:,0] = LT[:,cfg.LTB_CORE1]
        graphList[:,1] = LT[:,cfg.LTB_CORE2]
        graphList[:,2] = LT[:,cfg.LTB_CWDIST]

        write_graph(options['habitat_file'] ,graphList)
        gprint('\nCalculating current flow centrality using Circuitscape...')
        #subprocess.call([csPath, outConfigFile], shell=True)
        memFlag = call_circuitscape(csPath, outConfigFile)

        outputFN = 'Circuitscape_network_branch_currents_cum.txt'
        currentList = path.join(OUTCENTRALITYDIR, outputFN)
        # Retry once if Circuitscape produced no output on the first run.
        if not arcpy.Exists(currentList):
            write_graph(options['habitat_file'] ,graphList)
            gprint('\nCalculating current flow centrality using Circuitscape '
                   '(2nd try)...')
            # subprocess.call([csPath, outConfigFile], shell=True)
            memFlag = call_circuitscape(csPath, outConfigFile)
        if not arcpy.Exists(currentList):
            lu.dashline(1)
            msg = ('ERROR: No Circuitscape output found.\n'
                   'It looks like Circuitscape failed.')
            arcpy.AddError(msg)
            lu.write_log(msg)
            exit(1)

        currents = load_graph(currentList,graphType='graph/network',
                              datatype='float64')

        # Copy each branch current into the matching link-table row(s).
        numLinks = currents.shape[0]
        for x in range(0,numLinks):
            corex = currents[x,0]
            corey = currents[x,1]
            #linkId = LT[x,cfg.LTB_LINKID]
            row = lu.get_links_from_core_pairs(linkTable, corex, corey)
            #row = lu.get_linktable_row(linkId, linkTable)
            linkTable[row,cfg.LTB_CURRENT] = currents[x,2]

        coreCurrentFN = 'Circuitscape_network_node_currents_cum.txt'
        nodeCurrentList = path.join(OUTCENTRALITYDIR, coreCurrentFN)
        nodeCurrents = load_graph(nodeCurrentList,graphType='graph/network',
                                  datatype='float64')

        # Write node currents into the CF_Central field of the core copy.
        numNodeCurrents = nodeCurrents.shape[0]
        rows = arcpy.UpdateCursor(coreCopy)
        # NOTE(review): newRow() looks like InsertCursor API being called on
        # an UpdateCursor, and the result is immediately overwritten by the
        # for-loop variable — confirm this call is needed / does not raise.
        row = rows.newRow()
        for row in rows:
            coreID = row.getValue(cfg.COREFN)
            for i in range (0, numNodeCurrents):
                if coreID == nodeCurrents[i,0]:
                    row.setValue("CF_Central", nodeCurrents[i,1])
                    break
            rows.updateRow(row)
            #row = rows.newRow()
        del row, rows

        gprint('Done with centrality calculations.')

        finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=5,
                                                 thisStep=7)
        linkTableFile = path.join(cfg.DATAPASSDIR, "linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFile, inLinkTableFile)
        linkTableFinalFile = path.join(cfg.OUTPUTDIR, cfg.PREFIX
                                       + "_linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFinalFile,
                            inLinkTableFile)
        gprint('Copy of final linkTable written to '+ linkTableFinalFile)

        finalCoreFile = path.join(cfg.CORECENTRALITYGDB,
                                  cfg.PREFIX + '_Cores')
        #copy core area map to gdb.
        if not arcpy.Exists(cfg.CORECENTRALITYGDB):
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                           path.basename(cfg.CORECENTRALITYGDB))
        arcpy.CopyFeatures_management(coreCopy, finalCoreFile)

        gprint('Creating shapefiles with linework for links.')
        lu.write_link_maps(linkTableFinalFile, step=7)

        # Copy final link maps to gdb and clean up.
        lu.copy_final_link_maps(step=7)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)
    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
    return