Example #1
def delete_final_gdb(finalgdb):
    """Deletes final geodatabase"""
    if gp.Exists(finalgdb) and cfg.STEP5:
        try:
            lu.clean_out_workspace(finalgdb)
        except:
            lu.dashline(1)
            msg = ('ERROR: Could not remove contents of geodatabase ' +
                   finalgdb + '. \nIs it open in ArcMap? You may '
                   'need to re-start ArcMap to release the file lock.')
            lu.raise_error(msg)
        lu.delete_dir(finalgdb)
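A minimal call-site sketch (assumptions: the module globals gp, cfg and lu are already configured, and building the final geodatabase path from cfg.OUTPUTDIR and cfg.PINCHGDB is purely illustrative, mirroring how the output geodatabase is constructed in the later examples):

    from os import path
    finalgdb = path.join(cfg.OUTPUTDIR, path.basename(cfg.PINCHGDB))
    delete_final_gdb(finalgdb)  # clears the workspace, then removes the folder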
Example #2
def STEP8_calc_pinchpoints():
    """ Maps pinch points in Linkage Mapper corridors using Circuitscape
        given CWD calculations from s3_calcCwds.py.

    """
    try:
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)
        
        restartFlag = False
        if cfg.CWDCUTOFF < 0:
            cfg.CWDCUTOFF = cfg.CWDCUTOFF * -1
            restartFlag = True # Restart code in progress
        
        CSPATH = lu.get_cs_path()                
        outputGDB = path.join(cfg.OUTPUTDIR, path.basename(cfg.PINCHGDB))
        
        arcpy.env.overwriteOutput = True
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST
        arcpy.env.snapRaster = cfg.RESRAST

        resRaster = cfg.RESRAST
        arcpy.env.extent = "MINOF"

        
        minObject = arcpy.GetRasterProperties_management(resRaster, "MINIMUM") 
        rasterMin = float(str(minObject.getOutput(0)))
        if rasterMin <= 0:
            msg = ('Error: resistance raster cannot have 0 or negative values.')
            lu.raise_error(msg)
                
        if cfg.DO_ADJACENTPAIRS:
            prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep = 8)
            if not arcpy.Exists(prevLcpShapefile):
                msg = ('Cannot find an LCP shapefile from step 5.  Please '
                        'rerun that step and any previous ones if necessary.')
                lu.raise_error(msg)

            # Remove lcp shapefile
            lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s8.shp")
            lu.delete_data(lcpShapefile)

        inLinkTableFile = lu.get_prev_step_link_table(step=8)
        linkTable = lu.load_link_table(inLinkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg =('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        if linkTable.shape[1] < 16: # If linktable has no entries from prior
                                    # centrality or pinchpoint analyses
            extraCols = npy.zeros((numLinks, 6), dtype="float64")
            linkTable = linkTable[:,0:10]
            linkTable = npy.append(linkTable, extraCols, axis=1)
            linkTable[:, cfg.LTB_LCPLEN] = -1
            linkTable[:, cfg.LTB_CWDEUCR] = -1
            linkTable[:, cfg.LTB_CWDPATHR] = -1
            linkTable[:, cfg.LTB_EFFRESIST] = -1
            linkTable[:, cfg.LTB_CWDTORR] = -1
            linkTable[:, cfg.LTB_CURRENT] = -1
            del extraCols

        # set up directories for circuit and circuit mosaic grids
        # Create output geodatabase
        if not arcpy.Exists(cfg.PINCHGDB):
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                            path.basename(cfg.PINCHGDB))

        mosaicRaster = path.join(cfg.CIRCUITBASEDIR, "current_mos" + tif)
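        # ('tif' is a module-level filename-extension constant, assumed to be
        # '.tif', defined elsewhere in this script)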
        coresToProcess = npy.unique(
                                linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)
        del coresToProcess

        lu.dashline(0)
        coreList = linkTable[:,cfg.LTB_CORE1:cfg.LTB_CORE2+1]
        coreList = npy.sort(coreList)
        #gprint('There are ' + str(len(npy.unique(coreList))) + ' core areas.')

        INCIRCUITDIR = cfg.CIRCUITBASEDIR
        OUTCIRCUITDIR = path.join(cfg.CIRCUITBASEDIR,
                                  cfg.CIRCUITOUTPUTDIR_NM)
        CONFIGDIR = path.join(INCIRCUITDIR, cfg.CIRCUITCONFIGDIR_NM)

        # Cutoff value text to append to filenames
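        # e.g. a CWDCUTOFF of 200000 becomes '200k'; 2000000 becomes '2m'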
        cutoffText = str(cfg.CWDCUTOFF)
        if cutoffText[-6:] == '000000':
            cutoffText = cutoffText[0:-6]+'m' 
        elif cutoffText[-3:] == '000':
            cutoffText = cutoffText[0:-3]+'k' 

        if cfg.SQUARERESISTANCES:
            # Square resistance values
            squaredRaster = path.join(cfg.SCRATCHDIR,'res_sqr')
            arcpy.env.workspace = cfg.SCRATCHDIR
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
            outRas = Raster(resRaster) * Raster(resRaster)
            outRas.save(squaredRaster)
            resRaster = squaredRaster

        if cfg.DO_ADJACENTPAIRS:
            linkLoop = 0
            lu.dashline(1)
            gprint('Mapping pinch points in individual corridors \n'
                    'using Circuitscape.')
            lu.dashline(1)
            gprint('If you try to cancel your run and the Arc dialog hangs, ')
            gprint('you can kill Circuitscape by opening Windows Task Manager')
            gprint('and ending the cs_run.exe process.')                    
            lu.dashline(2)

            for x in range(0,numLinks):            
                linkId = str(int(linkTable[x,cfg.LTB_LINKID]))
                if not (linkTable[x,cfg.LTB_LINKTYPE] > 0):
                    continue
                linkLoop = linkLoop + 1
                linkDir = path.join(cfg.SCRATCHDIR, 'link' + linkId)
                if restartFlag == True and path.exists(linkDir):
                    gprint('continuing')
                    continue
                restartFlag = False
                lu.create_dir(linkDir)
                start_time1 = time.clock()

                # source and target cores
                corex=int(coreList[x,0])
                corey=int(coreList[x,1])

                # Get cwd rasters for source and target cores
                cwdRaster1 = lu.get_cwd_path(corex)
                cwdRaster2 = lu.get_cwd_path(corey)

                lccNormRaster = path.join(linkDir, 'lcc_norm')
                arcpy.env.extent = "MINOF"

                link = lu.get_links_from_core_pairs(linkTable, corex,
                                                    corey)
                lcDist = float(linkTable[link,cfg.LTB_CWDIST])

                # Normalized lcc rasters are created by adding cwd rasters
                # and subtracting the least cost distance between them.
                outRas = Raster(cwdRaster1) + Raster(cwdRaster2) - lcDist
                outRas.save(lccNormRaster)

                #create raster mask
                resMaskRaster = path.join(linkDir, 'res_mask'+tif)

                # Cells within the CWD cutoff get 1; all others become NoData
                outCon = arcpy.sa.Con(Raster(lccNormRaster) <= cfg.CWDCUTOFF, 1)
                outCon.save(resMaskRaster)

                # Convert to poly.  Use as mask to clip resistance raster.
                resMaskPoly = path.join(linkDir,
                                        'res_mask_poly.shp')
                arcpy.RasterToPolygon_conversion(resMaskRaster, resMaskPoly,
                                              "NO_SIMPLIFY")
                arcpy.env.extent = resMaskPoly

                # TIF output can include 0 values in some cases with the
                # CP LI model, so use ESRI GRID format here instead.
                resClipRasterMasked = path.join(linkDir,
                                                'res_clip_m') 
                # Extract masked resistance raster.  
                # Needs to be float to get export to npy to work.
                outRas = arcpy.sa.ExtractByMask(resRaster, resMaskPoly) + 0.0 
                outRas.save(resClipRasterMasked)
               
                resNpyFN = 'resistances_link_' + linkId + '.npy'
                resNpyFile = path.join(INCIRCUITDIR, resNpyFN)
                numElements, numResistanceNodes = export_ras_to_npy(resClipRasterMasked,
                                                          resNpyFile)
                
                totMem, availMem = lu.get_mem()
                # gprint('Total memory: ' + str(totMem))
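                # Rule of thumb: warn above ~2 million nodes per GB of
                # available RAM (e.g. with 4 GB free, warn above ~8 million
                # resistance nodes).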
                if numResistanceNodes / availMem > 2000000:
                    lu.dashline(1)
                    gwarn('Warning:')
                    gwarn('Circuitscape can only solve 2-3 million nodes')
                    gwarn('per gigabyte of available RAM. \nTotal physical RAM'
                            ' on your machine is ~' + str(totMem) 
                            + ' GB. \nAvailable memory is ~'+ str(availMem) 
                            + ' GB. \nYour resistance raster has '
                            + str(numResistanceNodes) + ' nodes.')                                                          
                    lu.dashline(2)
                corePairRaster = path.join(linkDir, 'core_pairs'+tif)
                arcpy.env.extent = resClipRasterMasked

                # Next result needs to be floating pt for numpy export
                outCon = arcpy.sa.Con(Raster(cwdRaster1) == 0, corex,
                            arcpy.sa.Con(Raster(cwdRaster2) == 0, corey + 0.0))
                outCon.save(corePairRaster)

                coreNpyFN = 'cores_link_' + linkId + '.npy'
                coreNpyFile = path.join(INCIRCUITDIR, coreNpyFN)
                numElements, numNodes = export_ras_to_npy(corePairRaster,
                                                          coreNpyFile)

                arcpy.env.extent = "MINOF"

                # Set circuitscape options and call
                options = lu.setCircuitscapeOptions()
                if cfg.WRITE_VOLT_MAPS == True:
                    options['write_volt_maps']=True
                options['habitat_file'] = resNpyFile
                
                # if int(linkId) > 2:
                    # options['habitat_file'] = 'c:\\test.dummy'
                                
                options['point_file'] = coreNpyFile
                options['set_focal_node_currents_to_zero']=True
                outputFN = 'Circuitscape_link' + linkId + '.out'
                options['output_file'] = path.join(OUTCIRCUITDIR, outputFN)
                if numElements > 250000:
                    options['print_timings']=True
                configFN = 'pinchpoint_config' + linkId + '.ini'

                outConfigFile = path.join(CONFIGDIR, configFN)
                lu.writeCircuitscapeConfigFile(outConfigFile, options)                    
                gprint('Processing link ID #' + str(linkId) + '. Resistance map'
                        ' has ' + str(int(numResistanceNodes)) + ' nodes.') 

                memFlag = call_circuitscape(CSPATH, outConfigFile)
                      
                currentFN = ('Circuitscape_link' + linkId 
                            + '_cum_curmap.npy')
                currentMap = path.join(OUTCIRCUITDIR, currentFN)
                
                if not arcpy.Exists(currentMap):
                    print_failure(numResistanceNodes, memFlag, 10)
                    numElements, numNodes = export_ras_to_npy(
                                                resClipRasterMasked,resNpyFile)
                    memFlag = call_circuitscape(CSPATH, outConfigFile)

                    currentFN = ('Circuitscape_link' + linkId 
                                + '_cum_curmap.npy')
                    currentMap = path.join(OUTCIRCUITDIR, currentFN)
                
                if not arcpy.Exists(currentMap):                
                    msg = ('\nCircuitscape failed. See error information above.')
                    arcpy.AddError(msg)
                    lu.write_log(msg)
                    exit(1)

                # Either set core areas to nodata in current map or
                # divide each by its radius
                currentRaster = path.join(linkDir, "current" + tif)
                import_npy_to_ras(currentMap,corePairRaster,currentRaster)
                
                if cfg.WRITE_VOLT_MAPS == True:
                    voltFN = ('Circuitscape_link' + linkId + '_voltmap_'
                           + str(corex) + '_'+str(corey) + '.npy')
                    voltMap = path.join(OUTCIRCUITDIR, voltFN)
                    voltRaster = path.join(outputGDB,
                             cfg.PREFIX + "_voltMap_"+ str(corex) + '_'+str(corey))                
                    import_npy_to_ras(voltMap,corePairRaster,voltRaster)
                    gprint('Building output statistics and pyramids '
                                   'for voltage raster\n')
                    lu.build_stats(voltRaster) 
                    
                arcpy.env.extent = currentRaster

                if SETCORESTONULL:
                    # Set core areas to NoData in current map for color ramping
                    currentRaster2 = currentRaster + '2' + tif
                    outCon = arcpy.sa.Con(arcpy.sa.IsNull(Raster
                                      (corePairRaster)), Raster(currentRaster))
                    outCon.save(currentRaster2)
                    currentRaster = currentRaster2
                arcpy.env.extent = "MAXOF"
                if linkLoop == 1:
                    lu.delete_data(mosaicRaster)
                    @retry(10)
                    def copyRas2():
                        arcpy.CopyRaster_management(currentRaster,
                                                    mosaicRaster)
                    copyRas2()
                else:
                    @retry(10)
                    def mosaicRas():                
                        arcpy.Mosaic_management(currentRaster,
                                         mosaicRaster, "MAXIMUM", "MATCH")
                    mosaicRas()
                    
                resistancesFN = ('Circuitscape_link' + linkId
                            + '_resistances_3columns.out')

                resistancesFile = path.join(OUTCIRCUITDIR,resistancesFN)
                resistances = npy.loadtxt(resistancesFile,
                                          dtype = 'Float64', comments='#')

                resistance = float(str(arcpy.env.cellSize)) * resistances[2]
                linkTable[link,cfg.LTB_EFFRESIST] = resistance

                # Ratio
                if not cfg.SQUARERESISTANCES:
                    linkTable[link,cfg.LTB_CWDTORR] = (linkTable[link,
                           cfg.LTB_CWDIST] / linkTable[link,cfg.LTB_EFFRESIST])
                # Clean up
                if cfg.SAVE_TEMP_CIRCUIT_FILES == False:
                    lu.delete_file(coreNpyFile)
                    coreNpyBase, extension = path.splitext(coreNpyFile)
                    lu.delete_data(coreNpyBase + '.hdr')                    
                    lu.delete_file(resNpyFile)
                    resNpyBase, extension = path.splitext(resNpyFile)
                    lu.delete_data(resNpyBase + '.hdr')                    
                    lu.delete_file(currentMap)
                    curMapBase, extension = path.splitext(currentMap)
                    lu.delete_data(curMapBase + '.hdr')
                    lu.delete_data(currentRaster) 
                    lu.clean_out_workspace(linkDir)
                    lu.delete_dir(linkDir) 
                gprint('Finished with link ID #' + str(linkId) + '. ' + 
                        str(linkLoop) + ' out of ' + str(numCorridorLinks) + 
                        ' links have been processed.')
                start_time1 = lu.elapsed_time(start_time1)
                
            outputRaster = path.join(outputGDB, cfg.PREFIX + 
                                     "_current_adjacentPairs_" + cutoffText)
            lu.delete_data(outputRaster)
            
            @retry(10)
            def copyRas():
                arcpy.CopyRaster_management(mosaicRaster, outputRaster)
            copyRas()

            gprint('Building output statistics and pyramids '
                                  'for corridor pinch point raster\n')
            lu.build_stats(outputRaster)
            
            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=5,
                                                      thisStep=8)

            linkTableFile = path.join(cfg.DATAPASSDIR, "linkTable_s5_plus.csv")
            lu.write_link_table(finalLinkTable, linkTableFile, inLinkTableFile)
            linkTableFinalFile = path.join(cfg.OUTPUTDIR, cfg.PREFIX + 
                                           "_linkTable_s5_plus.csv")
            lu.write_link_table(finalLinkTable,
                                linkTableFinalFile, inLinkTableFile)
            gprint('Copy of linkTable written to '+
                              linkTableFinalFile)
            #fixme: update sticks?

            gprint('Creating shapefiles with linework for links.')
            lu.write_link_maps(linkTableFinalFile, step=8)

            # Copy final link maps to gdb.
            lu.copy_final_link_maps(step=8)

            lu.delete_data(mosaicRaster)

        if not cfg.DO_ALLPAIRS:
            # Clean up temporary files
            if not cfg.SAVECURRENTMAPS:
                lu.delete_dir(OUTCIRCUITDIR)
            return

        lu.dashline(1)
        gprint('Mapping global pinch points among all\n'
                'core area pairs using Circuitscape.')                   
                
        if cfg.ALL_PAIR_SCENARIO=='pairwise':
            gprint('Circuitscape will be run in PAIRWISE mode.')
                        
        else:
            gprint('Circuitscape will be run in ALL-TO-ONE mode.')     
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST

        S8CORE_RAS = "s8core_ras"
        s8CoreRasPath = path.join(cfg.SCRATCHDIR,S8CORE_RAS)

        arcpy.FeatureToRaster_conversion(cfg.COREFC, cfg.COREFN,
                                         s8CoreRasPath, arcpy.env.cellSize)
        binaryCoreRaster = path.join(cfg.SCRATCHDIR,"core_ras_bin")

        # The following commands cause file lock problems on save.  using gp
        # instead.
        # outCon = arcpy.sa.Con(S8CORE_RAS, 1, "#", "VALUE > 0")
        # outCon.save(binaryCoreRaster)
        # gp.Con_sa(s8CoreRasPath, 1, binaryCoreRaster, "#", "VALUE > 0")
        outCon = arcpy.sa.Con(Raster(s8CoreRasPath) > 0, 1)
        outCon.save(binaryCoreRaster)
        s5corridorRas = path.join(cfg.OUTPUTGDB,cfg.PREFIX + "_corridors")
        
        if not arcpy.Exists(s5corridorRas):
            s5corridorRas = path.join(cfg.OUTPUTGDB,cfg.PREFIX + 
                                      "_lcc_mosaic_int")

        outCon = arcpy.sa.Con(Raster(s5corridorRas) <= cfg.CWDCUTOFF, Raster(
                              resRaster), arcpy.sa.Con(Raster(
                              binaryCoreRaster) > 0, Raster(resRaster)))

        resRasClipPath = path.join(cfg.SCRATCHDIR,'res_ras_clip')
        outCon.save(resRasClipPath)

        arcpy.env.cellSize = resRasClipPath
        arcpy.env.extent = resRasClipPath
        s8CoreRasClipped = s8CoreRasPath + '_c'

        # Produce core raster with the same extent as the clipped resistance
        # raster. Added to ensure the correct data type; otherwise NoData
        # values came out positive for cores.
        outCon = arcpy.sa.Con(arcpy.sa.IsNull(Raster(s8CoreRasPath)), 
                              -9999, Raster(s8CoreRasPath))  
        outCon.save(s8CoreRasClipped)

        resNpyFN = 'resistances.npy'
        resNpyFile = path.join(INCIRCUITDIR, resNpyFN)
        numElements, numResistanceNodes = export_ras_to_npy(resRasClipPath,resNpyFile)

        totMem, availMem = lu.get_mem()
        # gprint('Total memory: ' + str(totMem))
        if numResistanceNodes / availMem > 2000000:
            lu.dashline(1)
            gwarn('Warning:')
            gwarn('Circuitscape can only solve 2-3 million nodes')
            gwarn('per gigabyte of available RAM. \nTotal physical RAM '
                    'on your machine is ~' + str(totMem)
                    + ' GB. \nAvailable memory is ~'+ str(availMem)
                    + ' GB. \nYour resistance raster has '
                    + str(numResistanceNodes) + ' nodes.')   
            lu.dashline(0)

        coreNpyFN = 'cores.npy'
        coreNpyFile = path.join(INCIRCUITDIR, coreNpyFN)
        numElements, numNodes = export_ras_to_npy(s8CoreRasClipped,coreNpyFile)

        arcpy.env.extent = "MINOF"

        options = lu.setCircuitscapeOptions()
        options['scenario']=cfg.ALL_PAIR_SCENARIO
        options['habitat_file'] = resNpyFile
        options['point_file'] = coreNpyFile
        options['set_focal_node_currents_to_zero']=True
        outputFN = 'Circuitscape.out'
        options['output_file'] = path.join(OUTCIRCUITDIR, outputFN)
        options['print_timings']=True
        configFN = 'pinchpoint_allpair_config.ini'
        outConfigFile = path.join(CONFIGDIR, configFN)
        lu.writeCircuitscapeConfigFile(outConfigFile, options)
        gprint('\nResistance map has ' + str(int(numResistanceNodes)) + ' nodes.') 
        lu.dashline(1)
        gprint('If you try to cancel your run and the Arc dialog hangs, ')
        gprint('you can kill Circuitscape by opening Windows Task Manager')
        gprint('and ending the cs_run.exe process.')             
        lu.dashline(0)
        
        call_circuitscape(CSPATH, outConfigFile)
        # test = subprocess.call([CSPATH, outConfigFile],
                               # creationflags = subprocess.CREATE_NEW_CONSOLE)

        if options['scenario']=='pairwise':
            rasterSuffix =  "_current_allPairs_" + cutoffText
                        
        else:
            rasterSuffix =  "_current_allToOne_" + cutoffText

        currentFN = 'Circuitscape_cum_curmap.npy'
        currentMap = path.join(OUTCIRCUITDIR, currentFN)
        outputRaster = path.join(outputGDB, cfg.PREFIX + rasterSuffix)
        currentRaster = path.join(cfg.SCRATCHDIR, "current")

        try:
            import_npy_to_ras(currentMap,resRasClipPath,outputRaster)
        except:
            lu.dashline(1)
            msg = ('ERROR: Circuitscape failed. \n'
                  'Note: Circuitscape can only solve 2-3 million nodes'
                  '\nper gigabyte of available RAM. The resistance '
                  '\nraster for the last corridor had '
                  + str(numResistanceNodes) + ' nodes.\n\nResistance '
                  'raster values that vary by >6 orders of \nmagnitude'
                  ' can also cause failures, as can a mismatch in '
                  '\ncore area and resistance raster extents.')
            arcpy.AddError(msg)
            lu.write_log(msg)
            exit(1)

        #set core areas to nodata 
        if SETCORESTONULL:                  
            # Set core areas to NoData in current map for color ramping
            outputRasterND = outputRaster + '_noDataCores' 
            outCon = arcpy.sa.SetNull(Raster(s8CoreRasClipped) > 0, 
                                      Raster(outputRaster))   
            outCon.save(outputRasterND)                

        gprint('\nBuilding output statistics and pyramids '
                'for pinch point raster.')
        lu.build_stats(outputRaster)
        lu.build_stats(outputRasterND)

        # Clean up temporary files
        if not cfg.SAVECURRENTMAPS:
            lu.delete_dir(OUTCIRCUITDIR)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 8. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 8. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
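The @retry decorator used above (and again in the next example) is defined elsewhere in the Linkage Mapper utilities. A minimal sketch of a compatible implementation, assuming the intended behaviour is simply to re-run the wrapped function up to the given number of attempts and re-raise the last error:

    import time

    def retry(times):
        """Hypothetical sketch of the retry decorator used in these examples."""
        def decorator(func):
            def wrapper(*args, **kwargs):
                for attempt in range(times):
                    try:
                        return func(*args, **kwargs)
                    except Exception:
                        if attempt == times - 1:
                            raise  # out of attempts; propagate the error
                        time.sleep(5)  # assumed pause before retrying
            return wrapper
        return decorator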
Example #3
def STEP6_calc_barriers():
    """Detects influential barriers given CWD calculations from
       s3_calcCwds.py.

    """

    try:
        arcpy.CheckOutExtension("spatial")
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)

        if cfg.BARRIER_CWD_THRESH is not None:
            lu.dashline(1)
            gprint('Invoking CWD Threshold of ' + str(cfg.BARRIER_CWD_THRESH) +
                   ' map units.')

        if cfg.SUM_BARRIERS:
            sumSuffix = '_Sum'
            cfg.BARRIERBASEDIR = cfg.BARRIERBASEDIR + sumSuffix
            baseName, extension = path.splitext(cfg.BARRIERGDB)
            cfg.BARRIERGDB = baseName + sumSuffix + extension

            gprint('\nBarrier scores will be SUMMED across core pairs.')
        else:
            sumSuffix = ''

        # Delete contents of final output geodatabase
        # lu.clean_out_workspace(cfg.BARRIERGDB) #xxx try not doing this to allow multiple radii to be analyzed in separate runs
        if not arcpy.Exists(cfg.BARRIERGDB):
            # Create output geodatabase
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                           path.basename(cfg.BARRIERGDB))

        startRadius = int(cfg.STARTRADIUS)
        endRadius = int(cfg.ENDRADIUS)
        radiusStep = int(cfg.RADIUSSTEP)
        if radiusStep == 0:
            endRadius = startRadius  # Calculate at just one radius value
            radiusStep = 1
        linkTableFile = lu.get_prev_step_link_table(step=6)
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.RefreshCatalog(cfg.PROJECTDIR)
        PREFIX = path.basename(cfg.PROJECTDIR)
        # For speed:
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.env.overwriteOutput = True
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST
        arcpy.env.snapRaster = cfg.RESRAST
        spatialref = arcpy.Describe(cfg.RESRAST).spatialReference
        mapUnits = (str(spatialref.linearUnitName)).lower()
        if len(mapUnits) > 1 and mapUnits[-1] != 's':
            mapUnits = mapUnits + 's'

        if float(arcpy.env.cellSize) > startRadius or startRadius > endRadius:
            msg = ('Error: minimum detection radius must be greater than '
                   'cell size (' + str(arcpy.env.cellSize) +
                   ') \nand less than or equal to maximum detection radius.')
            lu.raise_error(msg)

        linkTable = lu.load_link_table(linkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg = ('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        # set up directories for barrier and barrier mosaic grids
        dirCount = 0
        gprint("Creating intermediate output folder: " + cfg.BARRIERBASEDIR)
        lu.delete_dir(cfg.BARRIERBASEDIR)
        lu.create_dir(cfg.BARRIERBASEDIR)
        arcpy.CreateFolder_management(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)
        cbarrierdir = path.join(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)

        coresToProcess = npy.unique(linkTable[:,
                                              cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)

        # Set up focal directories.
        # To keep there from being > 100 grids in any one directory,
        # outputs are written to:
        # barrier\focalX_ for cores 1-99 at radius X
        # barrier\focalX_1 for cores 100-199
        # etc.
        lu.dashline(0)

        for radius in range(startRadius, endRadius + 1, radiusStep):
            core1path = lu.get_focal_path(1, radius)
            path1, dir1 = path.split(core1path)
            path2, dir2 = path.split(path1)
            arcpy.CreateFolder_management(path.dirname(path2),
                                          path.basename(path2))
            arcpy.CreateFolder_management(path.dirname(path1),
                                          path.basename(path1))

            if maxCoreNum > 99:
                gprint('Creating subdirectories for ' + str(radius) + ' ' +
                       str(mapUnits) + ' radius analysis scale.')
                maxDirCount = int(maxCoreNum / 100)
                focalDirBaseName = dir2

                cp100 = (coresToProcess.astype('int32')) / 100
                ind = npy.where(cp100 > 0)
                dirNums = npy.unique(cp100[ind])
                for dirNum in dirNums:
                    focalDir = focalDirBaseName + str(dirNum)
                    gprint('...' + focalDir)
                    arcpy.CreateFolder_management(path2, focalDir)

        # Create resistance raster with filled-in Nodata values for later use
        arcpy.env.extent = cfg.RESRAST
        resistFillRaster = path.join(cfg.SCRATCHDIR, "resist_fill")
        output = arcpy.sa.Con(IsNull(cfg.RESRAST), 1000000000,
                              Raster(cfg.RESRAST) - 1)
        output.save(resistFillRaster)

        coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        coreList = npy.sort(coreList)

        # Loop through each search radius to calculate barriers in each link
        import time
        radId = 0  #keep track of number of radii processed- used for temp dir naming
        for radius in range(startRadius, endRadius + 1, radiusStep):
            radId = radId + 1
            linkTableTemp = linkTable.copy()

            @retry(10)
            #can't pass vars in and modify them.
            def doRadiusLoop():
                linkTable = linkTableTemp.copy()
                startTime = time.clock()
                randomerror()
                linkLoop = 0
                pctDone = 0
                gprint('\nMapping barriers at a radius of ' + str(radius) +
                       ' ' + str(mapUnits))
                if cfg.SUM_BARRIERS:
                    gprint('using SUM method')
                else:
                    gprint('using MAXIMUM method')
                if numCorridorLinks > 1:
                    gprint('0 percent done')
                lastMosaicRaster = None
                lastMosaicRasterPct = None
                for x in range(0, numLinks):
                    pctDone = lu.report_pct_done(linkLoop, numCorridorLinks,
                                                 pctDone)
                    linkId = str(int(linkTable[x, cfg.LTB_LINKID]))
                    if ((linkTable[x, cfg.LTB_LINKTYPE] > 0)
                            and (linkTable[x, cfg.LTB_LINKTYPE] < 1000)):
                        linkLoop = linkLoop + 1
                        # source and target cores
                        corex = int(coreList[x, 0])
                        corey = int(coreList[x, 1])

                        # Get cwd rasters for source and target cores
                        cwdRaster1 = lu.get_cwd_path(corex)
                        cwdRaster2 = lu.get_cwd_path(corey)

                        # Mask out areas above CWD threshold
                        cwdTemp1 = None
                        cwdTemp2 = None
                        if cfg.BARRIER_CWD_THRESH is not None:
                            if x == 1:
                                lu.dashline(1)
                                gprint('  Using CWD threshold of ' +
                                       str(cfg.BARRIER_CWD_THRESH) +
                                       ' map units.')
                            arcpy.env.extent = cfg.RESRAST
                            arcpy.env.cellSize = cfg.RESRAST
                            arcpy.env.snapRaster = cfg.RESRAST
                            cwdTemp1 = path.join(cfg.SCRATCHDIR,
                                                 "tmp" + str(corex))
                            outCon = arcpy.sa.Con(
                                cwdRaster1 < float(cfg.BARRIER_CWD_THRESH),
                                cwdRaster1)
                            outCon.save(cwdTemp1)
                            cwdRaster1 = cwdTemp1
                            cwdTemp2 = path.join(cfg.SCRATCHDIR,
                                                 "tmp" + str(corey))
                            outCon = arcpy.sa.Con(
                                cwdRaster2 < float(cfg.BARRIER_CWD_THRESH),
                                cwdRaster2)
                            outCon.save(cwdTemp2)
                            cwdRaster2 = cwdTemp2

                        focalRaster1 = lu.get_focal_path(corex, radius)
                        focalRaster2 = lu.get_focal_path(corey, radius)

                        link = lu.get_links_from_core_pairs(
                            linkTable, corex, corey)
                        lcDist = float(linkTable[link, cfg.LTB_CWDIST])

                        # Detect barriers at radius using neighborhood stats
                        # Create the Neighborhood Object
                        innerRadius = radius - 1
                        outerRadius = radius

                        dia = 2 * radius
                        InNeighborhood = ("ANNULUS " + str(innerRadius) + " " +
                                          str(outerRadius) + " MAP")

                        @retry(10)
                        def execFocal():
                            randomerror()
                            # Execute FocalStatistics
                            if not path.exists(focalRaster1):
                                arcpy.env.extent = cwdRaster1
                                outFocalStats = arcpy.sa.FocalStatistics(
                                    cwdRaster1, InNeighborhood, "MINIMUM",
                                    "DATA")
                                if setCoresToNull:
                                    outFocalStats2 = arcpy.sa.Con(
                                        outFocalStats > 0, outFocalStats
                                    )  # Set areas overlapping cores to NoData xxx
                                    outFocalStats2.save(focalRaster1)  #xxx
                                else:
                                    outFocalStats.save(focalRaster1)  #xxx
                                arcpy.env.extent = cfg.RESRAST

                            if not path.exists(focalRaster2):
                                arcpy.env.extent = cwdRaster2
                                outFocalStats = arcpy.sa.FocalStatistics(
                                    cwdRaster2, InNeighborhood, "MINIMUM",
                                    "DATA")
                                if setCoresToNull:
                                    outFocalStats2 = arcpy.sa.Con(
                                        outFocalStats > 0, outFocalStats
                                    )  # Set areas overlapping cores to NoData xxx
                                    outFocalStats2.save(focalRaster2)  #xxx
                                else:
                                    outFocalStats.save(focalRaster2)  #xxx

                                arcpy.env.extent = cfg.RESRAST

                        execFocal()

                        lu.delete_data(cwdTemp1)
                        lu.delete_data(cwdTemp2)

                        barrierRaster = path.join(
                            cbarrierdir, "b" + str(radius) + "_" + str(corex) +
                            "_" + str(corey) + '.tif')

                        if cfg.SUM_BARRIERS:  # Need to set nulls to 0, also
                            # create trim rasters as we go

                            outRas = ((lcDist - Raster(focalRaster1) -
                                       Raster(focalRaster2) - dia) / dia)
                            outCon = arcpy.sa.Con(IsNull(outRas), 0, outRas)
                            outCon2 = arcpy.sa.Con(outCon < 0, 0, outCon)
                            outCon2.save(barrierRaster)

                            # Execute FocalStatistics to fill out search radii
                            InNeighborhood = "CIRCLE " + str(
                                outerRadius) + " MAP"
                            fillRaster = path.join(
                                cbarrierdir, "b" + str(radius) + "_" +
                                str(corex) + "_" + str(corey) + "_fill.tif")
                            outFocalStats = arcpy.sa.FocalStatistics(
                                barrierRaster, InNeighborhood, "MAXIMUM",
                                "DATA")
                            outFocalStats.save(fillRaster)

                            if cfg.WRITE_TRIM_RASTERS:
                                trmRaster = path.join(
                                    cbarrierdir,
                                    "b" + str(radius) + "_" + str(corex) +
                                    "_" + str(corey) + "_trim.tif")
                                rasterList = [fillRaster, resistFillRaster]
                                outCellStatistics = arcpy.sa.CellStatistics(
                                    rasterList, "MINIMUM")
                                outCellStatistics.save(trmRaster)

                        else:
                            #Calculate potential benefit per map unit restored
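                            # Benefit = (lcDist - focal1 - focal2 - dia) / dia.
                            # Assumed example numbers: lcDist=10000, focal
                            # minima of 4000 and 3000, dia=200 gives
                            # (10000-4000-3000-200)/200 = 14 cost units saved
                            # per map unit restored.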
                            @retry(10)
                            def calcBen():
                                randomerror()
                                outRas = ((lcDist - Raster(focalRaster1) -
                                           Raster(focalRaster2) - dia) / dia)
                                outRas.save(barrierRaster)

                            calcBen()

                        if cfg.WRITE_PCT_RASTERS:
                            #Calculate PERCENT potential benefit per unit restored
                            barrierRasterPct = path.join(
                                cbarrierdir, "b" + str(radius) + "_" +
                                str(corex) + "_" + str(corey) + '_pct.tif')

                            @retry(10)
                            def calcBenPct():
                                randomerror()
                                outras = (100 *
                                          (Raster(barrierRaster) / lcDist))
                                outras.save(barrierRasterPct)

                            calcBenPct()

                        # Mosaic barrier results across core area pairs
                        mosaicDir = path.join(
                            cfg.SCRATCHDIR,
                            'mos' + str(radId) + '_' + str(x + 1))
                        lu.create_dir(mosaicDir)

                        mosFN = 'mos_temp'
                        tempMosaicRaster = path.join(mosaicDir, mosFN)
                        tempMosaicRasterTrim = path.join(
                            mosaicDir, 'mos_temp_trm')
                        arcpy.env.workspace = mosaicDir
                        if linkLoop == 1:
                            #If this is the first grid then copy rather than mosaic
                            arcpy.CopyRaster_management(
                                barrierRaster, tempMosaicRaster)
                            if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                                arcpy.CopyRaster_management(
                                    trmRaster, tempMosaicRasterTrim)

                        else:
                            if cfg.SUM_BARRIERS:
                                outCon = arcpy.sa.Con(
                                    Raster(barrierRaster) < 0,
                                    lastMosaicRaster,
                                    Raster(barrierRaster) +
                                    Raster(lastMosaicRaster))
                                outCon.save(tempMosaicRaster)
                                if cfg.WRITE_TRIM_RASTERS:
                                    outCon = arcpy.sa.Con(
                                        Raster(trmRaster) < 0,
                                        lastMosaicRasterTrim,
                                        Raster(trmRaster) +
                                        Raster(lastMosaicRasterTrim))
                                    outCon.save(tempMosaicRasterTrim)

                            else:
                                rasterString = ('"' + barrierRaster + ";" +
                                                lastMosaicRaster + '"')

                                @retry(10)
                                def mosaicToNew():
                                    randomerror()
                                    arcpy.MosaicToNewRaster_management(
                                        rasterString, mosaicDir, mosFN, "",
                                        "32_BIT_FLOAT", arcpy.env.cellSize,
                                        "1", "MAXIMUM", "MATCH")

                                mosaicToNew()
                                # gprint(str(corex)+'0'+str(corey))

                        if linkLoop > 1:  #Clean up from previous loop
                            lu.delete_data(lastMosaicRaster)
                            lastMosaicDir = path.dirname(lastMosaicRaster)
                            lu.clean_out_workspace(lastMosaicDir)
                            lu.delete_dir(lastMosaicDir)

                        lastMosaicRaster = tempMosaicRaster
                        if cfg.WRITE_TRIM_RASTERS:
                            lastMosaicRasterTrim = tempMosaicRasterTrim
                        if cfg.WRITE_PCT_RASTERS:
                            mosPctFN = 'mos_temp_pct'
                            mosaicDirPct = path.join(
                                cfg.SCRATCHDIR,
                                'mosP' + str(radId) + '_' + str(x + 1))
                            lu.create_dir(mosaicDirPct)
                            tempMosaicRasterPct = path.join(
                                mosaicDirPct, mosPctFN)
                            if linkLoop == 1:
                                # If this is the first grid then copy
                                # rather than mosaic
                                if cfg.SUM_BARRIERS:
                                    outCon = arcpy.sa.Con(
                                        Raster(barrierRasterPct) < 0, 0,
                                        arcpy.sa.Con(IsNull(barrierRasterPct),
                                                     0, barrierRasterPct))
                                    outCon.save(tempMosaicRasterPct)
                                else:
                                    arcpy.CopyRaster_management(
                                        barrierRasterPct, tempMosaicRasterPct)

                            else:
                                if cfg.SUM_BARRIERS:

                                    @retry(10)
                                    def sumBarriers():
                                        randomerror()
                                        outCon = arcpy.sa.Con(
                                            Raster(barrierRasterPct) < 0,
                                            lastMosaicRasterPct,
                                            Raster(barrierRasterPct) +
                                            Raster(lastMosaicRasterPct))
                                        outCon.save(tempMosaicRasterPct)

                                    sumBarriers()
                                else:
                                    rasterString = ('"' + barrierRasterPct +
                                                    ";" + lastMosaicRasterPct +
                                                    '"')

                                    @retry(10)
                                    def maxBarriers():
                                        randomerror()
                                        arcpy.MosaicToNewRaster_management(
                                            rasterString, mosaicDirPct,
                                            mosPctFN, "", "32_BIT_FLOAT",
                                            arcpy.env.cellSize, "1", "MAXIMUM",
                                            "MATCH")

                                    maxBarriers()

                            if linkLoop > 1:  #Clean up from previous loop
                                lu.delete_data(lastMosaicRasterPct)
                                lastMosaicDirPct = path.dirname(
                                    lastMosaicRasterPct)
                                lu.clean_out_workspace(lastMosaicDirPct)
                                lu.delete_dir(lastMosaicDirPct)

                            # lu.delete_data(lastMosaicRasterPct)
                            lastMosaicRasterPct = tempMosaicRasterPct

                        if not cfg.SAVEBARRIERRASTERS:
                            lu.delete_data(barrierRaster)
                            if cfg.WRITE_PCT_RASTERS:
                                lu.delete_data(barrierRasterPct)
                            if cfg.WRITE_TRIM_RASTERS:
                                lu.delete_data(trmRaster)

                        # Temporarily disable links in linktable -
                        # don't want to mosaic them twice
                        for y in range(x + 1, numLinks):
                            corex1 = int(coreList[y, 0])
                            corey1 = int(coreList[y, 1])
                            if corex1 == corex and corey1 == corey:
                                linkTable[y, cfg.LTB_LINKTYPE] = (
                                    linkTable[y, cfg.LTB_LINKTYPE] + 1000)
                            elif corex1 == corey and corey1 == corex:
                                linkTable[y, cfg.LTB_LINKTYPE] = (
                                    linkTable[y, cfg.LTB_LINKTYPE] + 1000)

                if numCorridorLinks > 1 and pctDone < 100:
                    gprint('100 percent done')
                gprint('Summarizing barrier data for search radius.')
                #rows that were temporarily disabled
                rows = npy.where(linkTable[:, cfg.LTB_LINKTYPE] > 1000)
                linkTable[rows, cfg.LTB_LINKTYPE] = (
                    linkTable[rows, cfg.LTB_LINKTYPE] - 1000)

                # -----------------------------------------------------------------

                # Set negative values to null or zero and write geodatabase.
                mosaicFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" +
                            str(radius))
                mosaicRaster = path.join(cfg.BARRIERGDB, mosaicFN)
                arcpy.env.extent = cfg.RESRAST

                # if setCoresToNull:
                # outCon = arcpy.sa.Con(Raster(tempMosaicRaster) < 0, 0,
                # tempMosaicRaster) #xxx
                # outCon.save(mosaicRaster) #xxx
                # else:
                outSetNull = arcpy.sa.SetNull(tempMosaicRaster,
                                              tempMosaicRaster,
                                              "VALUE < 0")  #xxx orig
                outSetNull.save(mosaicRaster)

                lu.delete_data(tempMosaicRaster)

                if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    mosaicFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix +
                                "_Rad" + str(radius))
                    mosaicRasterTrim = path.join(cfg.BARRIERGDB, mosaicFN)
                    arcpy.CopyRaster_management(tempMosaicRasterTrim,
                                                mosaicRasterTrim)
                    lu.delete_data(tempMosaicRasterTrim)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    mosaicPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix +
                                   "_Rad" + str(radius))
                    arcpy.env.extent = cfg.RESRAST
                    outSetNull = arcpy.sa.SetNull(tempMosaicRasterPct,
                                                  tempMosaicRasterPct,
                                                  "VALUE < 0")
                    mosaicRasterPct = path.join(cfg.BARRIERGDB, mosaicPctFN)
                    outSetNull.save(mosaicRasterPct)
                    lu.delete_data(tempMosaicRasterPct)

                # 'Grow out' maximum restoration gain to
                # neighborhood size for display
                InNeighborhood = "CIRCLE " + str(outerRadius) + " MAP"
                # Execute FocalStatistics
                fillRasterFN = "barriers_fill" + str(outerRadius) + tif
                fillRaster = path.join(cfg.BARRIERBASEDIR, fillRasterFN)
                outFocalStats = arcpy.sa.FocalStatistics(
                    mosaicRaster, InNeighborhood, "MAXIMUM", "DATA")
                outFocalStats.save(fillRaster)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    fillRasterPctFN = "barriers_fill_pct" + str(
                        outerRadius) + tif
                    fillRasterPct = path.join(cfg.BARRIERBASEDIR,
                                              fillRasterPctFN)
                    outFocalStats = arcpy.sa.FocalStatistics(
                        mosaicRasterPct, InNeighborhood, "MAXIMUM", "DATA")
                    outFocalStats.save(fillRasterPct)

                #Place copies of filled rasters in output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                fillRasterFN = (PREFIX + "_BarrierCircles" + sumSuffix +
                                "_Rad" + str(outerRadius))
                arcpy.CopyRaster_management(fillRaster, fillRasterFN)
                if cfg.WRITE_PCT_RASTERS:
                    fillRasterPctFN = (PREFIX + "_BarrierCircles_Pct" +
                                       sumSuffix + "_Rad" + str(outerRadius))
                    arcpy.CopyRaster_management(fillRasterPct, fillRasterPctFN)

                if not cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    # Create pared-down version of filled raster- remove pixels
                    # that don't need restoring by allowing a pixel to only
                    # contribute its resistance value to restoration gain
                    outRasterFN = "barriers_trm" + str(outerRadius) + tif
                    outRaster = path.join(cfg.BARRIERBASEDIR, outRasterFN)
                    rasterList = [fillRaster, resistFillRaster]
                    outCellStatistics = arcpy.sa.CellStatistics(
                        rasterList, "MINIMUM")
                    outCellStatistics.save(outRaster)

                    #SECOND ROUND TO CLIP BY DATA VALUES IN BARRIER RASTER
                    outRaster2FN = ("barriers_trm" + sumSuffix +
                                    str(outerRadius) + "_2" + tif)
                    outRaster2 = path.join(cfg.BARRIERBASEDIR, outRaster2FN)
                    output = arcpy.sa.Con(IsNull(fillRaster), fillRaster,
                                          outRaster)
                    output.save(outRaster2)
                    outRasterFN = (PREFIX + "_BarrierCircles_RBMin" +
                                   sumSuffix + "_Rad" + str(outerRadius))

                    outRasterPath = path.join(cfg.BARRIERGDB, outRasterFN)
                    arcpy.CopyRaster_management(outRaster2, outRasterFN)
                randomerror()
                startTime = lu.elapsed_time(startTime)

            # Call the above function
            doRadiusLoop()

        # Combine rasters across radii
        gprint('\nCreating summary rasters...')
        if startRadius != endRadius:
            radiiSuffix = ('_Rad' + str(int(startRadius)) + 'To' +
                           str(int(endRadius)) + 'Step' + str(int(radiusStep)))
            mosaicFN = "bar_radii"
            mosaicPctFN = "bar_radii_pct"
            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range(startRadius, endRadius + 1, radiusStep):
                #Fixme: run speed test with gdb mosaicking above and here
                radiusFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" +
                            str(radius))
                radiusRaster = path.join(cfg.BARRIERGDB, radiusFN)

                if radius == startRadius:
                    #If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radiusRaster, mosaicFN)
                else:
                    mosaicRaster = path.join(cfg.BARRIERBASEDIR, mosaicFN)
                    arcpy.Mosaic_management(radiusRaster, mosaicRaster,
                                            "MAXIMUM", "MATCH")

                if cfg.WRITE_PCT_RASTERS:
                    radiusPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix +
                                   "_Rad" + str(radius))
                    radiusRasterPct = path.join(cfg.BARRIERGDB, radiusPctFN)

                    if radius == startRadius:
                        #If this is the first grid then copy rather than mosaic
                        arcpy.CopyRaster_management(radiusRasterPct,
                                                    mosaicPctFN)
                    else:
                        mosaicRasterPct = path.join(cfg.BARRIERBASEDIR,
                                                    mosaicPctFN)
                        arcpy.Mosaic_management(radiusRasterPct,
                                                mosaicRasterPct, "MAXIMUM",
                                                "MATCH")

            # Copy results to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            mosaicFN = PREFIX + "_BarrierCenters" + sumSuffix + radiiSuffix
            arcpy.CopyRaster_management(mosaicRaster, mosaicFN)

            if cfg.WRITE_PCT_RASTERS:
                mosaicPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix +
                               radiiSuffix)
                arcpy.CopyRaster_management(mosaicRasterPct, mosaicPctFN)

            #GROWN OUT rasters
            fillMosaicFN = "barriers_radii_fill" + tif
            fillMosaicPctFN = "barriers_radii_fill_pct" + tif
            fillMosaicRaster = path.join(cfg.BARRIERBASEDIR, fillMosaicFN)
            fillMosaicRasterPct = path.join(cfg.BARRIERBASEDIR,
                                            fillMosaicPctFN)

            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range(startRadius, endRadius + 1, radiusStep):
                radiusFN = "barriers_fill" + str(radius) + tif
                #fixme- do this when only a single radius too
                radiusRaster = path.join(cfg.BARRIERBASEDIR, radiusFN)
                if radius == startRadius:
                    #If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radiusRaster, fillMosaicFN)
                else:
                    arcpy.Mosaic_management(radiusRaster, fillMosaicRaster,
                                            "MAXIMUM", "MATCH")

                if cfg.WRITE_PCT_RASTERS:
                    radiusPctFN = "barriers_fill_pct" + str(radius) + tif
                    #fixme- do this when only a single radius too
                    radiusRasterPct = path.join(cfg.BARRIERBASEDIR,
                                                radiusPctFN)
                    if radius == startRadius:
                        #If this is the first grid then copy rather than mosaic
                        arcpy.CopyRaster_management(radiusRasterPct,
                                                    fillMosaicPctFN)
                    else:
                        arcpy.Mosaic_management(radiusRasterPct,
                                                fillMosaicRasterPct, "MAXIMUM",
                                                "MATCH")

            # Copy result to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            fillMosaicFN = PREFIX + "_BarrierCircles" + sumSuffix + radiiSuffix
            arcpy.CopyRaster_management(fillMosaicRaster, fillMosaicFN)
            if cfg.WRITE_PCT_RASTERS:
                fillMosaicPctFN = (PREFIX + "_BarrierCircles_Pct" + sumSuffix +
                                   radiiSuffix)
                arcpy.CopyRaster_management(fillMosaicRasterPct,
                                            fillMosaicPctFN)


            # if not cfg.SUM_BARRIERS:
            # GROWN OUT AND TRIMMED rasters (Can't do percent)
            if cfg.WRITE_TRIM_RASTERS:
                trimMosaicFN = "bar_radii_trm"
                arcpy.env.workspace = cfg.BARRIERBASEDIR
                trimMosaicRaster = path.join(cfg.BARRIERBASEDIR, trimMosaicFN)
                for radius in range(startRadius, endRadius + 1, radiusStep):
                    radiusFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix +
                                "_Rad" + str(radius))
                    #fixme- do this when only a single radius too
                    radiusRaster = path.join(cfg.BARRIERGDB, radiusFN)

                    if radius == startRadius:
                        #If this is the first grid then copy rather than mosaic
                        arcpy.CopyRaster_management(radiusRaster, trimMosaicFN)
                    else:
                        arcpy.Mosaic_management(radiusRaster, trimMosaicRaster,
                                                "MAXIMUM", "MATCH")
                # Copy result to output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                trimMosaicFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix +
                                radiiSuffix)
                arcpy.CopyRaster_management(trimMosaicRaster, trimMosaicFN)

        if not cfg.SAVE_RADIUS_RASTERS:
            arcpy.env.workspace = cfg.BARRIERGDB
            rasters = arcpy.ListRasters()
            for raster in rasters:
                if 'rad' in raster.lower() and 'step' not in raster.lower():
                    lu.delete_data(raster)

        arcpy.env.workspace = cfg.BARRIERGDB
        rasters = arcpy.ListRasters()
        for raster in rasters:
            gprint('\nBuilding output statistics and pyramids\n'
                   'for raster ' + raster)
            lu.build_stats(raster)

        #Clean up temporary files and directories
        if not cfg.SAVEBARRIERRASTERS:
            lu.delete_dir(cbarrierdir)
            lu.delete_dir(cfg.BARRIERBASEDIR)

        if not cfg.SAVEFOCALRASTERS:
            for radius in range(startRadius, endRadius + 1, radiusStep):
                core1path = lu.get_focal_path(1, radius)
                path1, dir1 = path.split(core1path)
                path2, dir2 = path.split(path1)
                lu.delete_dir(path2)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
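The radius loops above always start the composite with a straight raster copy and then fold in each additional radius with a per-cell MAXIMUM mosaic. Below is a minimal, self-contained sketch of that copy-then-mosaic pattern; the folder, radii, and raster names are illustrative assumptions, not the tool's actual file layout.

# Minimal sketch of the copy-then-mosaic pattern used above.
# Paths, radii, and file names are illustrative assumptions.
import os
import arcpy

workspace = r"C:\project\barriers_scratch"   # hypothetical scratch folder
radii = [100, 200, 300]                      # hypothetical detection radii
mosaic_path = os.path.join(workspace, "barriers_mosaic.tif")

for i, radius in enumerate(radii):
    radius_raster = os.path.join(workspace,
                                 "barriers_rad" + str(radius) + ".tif")
    if i == 0:
        # First radius: start the composite with a straight copy.
        arcpy.CopyRaster_management(radius_raster, mosaic_path)
    else:
        # Later radii: keep the per-cell MAXIMUM across radii.
        arcpy.Mosaic_management(radius_raster, mosaic_path, "MAXIMUM", "MATCH")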
Example No. 5
def lm_master(argv=None):
    """Main function for linkage mapper.

    Called by ArcMap with parameters or run from command line with parameters
    entered in script below.  Calls functions in dedicated scripts for each of
    5 processing steps.

    """
    # Setup global variables
    if not cfg.lm_configured:  # Causing problems with iterative scripting
        if argv is None:
            argv = sys.argv
        cfg.configure(cfg.TOOL_LM, argv)

    gp = cfg.gp

    try:
        gprint = lu.gprint
        # Move results from earlier versions to new directory structure
        lu.move_old_results()
        gp.OverwriteOutput = True
        gp.pyramid = "NONE"
        gp.rasterstatistics = "NONE"

        # Create output directories if they don't exist
        if gp.Exists(cfg.OUTPUTDIR):
            gp.RefreshCatalog(cfg.OUTPUTDIR)
        lu.create_dir(cfg.OUTPUTDIR)
        lu.create_dir(cfg.LOGDIR)
        lu.create_dir(cfg.MESSAGEDIR)
        lu.create_dir(cfg.DATAPASSDIR)
        # Create fresh scratch directory if not restarting in midst of step 3
        # if cfg.S2EUCDISTFILE != None:
        # if cfg.S2EUCDISTFILE.lower() == "restart": pass
        # else:
        lu.delete_dir(cfg.SCRATCHDIR)
        lu.create_dir(cfg.SCRATCHDIR)
        lu.create_dir(cfg.ARCSCRATCHDIR)
        if cfg.TOOL == 'Linkage Mapper':
            cfg.logFilePath = lu.create_log_file(cfg.MESSAGEDIR, cfg.TOOL,
                                                 cfg.PARAMS)
        lu.print_drive_warning()

        installD = gp.GetInstallInfo("desktop")
        gprint('\nLinkage Mapper Version ' + cfg.releaseNum)
        try:
            gprint('on ArcGIS ' + installD['ProductName'] + ' ' +
                   installD['Version'] + ' Service Pack ' +
                   installD['SPNumber'])
        except:
            pass

        if cfg.CONNECTFRAGS:
            # gwarn = gp.AddWarning
            lu.dashline(1)
            lu.warn(
                'Custom mode: will run steps 1-2 ONLY to cluster core polygons within '
            )
            lu.warn(
                'the maximum Euclidean corridor distance from one another ')
            lu.warn('into polygons with a single cluster_ID value.')
            lu.warn(
                'Make sure you have set a Maximum Euclidean corridor distance.'
            )
            lu.dashline(2)
            cfg.STEP3 = False
            cfg.STEP4 = False
            cfg.STEP5 = False
            if cfg.MAXEUCDIST is None:
                raise RuntimeError('Maximum Euclidean distance required '
                                   'for custom cluster mode.')

        # Set data frame spatial reference to coordinate system of input data
        # Problems arise in this script (core raster creation) and in S2
        # (generate near table) if they differ.
        lu.set_dataframe_sr()

        # Check core ID field and project directory name.
        lu.check_cores(cfg.COREFC, cfg.COREFN)
        lu.check_project_dir()

        # Identify the first step to run; clean up link tables from that point on
        lu.dashline(1)
        if cfg.STEP1:
            gprint('Starting at step 1.')
            firststep = 1
        elif cfg.STEP2:
            gprint('Starting at step 2.')
            firststep = 2
        elif cfg.STEP3:
            gprint('Starting at step 3.')
            firststep = 3
            linkTableFile = lu.get_prev_step_link_table(step=3)  # Check exists
        elif cfg.STEP4:
            gprint('Starting at step 4.')
            firststep = 4
            linkTableFile = lu.get_prev_step_link_table(step=4)  # Check exists
        elif cfg.STEP5:
            gprint('Starting at step 5.')
            firststep = 5
            linkTableFile = lu.get_prev_step_link_table(step=5)  # Check exists
        lu.clean_up_link_tables(firststep)

        # Make a local grid copy of resistance raster for cwd runs-
        # will run faster than gdb.
        # Don't know if raster is in a gdb if entered from TOC
        lu.delete_data(cfg.RESRAST)
        gprint('\nMaking temporary copy of resistance raster for this run.')
        gp.OutputCoordinateSystem = gp.describe(cfg.COREFC).SpatialReference
        gp.Extent = gp.Describe(cfg.RESRAST_IN).Extent
        gp.SnapRaster = cfg.RESRAST_IN
        gp.cellSize = gp.Describe(cfg.RESRAST_IN).MeanCellHeight
        # import pdb; pdb.set_trace()
        try:
            gp.CopyRaster_management(cfg.RESRAST_IN, cfg.RESRAST)
        except:
            msg = ('ERROR: Could not make a copy of your resistance raster. ' +
                   'Try re-starting ArcMap to release the file lock.')
            lu.raise_error(msg)

        if (cfg.STEP1) or (cfg.STEP3):
            # Make core raster file
            gprint('\nMaking temporary raster of core file for this run.')
            lu.delete_data(cfg.CORERAS)
            gp.FeatureToRaster_conversion(
                cfg.COREFC, cfg.COREFN, cfg.CORERAS,
                gp.Describe(cfg.RESRAST).MeanCellHeight)
        # #   gp.RasterToPolygon_conversion(cfg.CORERAS, cfg.COREFC,
        # "NO_SIMPLIFY")

        def delete_final_gdb(finalgdb):
            """Deletes final geodatabase"""
            if gp.Exists(finalgdb) and cfg.STEP5:
                try:
                    lu.clean_out_workspace(finalgdb)
                except:
                    lu.dashline(1)
                    msg = ('ERROR: Could not remove contents of geodatabase ' +
                           finalgdb + '. \nIs it open in ArcMap? You may '
                           'need to re-start ArcMap to release the file lock.')
                    lu.raise_error(msg)
                lu.delete_dir(finalgdb)

        # Delete final output geodatabase
        delete_final_gdb(cfg.OUTPUTGDB_OLD)
        delete_final_gdb(cfg.OUTPUTGDB)
        delete_final_gdb(cfg.EXTRAGDB)
        delete_final_gdb(cfg.LINKMAPGDB)

        # Run linkage mapper processing steps
        if cfg.STEP1:
            s1.STEP1_get_adjacencies()
        if cfg.STEP2:
            s2.STEP2_build_network()
        if cfg.STEP3:
            s3.STEP3_calc_cwds()
        if cfg.STEP4:
            s4.STEP4_refine_network()
        if cfg.STEP5:
            s5.STEP5_calc_lccs()
            lu.dashline()
            gprint('Results from this run can be found in your output '
                   'directory:')
            gprint(cfg.OUTPUTDIR)

        # Clean up
        lu.delete_dir(cfg.SCRATCHDIR)
        # lu.delete_data(cfg.FCORES)

        gp.addmessage('\nDone with linkage mapping.\n')

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.exit_with_python_error(_SCRIPT_NAME)

    finally:
        lu.dashline()
        gprint('A record of run settings and messages can be found in your '
               'log directory:')
        gprint(cfg.MESSAGEDIR)
        lu.dashline(2)
        lu.close_log_file()
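As the docstring notes, lm_master() is normally called by ArcMap with the tool parameters but also accepts an argv-style list for command-line or iterative scripting use. A minimal usage sketch follows; the actual parameter list and order are defined by cfg.configure(cfg.TOOL_LM, argv) and are not reproduced here.

# Minimal usage sketch for lm_master() above.  Run as a script it picks up
# the tool parameters from sys.argv (which is how ArcMap supplies them);
# for iterative scripting, build an argv-style list and pass it explicitly
# so cfg.configure(cfg.TOOL_LM, argv) sees your values.  The parameter
# order is defined by cfg.configure() and is not reproduced here.
import sys

if __name__ == "__main__":
    lm_master()          # equivalent to lm_master(sys.argv)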
Example No. 6
def calc_lccs(normalize):
    try:
        if normalize:
            mosaicBaseName = "_corridors"
            writeTruncRaster = cfg.WRITETRUNCRASTER
            outputGDB = cfg.OUTPUTGDB
            if cfg.CALCNONNORMLCCS:
                SAVENORMLCCS = False
            else:
                SAVENORMLCCS = cfg.SAVENORMLCCS
        else:
            mosaicBaseName = "_NON_NORMALIZED_corridors"
            SAVENORMLCCS = False
            outputGDB = cfg.EXTRAGDB
            writeTruncRaster = False

        lu.dashline(1)
        gprint('Running script ' + _SCRIPT_NAME)
        linkTableFile = lu.get_prev_step_link_table(step=5)
        if cfg.useArcpy:
            arcpy.env.workspace = cfg.SCRATCHDIR
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
            arcpy.env.overwriteOutput = True
            arcpy.env.compression = "NONE"
        else:
            gp.workspace = cfg.SCRATCHDIR
            gp.scratchWorkspace = cfg.ARCSCRATCHDIR
            gp.OverwriteOutput = True

        if cfg.MAXEUCDIST is not None:
            gprint('Max Euclidean distance between cores')
            gprint('for linkage mapping set to ' + str(cfg.MAXEUCDIST))

        if cfg.MAXCOSTDIST is not None:
            gprint('Max cost-weighted distance between cores')
            gprint('for linkage mapping set to ' + str(cfg.MAXCOSTDIST))

        # set the analysis extent and cell size to that of the resistance
        # surface
        if cfg.useArcpy:
            arcpy.env.Extent = cfg.RESRAST
            arcpy.env.cellSize = cfg.RESRAST
            arcpy.env.snapRaster = cfg.RESRAST
            arcpy.env.mask = cfg.RESRAST
        else:
            gp.Extent = (gp.Describe(cfg.RESRAST)).Extent
            gp.cellSize = gp.Describe(cfg.RESRAST).MeanCellHeight
            gp.mask = cfg.RESRAST
            gp.snapraster = cfg.RESRAST

        linkTable = lu.load_link_table(linkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg = ('\nThere are no corridors to map. Bailing.')
            lu.raise_error(msg)

        if not cfg.STEP3 and not cfg.STEP4:
            # re-check for links that are too long or in case script run out of
            # sequence with more stringent settings
            gprint('Double-checking for corridors that are too long to map.')
            DISABLE_LEAST_COST_NO_VAL = True
            linkTable, numDroppedLinks = lu.drop_links(
                linkTable, cfg.MAXEUCDIST, cfg.MINEUCDIST, cfg.MAXCOSTDIST,
                cfg.MINCOSTDIST, DISABLE_LEAST_COST_NO_VAL)

        # Added to try to speed up:
        gp.pyramid = "NONE"
        gp.rasterstatistics = "NONE"

        # set up directories for normalized lcc and mosaic grids
        dirCount = 0
        gprint("Creating output folder: " + cfg.LCCBASEDIR)
        lu.delete_dir(cfg.LCCBASEDIR)
        gp.CreateFolder_management(path.dirname(cfg.LCCBASEDIR),
                                   path.basename(cfg.LCCBASEDIR))
        gp.CreateFolder_management(cfg.LCCBASEDIR, cfg.LCCNLCDIR_NM)
        clccdir = path.join(cfg.LCCBASEDIR, cfg.LCCNLCDIR_NM)
        # mosaicGDB = path.join(cfg.LCCBASEDIR, "mosaic.gdb")
        # gp.createfilegdb(cfg.LCCBASEDIR, "mosaic.gdb")
        #mosaicRaster = mosaicGDB + '\\' + "nlcc_mos" # Full path
        gprint("")
        if normalize:
            gprint('Normalized least-cost corridors will be written '
                   'to ' + clccdir + '\n')
        PREFIX = cfg.PREFIX

        # Add CWD layers for core area pairs to produce NORMALIZED LCC layers
        numGridsWritten = 0
        coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        coreList = npy.sort(coreList)

        x = 0
        linkCount = 0
        endIndex = numLinks
        while x < endIndex:
            if (linkTable[x, cfg.LTB_LINKTYPE] < 1):  # If not a valid link
                x = x + 1
                continue

            linkCount = linkCount + 1
            start_time = time.clock()

            linkId = str(int(linkTable[x, cfg.LTB_LINKID]))

            # source and target cores
            corex = int(coreList[x, 0])
            corey = int(coreList[x, 1])

            # Get cwd rasters for source and target cores
            cwdRaster1 = lu.get_cwd_path(corex)
            cwdRaster2 = lu.get_cwd_path(corey)

            if not gp.Exists(cwdRaster1):
                msg = ('\nError: cannot find cwd raster:\n' + cwdRaster1)
                lu.raise_error(msg)
            if not gp.Exists(cwdRaster2):
                msg = ('\nError: cannot find cwd raster:\n' + cwdRaster2)
                lu.raise_error(msg)

            lccNormRaster = path.join(clccdir,
                                      str(corex) + "_" +
                                      str(corey))  # + ".tif")
            if cfg.useArcpy:
                arcpy.env.Extent = "MINOF"
            else:
                gp.Extent = "MINOF"

            # FIXME: need to check for this?:
            # if exists already, don't re-create
            #if not gp.Exists(lccRaster):

            link = lu.get_links_from_core_pairs(linkTable, corex, corey)

            offset = 10000

            # Normalized lcc rasters are created by adding cwd rasters and
            # subtracting the least cost distance between them.
            count = 0
            if arcpyAvailable:
                cfg.useArcpy = True  # Fixes Canran Liu's bug with lcDist
            if cfg.useArcpy:

                lcDist = (float(linkTable[link, cfg.LTB_CWDIST]) - offset)

                if normalize:
                    statement = (
                        'outras = Raster(cwdRaster1) + Raster('
                        'cwdRaster2) - lcDist; outras.save(lccNormRaster)')

                else:
                    statement = ('outras =Raster(cwdRaster1) + Raster('
                                 'cwdRaster2); outras.save(lccNormRaster)')
            else:
                if normalize:
                    lcDist = str(linkTable[link, cfg.LTB_CWDIST] - offset)
                    expression = (cwdRaster1 + " + " + cwdRaster2 + " - " +
                                  lcDist)
                else:
                    expression = (cwdRaster1 + " + " + cwdRaster2)
                statement = ('gp.SingleOutputMapAlgebra_sa(expression, '
                             'lccNormRaster)')
            count = 0
            while True:
                try:
                    exec statement
                    randomerror()
                except:
                    count, tryAgain = lu.retry_arc_error(count, statement)
                    if not tryAgain:
                        exec statement
                else:
                    break
            cfg.useArcpy = False  # End fix for Canran Liu's bug with lcDist

            if normalize and cfg.useArcpy:
                try:
                    minObject = gp.GetRasterProperties(lccNormRaster,
                                                       "MINIMUM")
                    rasterMin = float(str(minObject.getoutput(0)))
                except:
                    gp.AddWarning(
                        '\n------------------------------------------------')
                    gp.AddWarning(
                        'WARNING: Raster minimum check failed in step 5. \n'
                        'This may mean the output rasters are corrupted. Please \n'
                        'be sure to check for valid rasters in ' + outputGDB)
                    rasterMin = 0
                tolerance = (float(gp.cellSize) * -10) + offset
                if rasterMin < tolerance:
                    lu.dashline(1)
                    msg = (
                        'WARNING: Minimum value of a corridor #' + str(x + 1) +
                        ' is much less than zero (' + str(rasterMin) + ').'
                        '\nThis could mean that BOUNDING CIRCLE BUFFER DISTANCES '
                        'were too small and a corridor passed outside of a '
                        'bounding circle, or that a corridor passed outside of the '
                        'resistance map. \n')
                    gp.AddWarning(msg)

            if cfg.useArcpy:
                arcpy.env.Extent = cfg.RESRAST
            else:
                gp.Extent = (gp.Describe(cfg.RESRAST)).Extent

            mosaicDir = path.join(cfg.LCCBASEDIR, 'mos' + str(x + 1))
            lu.create_dir(mosaicDir)
            mosFN = 'mos'  # fixme: change to 'mos.tif' and move
            mosaicRaster = path.join(mosaicDir, mosFN)

            if numGridsWritten == 0 and dirCount == 0:
                #If this is the first grid then copy rather than mosaic
                arcObj.CopyRaster_management(lccNormRaster, mosaicRaster)
            else:

                rasterString = '"' + lccNormRaster + ";" + lastMosaicRaster + '"'
                statement = ('arcObj.MosaicToNewRaster_management('
                             'rasterString,mosaicDir,mosFN, "", '
                             '"32_BIT_FLOAT", gp.cellSize, "1", "MINIMUM", '
                             '"MATCH")')
                # statement = ('arcpy.Mosaic_management(lccNormRaster, '
                # 'mosaicRaster, "MINIMUM", "MATCH")')

                count = 0
                while True:
                    try:
                        lu.write_log('Executing mosaic for link #' +
                                     str(linkId))
                        exec statement
                        lu.write_log('Done with mosaic.')
                        randomerror()
                    except:
                        count, tryAgain = lu.retry_arc_error(count, statement)
                        lu.delete_data(mosaicRaster)
                        lu.delete_dir(mosaicDir)
                        # Try a new directory
                        mosaicDir = path.join(
                            cfg.LCCBASEDIR,
                            'mos' + str(x + 1) + '_' + str(count))
                        lu.create_dir(mosaicDir)
                        mosaicRaster = path.join(mosaicDir, mosFN)
                        if not tryAgain:
                            exec statement
                    else:
                        break
            endTime = time.clock()
            processTime = round((endTime - start_time), 2)

            if normalize:
                printText = "Normalized and mosaicked "
            else:
                printText = "Mosaicked NON-normalized "
            gprint(printText + "corridor for link ID #" + str(linkId) +
                   " connecting core areas " + str(corex) + " and " +
                   str(corey) + " in " + str(processTime) + " seconds. " +
                   str(int(linkCount)) + " out of " +
                   str(int(numCorridorLinks)) + " links have been "
                   "processed.")

            # temporarily disable links in linktable - don't want to mosaic
            # them twice
            for y in range(x + 1, numLinks):
                corex1 = int(coreList[y, 0])
                corey1 = int(coreList[y, 1])
                if corex1 == corex and corey1 == corey:
                    linkTable[y, cfg.LTB_LINKTYPE] = (
                        linkTable[y, cfg.LTB_LINKTYPE] + 1000)
                elif corex1 == corey and corey1 == corex:
                    linkTable[y, cfg.LTB_LINKTYPE] = (
                        linkTable[y, cfg.LTB_LINKTYPE] + 1000)

            numGridsWritten = numGridsWritten + 1
            if not SAVENORMLCCS:
                lu.delete_data(lccNormRaster)
                lu.delete_dir(clccdir)
                lu.create_dir(clccdir)
            else:
                if numGridsWritten == 100:
                    # We only write up to 100 grids to any one folder
                    # because otherwise Arc slows to a crawl
                    dirCount = dirCount + 1
                    numGridsWritten = 0
                    clccdir = path.join(cfg.LCCBASEDIR,
                                        cfg.LCCNLCDIR_NM + str(dirCount))
                    gprint("Creating output folder: " + clccdir)
                    gp.CreateFolder_management(cfg.LCCBASEDIR,
                                               path.basename(clccdir))

            if numGridsWritten > 1 or dirCount > 0:
                lu.delete_data(lastMosaicRaster)
                lu.delete_dir(path.dirname(lastMosaicRaster))

            lastMosaicRaster = mosaicRaster
            x = x + 1

        # Re-enable rows that were temporarily disabled above
        rows = npy.where(linkTable[:, cfg.LTB_LINKTYPE] > 1000)
        linkTable[rows,
                  cfg.LTB_LINKTYPE] = (linkTable[rows, cfg.LTB_LINKTYPE] -
                                       1000)
        # ---------------------------------------------------------------------

        # Create output geodatabase
        if not gp.exists(outputGDB):
            gp.createfilegdb(cfg.OUTPUTDIR, path.basename(outputGDB))

        if cfg.useArcpy:
            arcpy.env.workspace = outputGDB
        else:
            gp.workspace = outputGDB

        gp.pyramid = "NONE"
        gp.rasterstatistics = "NONE"

        # Copy mosaic raster to output geodatabase
        saveFloatRaster = False
        if saveFloatRaster:
            floatRaster = outputGDB + '\\' + PREFIX + mosaicBaseName + '_flt'  # Full path
            statement = 'arcObj.CopyRaster_management(mosaicRaster, floatRaster)'
            try:
                exec statement
            except:
                pass

        # ---------------------------------------------------------------------
        # convert mosaic raster to integer
        intRaster = path.join(outputGDB, PREFIX + mosaicBaseName)
        if cfg.useArcpy:
            statement = ('outras = Int(Raster(mosaicRaster) - offset + 0.5); '
                         'outras.save(intRaster)')
        else:
            expression = "int(" + mosaicRaster + " - " + str(
                offset) + " + 0.5)"
            statement = 'gp.SingleOutputMapAlgebra_sa(expression, intRaster)'
        count = 0
        while True:
            try:
                exec statement
                randomerror()
            except:
                count, tryAgain = lu.retry_arc_error(count, statement)
                if not tryAgain: exec statement
            else: break
        # ---------------------------------------------------------------------

        if writeTruncRaster:
            # -----------------------------------------------------------------
            # Set anything beyond cfg.CWDTHRESH to NODATA.
            if arcpyAvailable:
                cfg.useArcpy = True  # For Alissa Pump's error with 10.1
            cutoffText = str(cfg.CWDTHRESH)
            if cutoffText[-6:] == '000000':
                cutoffText = cutoffText[0:-6] + 'm'
            elif cutoffText[-3:] == '000':
                cutoffText = cutoffText[0:-3] + 'k'

            truncRaster = (outputGDB + '\\' + PREFIX + mosaicBaseName +
                           '_truncated_at_' + cutoffText)

            count = 0
            if cfg.useArcpy:
                statement = ('outRas = Raster(intRaster) * '
                             '(Con(Raster(intRaster) <= cfg.CWDTHRESH,1)); '
                             'outRas.save(truncRaster)')
            else:
                expression = ("(" + intRaster + " * (con(" + intRaster +
                              "<= " + str(cfg.CWDTHRESH) + ",1)))")
                statement = ('gp.SingleOutputMapAlgebra_sa(expression, '
                             'truncRaster)')
            count = 0
            while True:
                try:
                    exec statement
                    randomerror()
                except:
                    count, tryAgain = lu.retry_arc_error(count, statement)
                    if not tryAgain: exec statement
                else: break
            cfg.useArcpy = False  # End fix for Alissa Pump's error with 10.1
        # ---------------------------------------------------------------------
        # Check for unreasonably low minimum NLCC values
        try:
            mosaicGrid = path.join(cfg.LCCBASEDIR, 'mos')
            # Copy to grid to test
            arcObj.CopyRaster_management(mosaicRaster, mosaicGrid)
            minObject = gp.GetRasterProperties(mosaicGrid, "MINIMUM")
            rasterMin = float(str(minObject.getoutput(0)))
        except:
            gp.AddWarning('\n------------------------------------------------')
            gp.AddWarning(
                'WARNING: Raster minimum check failed in step 5. \n'
                'This may mean the output rasters are corrupted. Please \n'
                'be sure to check for valid rasters in ' + outputGDB)
            rasterMin = 0
        tolerance = (float(gp.cellSize) * -10)

        if rasterMin < tolerance:
            lu.dashline(1)
            msg = ('WARNING: Minimum value of mosaicked corridor map is '
                   'much less than zero (' + str(rasterMin) + ').'
                   '\nThis could mean that BOUNDING CIRCLE BUFFER DISTANCES '
                   'were too small and a corridor passed outside of a '
                   'bounding circle, or that a corridor passed outside of the '
                   'resistance map. \n')
            gp.AddWarning(msg)

        gprint('\nWriting final LCP maps...')
        if cfg.STEP4:
            finalLinkTable = lu.update_lcp_shapefile(linkTable,
                                                     lastStep=4,
                                                     thisStep=5)
        elif cfg.STEP3:
            finalLinkTable = lu.update_lcp_shapefile(linkTable,
                                                     lastStep=3,
                                                     thisStep=5)
        else:
            # Don't know if step 4 was run, since this is started at step 5.
            # Use presence of previous linktable files to figure this out.
            # Linktable name includes step number.
            prevLinkTableFile = lu.get_prev_step_link_table(step=5)
            prevStepInd = len(prevLinkTableFile) - 5
            lastStep = prevLinkTableFile[prevStepInd]

            finalLinkTable = lu.update_lcp_shapefile(linkTable,
                                                     lastStep,
                                                     thisStep=5)

        outlinkTableFile = lu.get_this_step_link_table(step=5)
        gprint('Updating ' + outlinkTableFile)
        lu.write_link_table(linkTable, outlinkTableFile)

        linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s5.csv")
        lu.write_link_table(linkTable, linkTableLogFile)

        linkTableFinalFile = path.join(cfg.OUTPUTDIR,
                                       PREFIX + "_linkTable_s5.csv")
        lu.write_link_table(finalLinkTable, linkTableFinalFile)
        gprint('Copy of final linkTable written to ' + linkTableFinalFile)

        gprint('Creating shapefiles with linework for links.')
        try:
            lu.write_link_maps(outlinkTableFile, step=5)
        except:
            lu.write_link_maps(outlinkTableFile, step=5)

        # Create final linkmap files in output directory, and remove files from
        # scratch.
        lu.copy_final_link_maps(step=5)

        if not SAVENORMLCCS:
            lu.delete_dir(cfg.LCCBASEDIR)

        # Build statistics for corridor rasters
        gp.addmessage('\nBuilding output statistics and pyramids '
                      'for corridor raster')
        lu.build_stats(intRaster)

        if writeTruncRaster:
            gp.addmessage('Building output statistics '
                          'for truncated corridor raster')
            lu.build_stats(truncRaster)

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 5. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 5. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
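At its core, each normalized least-cost corridor above is simple map algebra: the two cost-weighted distance (CWD) rasters for a core pair are summed, and the pair's least-cost distance (reduced by the 10000 offset that is subtracted back out at the integer-conversion step) is removed. A stripped-down sketch of that calculation follows, with hypothetical raster paths and distance values, assuming a Spatial Analyst license.

# Minimal sketch of the normalized least-cost corridor (NLCC) calculation
# used in calc_lccs(): cwd_A + cwd_B - lcd(A, B), with lcd reduced by the
# offset so corridor cells stay positive until the final integer step.
# Paths and the least-cost distance value are illustrative assumptions.
import arcpy
from arcpy.sa import Raster

arcpy.CheckOutExtension("Spatial")

cwd_a = r"C:\project\cwd\cwd_12"       # hypothetical CWD raster for core 12
cwd_b = r"C:\project\cwd\cwd_34"       # hypothetical CWD raster for core 34
least_cost_dist = 85000.0              # hypothetical lcd between the cores
offset = 10000                         # same offset the script removes later

norm_corridor = Raster(cwd_a) + Raster(cwd_b) - (least_cost_dist - offset)
norm_corridor.save(r"C:\project\nlcc\nlcc_12_34")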
Example No. 7
def calc_lccs(normalize):
    try:
        if normalize:
            mosaicBaseName = "_corridors"
            writeTruncRaster = cfg.WRITETRUNCRASTER
            outputGDB = cfg.OUTPUTGDB
            SAVENORMLCCS = cfg.SAVENORMLCCS
        else:
            mosaicBaseName = "_NON_NORMALIZED_corridors"
            SAVENORMLCCS = False
            outputGDB = cfg.EXTRAGDB
            writeTruncRaster = False

        lu.dashline(1)
        gprint('Running script ' + _SCRIPT_NAME)
        linkTableFile = lu.get_prev_step_link_table(step=5)
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.env.compression = "NONE"

        if cfg.MAXEUCDIST is not None:
            gprint('Max Euclidean distance between cores')
            gprint('for linkage mapping set to ' + str(cfg.MAXEUCDIST))

        if cfg.MAXCOSTDIST is not None:
            gprint('Max cost-weighted distance between cores')
            gprint('for linkage mapping set to ' + str(cfg.MAXCOSTDIST))


        # set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = arcpy.Describe(cfg.RESRAST).MeanCellHeight
        arcpy.env.snapRaster = cfg.RESRAST
        arcpy.env.mask = cfg.RESRAST

        linkTable = lu.load_link_table(linkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg =('\nThere are no corridors to map. Bailing.')
            lu.raise_error(msg)


        if not cfg.STEP3 and not cfg.STEP4:
            # re-check for links that are too long or in case script run out of
            # sequence with more stringent settings
            gprint('Double-checking for corridors that are too long to map.')
            DISABLE_LEAST_COST_NO_VAL = True
            linkTable,numDroppedLinks = lu.drop_links(
                linkTable, cfg.MAXEUCDIST, cfg.MINEUCDIST, cfg.MAXCOSTDIST,
                cfg.MINCOSTDIST, DISABLE_LEAST_COST_NO_VAL)

        # Added to try to speed up:
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # set up directories for normalized lcc and mosaic grids
        dirCount = 0
        gprint("Creating output folder: " + cfg.LCCBASEDIR)
        lu.delete_dir(cfg.LCCBASEDIR)
        arcpy.CreateFolder_management(path.dirname(cfg.LCCBASEDIR),
                                       path.basename(cfg.LCCBASEDIR))
        arcpy.CreateFolder_management(cfg.LCCBASEDIR, cfg.LCCNLCDIR_NM)
        clccdir = path.join(cfg.LCCBASEDIR, cfg.LCCNLCDIR_NM)
        gprint("")
        if normalize:
            gprint('Normalized least-cost corridors will be written '
                          'to ' + clccdir + '\n')
        PREFIX = cfg.PREFIX

        # Add CWD layers for core area pairs to produce NORMALIZED LCC layers
        numGridsWritten = 0
        coreList = linkTable[:,cfg.LTB_CORE1:cfg.LTB_CORE2+1]
        coreList = npy.sort(coreList)

        x = 0
        linkCount = 0
        endIndex = numLinks
        while x < endIndex:
            if (linkTable[x, cfg.LTB_LINKTYPE] < 1): # If not a valid link
                x = x + 1
                continue

            linkCount = linkCount + 1
            start_time = time.clock()

            linkId = str(int(linkTable[x, cfg.LTB_LINKID]))

            # source and target cores
            corex=int(coreList[x,0])
            corey=int(coreList[x,1])

            # Get cwd rasters for source and target cores
            cwdRaster1 = lu.get_cwd_path(corex)
            cwdRaster2 = lu.get_cwd_path(corey)

            if not arcpy.Exists(cwdRaster1):
                msg = ('\nError: cannot find cwd raster:\n' + cwdRaster1)
                lu.raise_error(msg)
            if not arcpy.Exists(cwdRaster2):
                msg = ('\nError: cannot find cwd raster:\n' + cwdRaster2)
                lu.raise_error(msg)


            lccNormRaster = path.join(clccdir, str(corex) + "_" +
                                      str(corey))# + ".tif")
            arcpy.env.extent = "MINOF"

            link = lu.get_links_from_core_pairs(linkTable, corex, corey)

            offset = 10000

            # Normalized lcc rasters are created by adding cwd rasters and
            # subtracting the least cost distance between them.
            lcDist = (float(linkTable[link,cfg.LTB_CWDIST]) - offset)

            if normalize:
                statement = ('outras = arcpy.sa.Raster(cwdRaster1) '
                             '+ arcpy.sa.Raster(cwdRaster2) - lcDist; '
                             'outras.save(lccNormRaster)')
            else:
                statement = ('outras = arcpy.sa.Raster(cwdRaster1) '
                             '+ arcpy.sa.Raster(cwdRaster2); '
                             'outras.save(lccNormRaster)')

            count = 0
            while True:
                try:
                    exec(statement)
                except Exception:
                    count,tryAgain = lu.retry_arc_error(count,statement)
                    if not tryAgain:
                        exec(statement)
                else: break

            if normalize:
                try:
                    minObject = arcpy.GetRasterProperties_management(lccNormRaster, "MINIMUM")
                    rasterMin = float(str(minObject.getOutput(0)))
                except Exception:
                    lu.warn('\n------------------------------------------------')
                    lu.warn('WARNING: Raster minimum check failed in step 5. \n'
                        'This may mean the output rasters are corrupted. Please \n'
                        'be sure to check for valid rasters in '+ outputGDB)
                    rasterMin = 0
                tolerance = (float(arcpy.env.cellSize) * -10)
                if rasterMin < tolerance:
                    lu.dashline(1)
                    msg = ('WARNING: Minimum value of a corridor #' + str(x+1)
                           + ' is much less than zero ('+str(rasterMin)+').'
                           '\nThis could mean that BOUNDING CIRCLE BUFFER DISTANCES '
                           'were too small and a corridor passed outside of a '
                           'bounding circle, or that a corridor passed outside of the '
                           'resistance map. \n')
                    lu.warn(msg)

            arcpy.env.extent = cfg.RESRAST

            mosaicDir = path.join(cfg.LCCBASEDIR,'mos'+str(x+1))
            lu.create_dir(mosaicDir)
            mosFN = 'mos'  # fixme: change to 'mos.tif' and move
            mosaicRaster = path.join(mosaicDir,mosFN)

            if numGridsWritten == 0 and dirCount == 0:
                #If this is the first grid then copy rather than mosaic
                arcpy.CopyRaster_management(lccNormRaster, mosaicRaster)
            else:
                statement = (
                    'arcpy.MosaicToNewRaster_management('
                    'input_rasters=";".join([lccNormRaster, '
                    'lastMosaicRaster]), output_location=mosaicDir, '
                    'raster_dataset_name_with_extension=mosFN, '
                    'pixel_type="32_BIT_FLOAT", cellsize=arcpy.env.cellSize, '
                    'number_of_bands="1", mosaic_method="MINIMUM")')

                count = 0
                while True:
                    try:
                        lu.write_log('Executing mosaic for link #'+str(linkId))
                        exec(statement)
                        lu.write_log('Done with mosaic.')
                    except Exception:
                        count,tryAgain = lu.retry_arc_error(count,statement)
                        lu.delete_data(mosaicRaster)
                        lu.delete_dir(mosaicDir)
                        # Try a new directory
                        mosaicDir = path.join(cfg.LCCBASEDIR,'mos'+str(x+1)+ '_' + str(count))
                        lu.create_dir(mosaicDir)
                        mosaicRaster = path.join(mosaicDir,mosFN)
                        if not tryAgain:
                            exec(statement)
                    else: break
            endTime = time.clock()
            processTime = round((endTime - start_time), 2)

            if normalize:
                printText = "Normalized and mosaicked "
            else:
                printText = "Mosaicked NON-normalized "
            gprint(printText + "corridor for link ID #" + str(linkId) +
                    " connecting core areas " + str(corex) +
                    " and " + str(corey)+ " in " +
                    str(processTime) + " seconds. " + str(int(linkCount)) +
                    " out of " + str(int(numCorridorLinks)) + " links have been "
                    "processed.")

            # temporarily disable links in linktable - don't want to mosaic
            # them twice
            for y in range (x+1,numLinks):
                corex1 = int(coreList[y,0])
                corey1 = int(coreList[y,1])
                if corex1 == corex and corey1 == corey:
                    linkTable[y,cfg.LTB_LINKTYPE] = (
                        linkTable[y,cfg.LTB_LINKTYPE] + 1000)
                elif corex1==corey and corey1==corex:
                    linkTable[y,cfg.LTB_LINKTYPE] = (
                            linkTable[y,cfg.LTB_LINKTYPE] + 1000)

            numGridsWritten = numGridsWritten + 1
            if not SAVENORMLCCS:
                lu.delete_data(lccNormRaster)
                lu.delete_dir(clccdir)
                lu.create_dir(clccdir)
            else:
                if numGridsWritten == 100:
                    # We only write up to 100 grids to any one folder
                    # because otherwise Arc slows to a crawl
                    dirCount = dirCount + 1
                    numGridsWritten = 0
                    clccdir = path.join(cfg.LCCBASEDIR,
                                        cfg.LCCNLCDIR_NM + str(dirCount))
                    gprint("Creating output folder: " + clccdir)
                    arcpy.CreateFolder_management(cfg.LCCBASEDIR,
                                               path.basename(clccdir))

            if numGridsWritten > 1 or dirCount > 0:
                lu.delete_data(lastMosaicRaster)
                lu.delete_dir(path.dirname(lastMosaicRaster))

            lastMosaicRaster = mosaicRaster
            x = x + 1

        # Re-enable rows that were temporarily disabled above
        rows = npy.where(linkTable[:,cfg.LTB_LINKTYPE]>1000)
        linkTable[rows,cfg.LTB_LINKTYPE] = (
            linkTable[rows,cfg.LTB_LINKTYPE] - 1000)
        # ---------------------------------------------------------------------

        # Create output geodatabase
        if not arcpy.Exists(outputGDB):
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR, path.basename(outputGDB))

        arcpy.env.workspace = outputGDB

        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # ---------------------------------------------------------------------
        # convert mosaic raster to integer
        intRaster = path.join(outputGDB,PREFIX + mosaicBaseName)
        statement = ('outras = arcpy.sa.Int(arcpy.sa.Raster(mosaicRaster) '
                     '- offset + 0.5); '
                     'outras.save(intRaster)')
        count = 0
        while True:
            try:
                exec(statement)
            except Exception:
                count,tryAgain = lu.retry_arc_error(count,statement)
                if not tryAgain: exec(statement)
            else: break
        # ---------------------------------------------------------------------


        if writeTruncRaster:
            # -----------------------------------------------------------------
            # Set anything beyond cfg.CWDTHRESH to NODATA.
            truncRaster = (outputGDB + '\\' + PREFIX + mosaicBaseName +
                           '_truncated_at_' + lu.cwd_cutoff_str(cfg.CWDTHRESH))

            statement = ('outRas = arcpy.sa.Raster(intRaster)'
                         '* (arcpy.sa.Con(arcpy.sa.Raster(intRaster) '
                         '<= cfg.CWDTHRESH, 1)); '
                         'outRas.save(truncRaster)')

            count = 0
            while True:
                try:
                    exec(statement)
                except Exception:
                    count,tryAgain = lu.retry_arc_error(count,statement)
                    if not tryAgain: exec(statement)
                else: break
        # ---------------------------------------------------------------------
        # Check for unreasonably low minimum NLCC values
        try:
            mosaicGrid = path.join(cfg.LCCBASEDIR,'mos')
            # Copy to grid to test
            arcpy.CopyRaster_management(mosaicRaster, mosaicGrid)
            minObject = arcpy.GetRasterProperties_management(mosaicGrid, "MINIMUM")
            rasterMin = float(str(minObject.getOutput(0)))
        except Exception:
            lu.warn('\n------------------------------------------------')
            lu.warn('WARNING: Raster minimum check failed in step 5. \n'
                'This may mean the output rasters are corrupted. Please \n'
                'be sure to check for valid rasters in '+ outputGDB)
            rasterMin = 0
        tolerance = (float(arcpy.env.cellSize) * -10)
        if rasterMin < tolerance:
            lu.dashline(1)
            msg = ('WARNING: Minimum value of mosaicked corridor map is '
                   'much less than zero ('+str(rasterMin)+').'
                   '\nThis could mean that BOUNDING CIRCLE BUFFER DISTANCES '
                   'were too small and a corridor passed outside of a '
                   'bounding circle, or that a corridor passed outside of the '
                   'resistance map. \n')
            lu.warn(msg)


        gprint('\nWriting final LCP maps...')
        if cfg.STEP4:
            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=4,
                                                     thisStep=5)
        elif cfg.STEP3:
            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=3,
                                                     thisStep=5)
        else:
            # Don't know if step 4 was run, since this is started at step 5.
            # Use presence of previous linktable files to figure this out.
            # Linktable name includes step number.
            prevLinkTableFile = lu.get_prev_step_link_table(step=5)
            prevStepInd = len(prevLinkTableFile) - 5
            lastStep = prevLinkTableFile[prevStepInd]

            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep,
                                                     thisStep=5)

        outlinkTableFile = lu.get_this_step_link_table(step=5)
        gprint('Updating ' + outlinkTableFile)
        lu.write_link_table(linkTable, outlinkTableFile)

        linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s5.csv")
        lu.write_link_table(linkTable, linkTableLogFile)

        linkTableFinalFile = path.join(cfg.OUTPUTDIR, PREFIX +
                                       "_linkTable_s5.csv")
        lu.write_link_table(finalLinkTable, linkTableFinalFile)
        gprint('Copy of final linkTable written to '+
                          linkTableFinalFile)

        gprint('Creating shapefiles with linework for links.')
        try:
            lu.write_link_maps(outlinkTableFile, step=5)
        except Exception:
            lu.write_link_maps(outlinkTableFile, step=5)

        # Create final linkmap files in output directory, and remove files from
        # scratch.
        lu.copy_final_link_maps(step=5)

        if not SAVENORMLCCS:
            lu.delete_dir(cfg.LCCBASEDIR)

        # Build statistics for corridor rasters
        arcpy.AddMessage('\nBuilding output statistics and pyramids '
                          'for corridor raster')
        lu.build_stats(intRaster)

        if writeTruncRaster:
            arcpy.AddMessage('Building output statistics '
                              'for truncated corridor raster')
            lu.build_stats(truncRaster)

        save_parameters()
        if cfg.OUTPUTFORMODELBUILDER:
            arcpy.CopyFeatures_management(cfg.COREFC, cfg.OUTPUTFORMODELBUILDER)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 5. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 5. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
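The writeTruncRaster branch above masks out every corridor cell whose value exceeds cfg.CWDTHRESH by multiplying the integer corridor raster with a Con() test. A self-contained sketch of that truncation follows, using a hypothetical raster path and threshold.

# Minimal sketch of truncating the integer corridor raster at a
# cost-weighted distance threshold, as in the writeTruncRaster branch.
# The path and threshold are illustrative assumptions.
import arcpy
from arcpy.sa import Con, Raster

arcpy.CheckOutExtension("Spatial")

corridors = r"C:\project\output\corridors.gdb\proj_corridors"  # hypothetical
cwd_threshold = 200000  # hypothetical cutoff in cost-weighted distance units

# Con() with no false expression returns NoData wherever the test fails,
# so cells above the threshold drop out of the truncated raster.
truncated = Raster(corridors) * Con(Raster(corridors) <= cwd_threshold, 1)
truncated.save(corridors + "_truncated")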
Example No. 8
def do_cwd_calcs(x, linkTable, coresToMap, lcpLoop, failures):
    try:
        # This is the focal core area we're running cwd out from
        sourceCore = int(coresToMap[x])

        # Create temporary scratch directory just for this focal core
        coreDir = path.join(cfg.SCRATCHDIR, 'core' + str(sourceCore))
        lu.delete_dir(coreDir)
        lu.create_dir(coreDir)

        if arcpy:
            gp = arcpy.gp
            arcpy.env.workspace = coreDir
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
            arcpy.env.overwriteOutput = True
            arcpy.env.extent = "MINOF"
        else:
            gp = cfg.gp
            gp.workspace = coreDir
            gp.scratchWorkspace = cfg.ARCSCRATCHDIR
            gp.OverwriteOutput = True
            gp.Extent = "MINOF"

        write_cores_to_map(x, coresToMap)

        # Get target cores based on linktable with reinstated links
        # (we temporarily disable them below by adding 1000)
        linkTableTemp = linkTable.copy()
        # reinstate temporarily disabled links
        rows = npy.where(linkTableTemp[:,cfg.LTB_LINKTYPE] > 1000)
        linkTableTemp[rows,cfg.LTB_LINKTYPE] = (
            linkTableTemp[rows,cfg.LTB_LINKTYPE] - 1000)
        del rows

        # get core areas to be connected to focal core
        targetCores = lu.get_core_targets(sourceCore, linkTableTemp)
        # gprint( str(sourceCore))
        # gprint(str(linkTableTemp.astype('int32')))
        # gprint('targets'+str(targetCores))
        del linkTableTemp

        if len(targetCores)==0:
            # Nothing to do, so reset failure count and return.
            failures = 0
            return linkTable, failures, lcpLoop

        lu.dashline(0)
        gprint('Target core areas for core area #' +
                          str(sourceCore) + ' = ' + str(targetCores))

        # -------------------------------------------------------------
        # Create BOUNDING FEATURE to limit extent of cost distance
        # calculations-This is a set of circles encompassing core areas
        # we'll be connecting each core area to.
        if cfg.BUFFERDIST is not None:
            # fixme: move outside of loop   # new circle
            gp.MakeFeatureLayer(cfg.BNDCIRS,"fGlobalBoundingFeat")

            start_time = time.clock()
            # loop through targets and get bounding circles that
            # contain focal core and target cores
            # gprint("\nAdding up bounding circles for source"
                              # " core " + str(sourceCore))
            gp.SelectLayerByAttribute("fGlobalBoundingFeat",
                                          "CLEAR_SELECTION")
            for i in range(len(targetCores)):
                # run thru circleList, find link that core pair
                # corresponds to.
                if sourceCore < targetCores[i]:
                    corex = sourceCore
                    corey = targetCores[i]
                else:
                    corey = sourceCore
                    corex = targetCores[i]

                cores_x_y = str(int(corex))+'_'+str(int(corey))
                field = "cores_x_y"
                # fixme: need to check for case where link is not found
                gp.SelectLayerByAttribute(
                    "fGlobalBoundingFeat", "ADD_TO_SELECTION", field +
                    " = '" + cores_x_y + "'")

            lu.delete_data(path.join(coreDir,cfg.BNDFC))
            # fixme: may not be needed- can we just clip raster
            # using selected?
            gp.CopyFeatures_management("fGlobalBoundingFeat",
                                           cfg.BNDFC)

            # Clip out bounded area of resistance raster for cwd
            # calculations from focal core
            bResistance = path.join(coreDir,"bResistance") # Can't be tif-
                                                           # need STA for CWD
            lu.delete_data(bResistance)
            statement = (
                'gp.ExtractByMask_sa(cfg.BOUNDRESIS, cfg.BNDFC, bResistance)')
            try:
                exec statement
                randomerror()
            except:
                failures = lu.print_arcgis_failures(statement, failures)
                if failures < 20:
                    return None,failures,lcpLoop
                else: exec statement

        else:
            bResistance = cfg.BOUNDRESIS
        # ---------------------------------------------------------
        # CWD Calculations
        outDistanceRaster = lu.get_cwd_path(sourceCore)
        # Check if climate tool is calling linkage mapper
        if cfg.TOOL == cfg.TOOL_CC:
            back_rast = outDistanceRaster.replace("cwd_", "back_")
        else:
            back_rast = "BACK"
            lu.delete_data(path.join(coreDir, back_rast))
            lu.delete_data(outDistanceRaster)
            start_time = time.clock()

            # Create raster that just has source core in it
            # Note: this seems faster than setnull with LI grid.
            SRCRASTER = 'source' + tif
            lu.delete_data(path.join(coreDir,SRCRASTER))
            if arcpy:
                statement = ('conRaster = '
                             'Con(Raster(cfg.CORERAS) == int(sourceCore), 1);'
                             'conRaster.save(SRCRASTER)')
            else:
                expression = ("con(" + cfg.CORERAS + " == " +
                               str(int(sourceCore)) + ", 1)")
                statement = ('gp.SingleOutputMapAlgebra_sa'
                            '(expression, SRCRASTER)')

            try:
                exec statement
                randomerror()
            except:
                failures = lu.print_arcgis_failures(statement, failures)
                if failures < 20:
                    return None, failures, lcpLoop
                else: exec statement

            # Cost distance raster creation
            if arcpy:
                arcpy.env.extent = "MINOF"
            else:
                gp.Extent = "MINOF"

            lu.delete_data(path.join(coreDir,"BACK"))
            
            if arcpy:
                statement = ('outCostDist = CostDistance(SRCRASTER, '
                             'bResistance, cfg.TMAXCWDIST, back_rast);'
                             'outCostDist.save(outDistanceRaster)')
            else:
                statement = ('gp.CostDistance_sa(SRCRASTER, bResistance, '
                             'outDistanceRaster, cfg.TMAXCWDIST, back_rast)')
            try:
                exec statement
                randomerror()
            except:
                failures = lu.print_arcgis_failures(statement, failures)
                if failures < 20:
                    return None, failures, lcpLoop
                else:
                    exec statement

        start_time = time.clock()
        # Extract cost distances from source core to target cores
        # Fixme: there will be redundant calls to b-a when already
        # done a-b
        ZNSTATS = path.join(coreDir, "zonestats.dbf")
        lu.delete_data(ZNSTATS)
        #Fixme: zonalstatistics is returning integer values for minimum. Why???
        #Extra zonalstatistics code implemented later in script to correct
        #values.
        if arcpy:
            statement = ('outZSaT = ZonalStatisticsAsTable(cfg.CORERAS, '
                    '"VALUE", outDistanceRaster,ZNSTATS, "DATA", "MINIMUM")')
        else:
            statement = ('gp.zonalstatisticsastable_sa('
                      'cfg.CORERAS, "VALUE", outDistanceRaster, ZNSTATS)')
                      
        try:
            exec statement
            randomerror()
        except:
            failures = lu.print_arcgis_failures(statement, failures)
            if failures < 20:
                return None,failures,lcpLoop
            else:
                if cfg.TOOL == cfg.TOOL_CC:
                    msg = ('ERROR in Zonal Stats. Please restart ArcMap '
                        'and try again.')                
                else:
                    msg = ('ERROR in Zonal Stats. Restarting ArcMap '
                        'then restarting Linkage Mapper at step 3 usually\n'
                        'solves this one so please restart and try again.')

                lu.raise_error(msg)
        tableRows = gp.searchcursor(ZNSTATS)
        tableRow = tableRows.Next()
        while tableRow:
            if tableRow.Value > sourceCore:
                link = lu.get_links_from_core_pairs(linkTable,
                                                    sourceCore,
                                                    tableRow.Value)
                if linkTable[link,cfg.LTB_LINKTYPE] > 0: # valid link
                    linkTable[link,cfg.LTB_CWDIST] = tableRow.Min
                    if cfg.MAXCOSTDIST is not None:
                        if ((tableRow.Min > cfg.MAXCOSTDIST) and
                           (linkTable[link,cfg.LTB_LINKTYPE] != cfg.LT_KEEP)):
                             # Disable link, it's too long
                            linkTable[link,cfg.LTB_LINKTYPE] = cfg.LT_TLLC
                    if cfg.MINCOSTDIST is not None:
                        if (tableRow.Min < cfg.MINCOSTDIST and
                           (linkTable[link,cfg.LTB_LINKTYPE] != cfg.LT_KEEP)):
                            # Disable link, it's too short
                            linkTable[link,cfg.LTB_LINKTYPE] = cfg.LT_TSLC
            tableRow = tableRows.next()
        del tableRow, tableRows
        #start_time = lu.elapsed_time(start_time)

        # ---------------------------------------------------------
        # Check for intermediate cores AND map LCP lines
        for y in range(0,len(targetCores)):
            targetCore = targetCores[y]
            rows = lu.get_links_from_core_pairs(linkTable, sourceCore,
                                                targetCore)
            # Map all links for which we successfully extracted
            #  cwds in above code
            link = rows[0]
            if (linkTable[link,cfg.LTB_LINKTYPE] > 0 and
                linkTable[link,cfg.LTB_LINKTYPE] < 1000 and
                linkTable[link,cfg.LTB_CWDIST] != -1):
                # Flag so that we only evaluate this pair once
                linkTable[rows,cfg.LTB_LINKTYPE] = (linkTable
                                               [rows,cfg.LTB_LINKTYPE]
                                               + 1000)

                # Create raster that just has target core in it
                TARGETRASTER = 'targ' + tif
                lu.delete_data(path.join(coreDir,TARGETRASTER))
                try:
                    if arcpy:
                        # For climate corridors, errors occur when core raster
                        # overlaps null values in cwd rasters
                        statement = ('conRaster = Con(IsNull(outDistanceRaster'
                            '), Int(outDistanceRaster), Con(Raster'
                            '(cfg.CORERAS) == int(targetCore), 1));'
                            'conRaster.save(TARGETRASTER)') 
                        # statement = ('conRaster = Con(Raster('
                                    # 'cfg.CORERAS) == int(targetCore), 1);'
                                    # 'conRaster.save(TARGETRASTER)')

                    else:
                        expression = ("con(" + cfg.CORERAS + " == " +
                        str(int(targetCore)) + ",1)")
                        statement = ('gp.SingleOutputMapAlgebra_sa(expression,'
                                     ' TARGETRASTER)')
                    exec statement
                    randomerror()
                except:
                    failures = lu.print_arcgis_failures(statement, failures)
                    if failures < 20:
                        return None,failures,lcpLoop
                    else: exec statement
                # Execute ZonalStatistics to get more precise cw distance if
                # arc rounded it earlier (not critical, hence the try/pass)
                if (linkTable[link,cfg.LTB_CWDIST] ==
                                int(linkTable[link,cfg.LTB_CWDIST])):
                    try:
                        zonalRas = path.join(coreDir,'zonal')
                        gp.ZonalStatistics_sa(TARGETRASTER, "VALUE",
                            outDistanceRaster, zonalRas, "MINIMUM", "DATA")
                        minObject = gp.GetRasterProperties_management(zonalRas,
                                                                "MINIMUM")
                        rasterMin = float(str(minObject.getOutput(0)))
                        linkTable[link,cfg.LTB_CWDIST] = rasterMin
                        lu.delete_data(zonalRas)
                    except:
                        pass
                # Cost path maps the least cost path
                # between source and target
                lcpRas = path.join(coreDir,"lcp" + tif)
                lu.delete_data(lcpRas)

                # Note: costpath (both gp and arcpy versions) uses GDAL.               
                if arcpy:
                    statement = ('outCostPath = CostPath(TARGETRASTER,'
                          'outDistanceRaster, back_rast, "BEST_SINGLE", ""); '
                          'outCostPath.save(lcpRas)')
                else:
                    statement = ('gp.CostPath_sa(TARGETRASTER, '
                                 'outDistanceRaster, back_rast, '
                                 'lcpRas, "BEST_SINGLE", "")')
                try:
                    exec statement                    
                    randomerror()
                except:
                    failures = lu.print_arcgis_failures(statement, failures)
                    if failures < 20:
                        return None,failures,lcpLoop
                    else:
                        lu.dashline(1)
                        gprint('\nCost path is failing for Link #'
                           + str(int(link)) + ' connecting core areas ' +
                            str(int(sourceCore)) + ' and ' +
                            str(int(targetCore)) + '.\n'
                            'Retrying one more time in 5 minutes.')
                        lu.snooze(300)
                        exec statement
                
                # fixme: may be fastest to not do selection, do
                # EXTRACTBYMASK, getvaluelist, use code snippet at end
                # of file to discard src and target values. Still this
                # is fast- 13 sec for LI data...But I'm not very
                # comfortable using failed coreMin as our test....
                if (cfg.S3DROPLCCSic and
                    (linkTable[link,cfg.LTB_LINKTYPE] != cfg.LT_KEEP) and
                    (linkTable[link,cfg.LTB_LINKTYPE] != cfg.LT_KEEP + 1000)):
                    # -------------------------------------------------
                    # Drop links where lcp passes through intermediate
                    # core area. The method below is faster than the
                    # valuelist method because of the SOMA
                    # (SingleOutputMapAlgebra) call in the valuelist method.
                    # make a feature layer for input cores to select from
                    gp.MakeFeatureLayer(cfg.COREFC, cfg.FCORES)

                    gp.SelectLayerByAttribute(cfg.FCORES,
                                              "NEW_SELECTION",
                                              cfg.COREFN + ' <> ' +
                                              str(int(targetCore)) +
                                              ' AND ' + cfg.COREFN +
                                              ' <> ' +
                                              str(int(sourceCore)))

                    corePairRas = path.join(coreDir,"s3corepair"+ tif)
                    if arcpy:
                        arcpy.env.extent = cfg.BOUNDRESIS
                    else:
                        gp.extent = gp.Describe(cfg.BOUNDRESIS).extent


                    statement = ('gp.FeatureToRaster_conversion(cfg.FCORES, '
                                'cfg.COREFN, corePairRas, gp.cellSize)')
                    try:
                        exec statement
                        randomerror()
                    except:
                        failures = lu.print_arcgis_failures(statement,
                                                            failures)
                        if failures < 20:
                            return None,failures,lcpLoop
                        else: exec statement

                    #------------------------------------------
                    # Intermediate core test
                    try:
                        coreDetected = test_for_intermediate_core(coreDir,
                                                lcpRas, corePairRas)
                        randomerror()
                    except:
                        statement = 'test_for_intermediate_core'
                        failures = lu.print_arcgis_failures(statement,
                                                            failures)
                        if failures < 20:
                            return None,failures,lcpLoop
                        else:
                            coreDetected = test_for_intermediate_core(
                                        coreDir, lcpRas, corePairRas)

                    if coreDetected:
                        # lu.dashline()
                        gprint(
                            "Found an intermediate core in the "
                            "least-cost path between cores " +
                            str(int(sourceCore)) + " and " +
                            str(int(targetCore)) + ".  The corridor "
                            "will be removed.")
                        # disable link
                        rows = lu.get_links_from_core_pairs(linkTable,
                                                    sourceCore,
                                                    targetCore)
                        linkTable[rows,cfg.LTB_LINKTYPE] = cfg.LT_INT
                    #------------------------------------------

                # Create lcp shapefile.  lcploop just keeps track of
                # whether this is first time function is called.
                lcpLoop = lu.create_lcp_shapefile(coreDir, linkTable,
                                                  sourceCore, targetCore,
                                                  lcpLoop)

        # Made it through, so reset failure count and return.
        failures = 0
        lu.delete_dir(coreDir)
        # if cfg.TOOL == cfg.TOOL_CC:
            # lu.delete_data(back_rast)
        return linkTable, failures, lcpLoop

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        # gprint('****Failed in step 3. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        # gprint('****Failed in step 3. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
def circuitscape_master(argv=None):
    """

    """
    gprint = lu.gprint

    if argv is None:
        argv = sys.argv

    argv.append(get_cs_path())  # Add Circuitscape path

    cfg.configure(cfg.TOOL_CS, argv)
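    # From here on, run settings parsed from argv by cfg.configure (e.g.
    # cfg.DOPINCH, cfg.DOCENTRALITY, cfg.CWDCUTOFF) are read from the cfg
    # module.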

    try:
        lu.create_dir(cfg.LOGDIR)
        lu.create_dir(cfg.MESSAGEDIR)
        cfg.logFilePath = lu.create_log_file(cfg.PARAM_NAMES, argv)
        if cfg.DOPINCH:
            lu.log_metadata(cfg.COREFC, [cfg.RESRAST_IN])
        else:
            lu.log_metadata(cfg.COREFC)

        if cfg.CSPATH is None:
            lu.raise_error("Cannot find an installation of Circuitscape"
                           "\nin your Program Files directory.")

        lu.print_drive_warning()
        # Check core ID field.
        lu.check_cores(cfg.COREFC, cfg.COREFN)

        arcpy.env.outputCoordinateSystem = arcpy.Describe(cfg.COREFC).SpatialReference
        # Set data frame spatial reference to coordinate system of input data
        lu.set_dataframe_sr()

        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # Move adj and cwd results from earlier versions to datapass directory
        lu.move_old_results()

        if cfg.CWDCUTOFF > 0:
            lu.delete_dir(cfg.SCRATCHDIR)

        # Restart code (in progress): a negative CWD cutoff signals a
        # restarted run, so restore the positive value.
        if cfg.CWDCUTOFF < 0:
            cfg.CWDCUTOFF = cfg.CWDCUTOFF * -1

        if not cfg.DOPINCH and not cfg.DOCENTRALITY:
            msg = ('ERROR: Please choose at least one option: pinch point or\n'
                    'network centrality analysis.')
            lu.raise_error(msg)

        lu.create_dir(cfg.SCRATCHDIR)
        lu.create_dir(cfg.ARCSCRATCHDIR)

        if cfg.DO_ALLPAIRS:
            #  Fixme: move raster path to config
            S5CORRIDORRAS = path.join(cfg.OUTPUTGDB,cfg.PREFIX + "_corridors")
            if not arcpy.Exists(S5CORRIDORRAS):
                S5CORRIDORRAS = path.join(cfg.OUTPUTGDB, cfg.PREFIX +
                                         "_lcc_mosaic_int")
            if not arcpy.Exists(S5CORRIDORRAS):
                msg = ('ERROR: Corridor raster created in step 5 is required'
                        '\nfor all-pair analyses, but was not found.')
                lu.raise_error(msg)
        if cfg.DOPINCH:
            if cfg.CWDCUTOFF == '#' or cfg.CWDCUTOFF == 0:
                msg = ('ERROR: CWD cutoff distance is required for pinch point'
                        ' analyses.')
                lu.raise_error(msg)

            # Make a local grid copy of resistance raster-
            # will run faster than gdb.
            lu.delete_data(cfg.RESRAST)
            if not arcpy.Exists(cfg.RESRAST_IN):
                msg = ('ERROR: Resistance raster is required for pinch point'
                        ' analyses, but was not found.')
                lu.raise_error(msg)

            arcpy.env.extent = cfg.RESRAST_IN
            desc = arcpy.Describe(cfg.RESRAST_IN)
            if hasattr(desc, "catalogPath"):
                cfg.RESRAST_IN = arcpy.Describe(cfg.RESRAST_IN).catalogPath

            gprint('\nMaking local copy of resistance raster.')
            try:
                arcpy.CopyRaster_management(cfg.RESRAST_IN, cfg.RESRAST)
            except Exception:
                msg = ('ERROR: Could not make a copy of your resistance raster. ' +
                    'Try re-starting ArcMap to release the file lock.')
                lu.raise_error(msg)

            arcpy.env.snapRaster = cfg.RESRAST

        if cfg.DOCENTRALITY:
            gprint("Creating output folder: " + cfg.CENTRALITYBASEDIR)
            if path.exists(cfg.CENTRALITYBASEDIR):
                shutil.rmtree(cfg.CENTRALITYBASEDIR)
            lu.create_dir(cfg.CENTRALITYBASEDIR)
            arcpy.CreateFolder_management(cfg.CENTRALITYBASEDIR,
                                        cfg.CIRCUITOUTPUTDIR_NM)
            arcpy.CreateFolder_management(cfg.CENTRALITYBASEDIR,
                                        cfg.CIRCUITCONFIGDIR_NM)
            lu.clean_out_workspace(cfg.CORECENTRALITYGDB)

            s7.STEP7_calc_centrality()
            if not cfg.SAVECENTRALITYDIR:
                lu.delete_dir(cfg.CENTRALITYBASEDIR)

        if cfg.DOPINCH:
            if cfg.CWDCUTOFF > 0: # Negative values mean we're restarting
                gprint("Creating output folder: " + cfg.CIRCUITBASEDIR)
                lu.delete_dir(cfg.CIRCUITBASEDIR)
                lu.create_dir(cfg.CIRCUITBASEDIR)
                arcpy.CreateFolder_management(cfg.CIRCUITBASEDIR,
                                        cfg.CIRCUITOUTPUTDIR_NM)
                arcpy.CreateFolder_management(cfg.CIRCUITBASEDIR,
                                        cfg.CIRCUITCONFIGDIR_NM)

            s8.STEP8_calc_pinchpoints()

            if not cfg.SAVE_TEMP_CIRCUIT_FILES:
                lu.delete_dir(cfg.SCRATCHDIR)
            if not cfg.SAVECIRCUITDIR:
                lu.delete_dir(cfg.CIRCUITBASEDIR)

        gprint('\nDONE!\n')

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.exit_with_python_error(_SCRIPT_NAME)
Exemplo n.º 10
0
def do_cwd_calcs(x, linkTable, coresToMap, lcpLoop, failures):
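    """Calculate cost-weighted distances (CWDs) and map least-cost paths from
    one focal core area (coresToMap[x]) to its target cores, updating
    linkTable.

    Returns (linkTable, failures, lcpLoop), or (None, failures, lcpLoop) when
    a geoprocessing call fails and the core should be retried.

    """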
    try:
        # This is the focal core area we're running cwd out from
        sourceCore = int(coresToMap[x])

        # Create temporary scratch directory just this focal core
        coreDir = path.join(cfg.SCRATCHDIR, 'core' + str(sourceCore))
        lu.delete_dir(coreDir)
        lu.create_dir(coreDir)

        if arcpy:
            gp = arcpy.gp
            arcpy.env.workspace = coreDir
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
            arcpy.env.overwriteOutput = True
            arcpy.env.extent = "MINOF"
        else:
            gp = cfg.gp
            gp.workspace = coreDir
            gp.scratchWorkspace = cfg.ARCSCRATCHDIR
            gp.OverwriteOutput = True
            gp.Extent = "MINOF"

        write_cores_to_map(x, coresToMap)

        # Get target cores based on linktable with reinstated links
        # (we temporarily disable them below by adding 1000)
        linkTableTemp = linkTable.copy()
        # reinstate temporarily disabled links
        rows = npy.where(linkTableTemp[:,cfg.LTB_LINKTYPE] > 1000)
        linkTableTemp[rows,cfg.LTB_LINKTYPE] = (
            linkTableTemp[rows,cfg.LTB_LINKTYPE] - 1000)
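        # (Link types are offset by +1000 once a core pair has been mapped
        # from an earlier source core; stripping the offset from this
        # temporary copy lets get_core_targets see every core connected to
        # the focal core.)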
        del rows

        # get core areas to be connected to focal core
        targetCores = lu.get_core_targets(sourceCore, linkTableTemp)
        # gprint( str(sourceCore))
        # gprint(str(linkTableTemp.astype('int32')))
        # gprint('targets'+str(targetCores))
        del linkTableTemp

        if len(targetCores)==0:
            # Nothing to do, so reset failure count and return.
            failures = 0
            return linkTable, failures, lcpLoop

        lu.dashline(0)
        gprint('Target core areas for core area #' +
                          str(sourceCore) + ' = ' + str(targetCores))

        # -------------------------------------------------------------
        # Create BOUNDING FEATURE to limit extent of cost distance
        # calculations-This is a set of circles encompassing core areas
        # we'll be connecting each core area to.
        if cfg.BUFFERDIST is not None:
            # fixme: move outside of loop   # new circle
            gp.MakeFeatureLayer(cfg.BNDCIRS,"fGlobalBoundingFeat")

            start_time = time.clock()
            # loop through targets and get bounding circles that
            # contain focal core and target cores
            # gprint("\nAdding up bounding circles for source"
                              # " core " + str(sourceCore))
            gp.SelectLayerByAttribute("fGlobalBoundingFeat",
                                          "CLEAR_SELECTION")
            for i in range(len(targetCores)):
                # run thru circleList, find link that core pair
                # corresponds to.
                if sourceCore < targetCores[i]:
                    corex = sourceCore
                    corey = targetCores[i]
                else:
                    corey = sourceCore
                    corex = targetCores[i]

                cores_x_y = str(int(corex))+'_'+str(int(corey))
                field = "cores_x_y"
                # fixme: need to check for case where link is not found
                gp.SelectLayerByAttribute(
                    "fGlobalBoundingFeat", "ADD_TO_SELECTION", field +
                    " = '" + cores_x_y + "'")

            lu.delete_data(path.join(coreDir,cfg.BNDFC))
            # fixme: may not be needed- can we just clip raster
            # using selected?
            gp.CopyFeatures_management("fGlobalBoundingFeat",
                                           cfg.BNDFC)

            # Clip out bounded area of resistance raster for cwd
            # calculations from focal core
            bResistance = path.join(coreDir,"bResistance") # Can't be tif-
                                                           # need STA for CWD
            lu.delete_data(bResistance)
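            # Geoprocessing calls below are built as strings and exec'd so
            # that, when ArcGIS fails intermittently, print_arcgis_failures
            # can log the exact statement and this function can return None
            # (letting the caller retry this core); after ~20 failures the
            # statement is simply re-run so it either succeeds or raises.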
            statement = (
                'gp.ExtractByMask_sa(cfg.BOUNDRESIS, cfg.BNDFC, bResistance)')
            try:
                exec statement
                randomerror()
            except:
                failures = lu.print_arcgis_failures(statement, failures)
                if failures < 20:
                    return None,failures,lcpLoop
                else: exec statement

        else:
            bResistance = cfg.BOUNDRESIS
        # ---------------------------------------------------------
        # CWD Calculations
        outDistanceRaster = lu.get_cwd_path(sourceCore)
        # Check if climate tool is calling linkage mapper
        if cfg.TOOL == cfg.TOOL_CC:
            back_rast = outDistanceRaster.replace("cwd_", "back_")
        else:
            back_rast = "BACK"
            lu.delete_data(path.join(coreDir, back_rast))
            lu.delete_data(outDistanceRaster)
            start_time = time.clock()

            # Create raster that just has source core in it
            # Note: this seems faster than setnull with LI grid.
            SRCRASTER = 'source' + tif
            lu.delete_data(path.join(coreDir,SRCRASTER))
            if arcpy:
                statement = ('conRaster = '
                             'Con(Raster(cfg.CORERAS) == int(sourceCore), 1);'
                             'conRaster.save(SRCRASTER)')
            else:
                expression = ("con(" + cfg.CORERAS + " == " +
                               str(int(sourceCore)) + ", 1)")
                statement = ('gp.SingleOutputMapAlgebra_sa'
                            '(expression, SRCRASTER)')

            try:
                exec statement
                randomerror()
            except:
                failures = lu.print_arcgis_failures(statement, failures)
                if failures < 20:
                    return None, failures, lcpLoop
                else: exec statement

            # Cost distance raster creation
            if arcpy:
                arcpy.env.extent = "MINOF"
            else:
                gp.Extent = "MINOF"

            lu.delete_data(path.join(coreDir,"BACK"))
            
            if arcpy:
                statement = ('outCostDist = CostDistance(SRCRASTER, '
                             'bResistance, cfg.TMAXCWDIST, back_rast);'
                             'outCostDist.save(outDistanceRaster)')
            else:
                statement = ('gp.CostDistance_sa(SRCRASTER, bResistance, '
                             'outDistanceRaster, cfg.TMAXCWDIST, back_rast)')
            try:
                exec statement
                randomerror()
            except:
                failures = lu.print_arcgis_failures(statement, failures)
                if failures < 20:
                    return None, failures, lcpLoop
                else:
                    exec statement

        start_time = time.clock()
        # Extract cost distances from source core to target cores
        # Fixme: there will be redundant calls to b-a when already
        # done a-b
        ZNSTATS = path.join(coreDir, "zonestats.dbf")
        lu.delete_data(ZNSTATS)
        #Fixme: zonalstatistics is returning integer values for minimum. Why???
        #Extra zonalstatistics code implemented later in script to correct
        #values.
        if arcpy:
            statement = ('outZSaT = ZonalStatisticsAsTable(cfg.CORERAS, '
                    '"VALUE", outDistanceRaster,ZNSTATS, "DATA", "MINIMUM")')
        else:
            statement = ('gp.zonalstatisticsastable_sa('
                      'cfg.CORERAS, "VALUE", outDistanceRaster, ZNSTATS)')
                      
        try:
            exec statement
            randomerror()
        except:
            failures = lu.print_arcgis_failures(statement, failures)
            if failures < 20:
                return None,failures,lcpLoop
            else:
                if cfg.TOOL == cfg.TOOL_CC:
                    msg = ('ERROR in Zonal Stats. Please restart ArcMap '
                        'and try again.')                
                else:
                    msg = ('ERROR in Zonal Stats. Restarting ArcMap '
                        'then restarting Linkage Mapper at step 3 usually\n'
                        'solves this one so please restart and try again.')

                lu.raise_error(msg)
        tableRows = gp.searchcursor(ZNSTATS)
        tableRow = tableRows.Next()
        while tableRow:
            if tableRow.Value > sourceCore:
                link = lu.get_links_from_core_pairs(linkTable,
                                                    sourceCore,
                                                    tableRow.Value)
                if linkTable[link,cfg.LTB_LINKTYPE] > 0: # valid link
                    linkTable[link,cfg.LTB_CWDIST] = tableRow.Min
                    if cfg.MAXCOSTDIST is not None:
                        if ((tableRow.Min > cfg.MAXCOSTDIST) and
                           (linkTable[link,cfg.LTB_LINKTYPE] != cfg.LT_KEEP)):
                             # Disable link, it's too long
                            linkTable[link,cfg.LTB_LINKTYPE] = cfg.LT_TLLC
                    if cfg.MINCOSTDIST is not None:
                        if (tableRow.Min < cfg.MINCOSTDIST and
                           (linkTable[link,cfg.LTB_LINKTYPE] != cfg.LT_KEEP)):
                            # Disable link, it's too short
                            linkTable[link,cfg.LTB_LINKTYPE] = cfg.LT_TSLC
            tableRow = tableRows.next()
        del tableRow, tableRows
        #start_time = lu.elapsed_time(start_time)

        # ---------------------------------------------------------
        # Check for intermediate cores AND map LCP lines
        for y in range(0,len(targetCores)):
            targetCore = targetCores[y]
            rows = lu.get_links_from_core_pairs(linkTable, sourceCore,
                                                targetCore)
            # Map all links for which we successfully extracted
            #  cwds in above code
            link = rows[0]
            if (linkTable[link,cfg.LTB_LINKTYPE] > 0 and
                linkTable[link,cfg.LTB_LINKTYPE] < 1000 and
                linkTable[link,cfg.LTB_CWDIST] != -1):
                # Flag so that we only evaluate this pair once
                linkTable[rows,cfg.LTB_LINKTYPE] = (linkTable
                                               [rows,cfg.LTB_LINKTYPE]
                                               + 1000)

                # Create raster that just has target core in it
                TARGETRASTER = 'targ' + tif
                lu.delete_data(path.join(coreDir,TARGETRASTER))
                try:
                    if arcpy:
                        # For climate corridors, errors occur when core raster
                        # overlaps null values in cwd rasters
                        statement = ('conRaster = Con(IsNull(outDistanceRaster'
                            '), Int(outDistanceRaster), Con(Raster'
                            '(cfg.CORERAS) == int(targetCore), 1));'
                            'conRaster.save(TARGETRASTER)') 
                        # statement = ('conRaster = Con(Raster('
                                    # 'cfg.CORERAS) == int(targetCore), 1);'
                                    # 'conRaster.save(TARGETRASTER)')

                    else:
                        expression = ("con(" + cfg.CORERAS + " == " +
                        str(int(targetCore)) + ",1)")
                        statement = ('gp.SingleOutputMapAlgebra_sa(expression,'
                                     ' TARGETRASTER)')
                    exec statement
                    randomerror()
                except:
                    failures = lu.print_arcgis_failures(statement, failures)
                    if failures < 20:
                        return None,failures,lcpLoop
                    else: exec statement
                # Execute ZonalStatistics to get more precise cw distance if
                # arc rounded it earlier (not critical, hence the try/pass)
                if (linkTable[link,cfg.LTB_CWDIST] ==
                                int(linkTable[link,cfg.LTB_CWDIST])):
                    try:
                        zonalRas = path.join(coreDir,'zonal')
                        gp.ZonalStatistics_sa(TARGETRASTER, "VALUE",
                            outDistanceRaster, zonalRas, "MINIMUM", "DATA")
                        minObject = gp.GetRasterProperties_management(zonalRas,
                                                                "MINIMUM")
                        rasterMin = float(str(minObject.getOutput(0)))
                        linkTable[link,cfg.LTB_CWDIST] = rasterMin
                        lu.delete_data(zonalRas)
                    except:
                        pass
                # Cost path maps the least cost path
                # between source and target
                lcpRas = path.join(coreDir,"lcp" + tif)
                lu.delete_data(lcpRas)

                # Note: costpath (both gp and arcpy versions) uses GDAL.               
                if arcpy:
                    statement = ('outCostPath = CostPath(TARGETRASTER,'
                          'outDistanceRaster, back_rast, "BEST_SINGLE", ""); '
                          'outCostPath.save(lcpRas)')
                else:
                    statement = ('gp.CostPath_sa(TARGETRASTER, '
                                 'outDistanceRaster, back_rast, '
                                 'lcpRas, "BEST_SINGLE", "")')
                try:
                    exec statement                    
                    randomerror()
                except:
                    failures = lu.print_arcgis_failures(statement, failures)
                    if failures < 20:
                        return None,failures,lcpLoop
                    else:
                        lu.dashline(1)
                        gprint('\nCost path is failing for Link #'
                           + str(int(link)) + ' connecting core areas ' +
                            str(int(sourceCore)) + ' and ' +
                            str(int(targetCore)) + '.\n'
                            'Retrying one more time in 5 minutes.')
                        lu.snooze(300)
                        exec statement
                
                # fixme: may be fastest to not do selection, do
                # EXTRACTBYMASK, getvaluelist, use code snippet at end
                # of file to discard src and target values. Still this
                # is fast- 13 sec for LI data...But I'm not very
                # comfortable using failed coreMin as our test....
                if (cfg.S3DROPLCCSic and
                    (linkTable[link,cfg.LTB_LINKTYPE] != cfg.LT_KEEP) and
                    (linkTable[link,cfg.LTB_LINKTYPE] != cfg.LT_KEEP + 1000)):
                    # -------------------------------------------------
                    # Drop links where lcp passes through intermediate
                    # core area. The method below is faster than the
                    # valuelist method because of the SOMA
                    # (SingleOutputMapAlgebra) call in the valuelist method.
                    # make a feature layer for input cores to select from
                    gp.MakeFeatureLayer(cfg.COREFC, cfg.FCORES)

                    gp.SelectLayerByAttribute(cfg.FCORES,
                                              "NEW_SELECTION",
                                              cfg.COREFN + ' <> ' +
                                              str(int(targetCore)) +
                                              ' AND ' + cfg.COREFN +
                                              ' <> ' +
                                              str(int(sourceCore)))

                    corePairRas = path.join(coreDir,"s3corepair"+ tif)
                    if arcpy:
                        arcpy.env.extent = cfg.BOUNDRESIS
                    else:
                        gp.extent = gp.Describe(cfg.BOUNDRESIS).extent


                    statement = ('gp.FeatureToRaster_conversion(cfg.FCORES, '
                                'cfg.COREFN, corePairRas, gp.cellSize)')
                    try:
                        exec statement
                        randomerror()
                    except:
                        failures = lu.print_arcgis_failures(statement,
                                                            failures)
                        if failures < 20:
                            return None,failures,lcpLoop
                        else: exec statement

                    #------------------------------------------
                    # Intermediate core test
                    try:
                        coreDetected = test_for_intermediate_core(coreDir,
                                                lcpRas, corePairRas)
                        randomerror()
                    except:
                        statement = 'test_for_intermediate_core'
                        failures = lu.print_arcgis_failures(statement,
                                                            failures)
                        if failures < 20:
                            return None,failures,lcpLoop
                        else:
                            coreDetected = test_for_intermediate_core(
                                        coreDir, lcpRas, corePairRas)

                    if coreDetected:
                        # lu.dashline()
                        gprint(
                            "Found an intermediate core in the "
                            "least-cost path between cores " +
                            str(int(sourceCore)) + " and " +
                            str(int(targetCore)) + ".  The corridor "
                            "will be removed.")
                        # disable link
                        rows = lu.get_links_from_core_pairs(linkTable,
                                                    sourceCore,
                                                    targetCore)
                        linkTable[rows,cfg.LTB_LINKTYPE] = cfg.LT_INT
                    #------------------------------------------

                # Create lcp shapefile.  lcploop just keeps track of
                # whether this is first time function is called.
                lcpLoop = lu.create_lcp_shapefile(coreDir, linkTable,
                                                  sourceCore, targetCore,
                                                  lcpLoop)

        # Made it through, so reset failure count and return.
        failures = 0
        lu.delete_dir(coreDir)
        # if cfg.TOOL == cfg.TOOL_CC:
            # lu.delete_data(back_rast)
        return linkTable, failures, lcpLoop

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        # gprint('****Failed in step 3. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        # gprint('****Failed in step 3. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
Exemplo n.º 11
0
def main():
    """Iterates over LM, BM, and restoration tasks"""

    ## USER SETTINGS ######################################################
    ## Restoration Settings
    ## ALL input data must be in the same projection
    start_time = time.clock()
    restoreMaxROI = False # Set to True to restore highest ROI
                         # Set to False to restore strongest barrier
    restoredResistanceVal = 1 # Resistance value of restored habitat.  Must be 1 or greater.
    restorationDataGDB = "C:\\barrierClassAnalysis\\RestorationINPUTS_July2013.gdb" # No spaces or special chars in paths or gdb names
    outputDir = "C:\\barrierClassAnalysis\\output" # No spaces in path, avoid using dropbox or network drive
                                                   # Project directories will be created in this (iter1, iter2...)
                                                   # as will an output geodatabase
    resistanceRaster = "URWA_resis"# Resistance raster.  Should be in input GDB
    coreFC = 'URWA_HCAs_Doug_Grant'# Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant'
    coreFN = 'HCA_ID' # Core area field name
    
    radius = 450 # restoration radius in meters
    iterations = 13 # number of restorations to perform
    minAgThreshold = 0.75 # if less than this proportion of ag in circle, don't consider restoring circle
    minImprovementVal = 0 # Don't consider barriers below this improvement score (average improvement per meter diameter restored)
    parcelCostRaster = 'DougGrantParcelCost_m2_projected_90m' # Average per-m2 parcel cost per pixel. Snapped to resistance raster.
    restorationCostRaster = 'restCostPer_m2' # Right now this is just a raster with all pixels set to 0.113174
    agRaster = "ARESmaskp_projected" # 1=Ag, 0 = not Ag
    barrierCombineMethod = 'Maximum' # Some restorations benefit multiple corridors. 
                                     # 'Maximum' takes the greatest improvement across core area pairs
                                     # 'Sum' adds improvement scores across all pairs.
    cwdThresh = None # Use cwdThresh = None for no threshold. Use cwdThresh = X to not consider 
                      # restorations more than X map units away from each core area.
    ## END USER SETTINGS ######################################################
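    # Workflow overview: each iteration runs Linkage Mapper on the current
    # resistance raster, runs Barrier Mapper to score candidate restoration
    # circles, picks the circle with the best barrier improvement score (or
    # ROI), digitally "restores" that circle in the resistance raster, and
    # repeats.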
    try:
        # Setup path and create directories
        gprint('Hey! Make sure everything is in the same projection!\n')
        gprint('Setting up paths and creating directories')
        sys.path.append('..\\toolbox\\scripts')
        resRast = os.path.join(restorationDataGDB, resistanceRaster)   
        coreFCPath = os.path.join(restorationDataGDB, coreFC)

        # Set up a NEW output gdb (leave previous ones on drive)
        for i in range (1,200):
            outputGDB = 'restorationOutput'+str(i)+'.gdb'
            if not arcpy.Exists(os.path.join(outputDir,outputGDB)):
                break
            gprint('Previous output GDB '+ outputGDB +' exists.  Delete to save disk space.')    
        arcpy.CreateFileGDB_management(outputDir,outputGDB)
        outputGDB = os.path.join(outputDir,outputGDB)
        logFile = os.path.join(outputGDB,'Iterate Barriers'+str(i)+'.py')
        shutil.copyfile(__file__, logFile) #write a copy of this file to output dir as a record of settings
        
        arcpy.env.cellSize = resRast
        arcpy.env.extent = resRast
        arcpy.env.snapRaster = resRast
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = outputGDB
        arcpy.env.workspace = outputGDB
        
        spatialref = arcpy.Describe(resRast).spatialReference
        mapunits = spatialref.linearUnitName
        gprint('Cell size = ' + str(arcpy.env.cellSize) + ' ' + mapunits +'s')    
        
        
        # Calculate fraction of ag within radius of each pixel
        gprint('Calculating purchase cost, fraction of ag, etc within radius of each pixel.')
        agRaster = os.path.join(restorationDataGDB, agRaster)
        inNeighborhood = NbrCircle(radius, "MAP")
        arcpy.env.extent = agRaster
        outFocalStats = arcpy.sa.FocalStatistics(agRaster,
                                        inNeighborhood, "MEAN","NODATA")
        proportionAgRaster = os.path.join(outputGDB,'proportionAgRas')
        outFocalStats.save(proportionAgRaster)    
        arcpy.env.extent = resRast

        # Calculate purchase cost of circles
        parcelCostRaster = os.path.join(restorationDataGDB, parcelCostRaster)
        arcpy.env.extent = parcelCostRaster
        outFocalStats = arcpy.sa.FocalStatistics(parcelCostRaster,inNeighborhood, "MEAN","DATA")
        costFocalStatsRaster = os.path.join(outputGDB,'costFocalStatsRaster')
        outFocalStats.save(costFocalStatsRaster)
        arcpy.env.extent = resRast
        
        circleArea = float(npy.pi * radius * radius)
        outras = (Raster(costFocalStatsRaster) * circleArea)
        purchCostRaster = os.path.join(outputGDB,'purchaseCostRaster')
        outras.save(purchCostRaster)
        lu.delete_data(costFocalStatsRaster)
        
        # restCost = npy.pi * radius * radius * restCostPer_m2
        restorationCostRaster = os.path.join(restorationDataGDB, restorationCostRaster)
        outras = Raster(purchCostRaster) + (Raster(restorationCostRaster) * radius * radius * npy.pi)
        totalCostRaster = os.path.join(outputGDB,'totalCostRaster')
        outras.save(totalCostRaster)
        # lu.build_stats(totalCostRaster)
        
        # Create mask to remove areas without cost data
        arcpy.env.extent = totalCostRaster
        costMaskRaster = os.path.join(outputGDB,'costMaskRaster')
        costThresh = 0
        outCon = arcpy.sa.Con((Raster(totalCostRaster) > float(costThresh)), 1)
        outCon.save(costMaskRaster)
        arcpy.env.extent = resRast
        
        # Create mask to remove areas below ag threshold
        outCon = arcpy.sa.Con((Raster(proportionAgRaster) > float(minAgThreshold)), 1)
        agMaskRaster = os.path.join(outputGDB, 'agMaskRaster')
        outCon.save(agMaskRaster)       
        
        doStep1 = 'true'
        doStep2 = 'true'
        doStep5 = 'false'
        for iter in range(1,iterations+1): #xxx
            start_time1 = time.clock()
            arcpy.env.cellSize = resRast # Some env settings get changed by linkage mapper and must be reset here
            arcpy.env.extent = resRast
            arcpy.env.snapRaster = resRast
            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB

            lu.dashline(1)
            gprint('Running iteration number '+str(iter))
            projDir = os.path.join(outputDir,'iter' + str(iter)+'Proj')    
            lu.create_dir(outputDir)
            lu.delete_dir(projDir) #xxx
            lu.create_dir(projDir)
            if iter > 1: # Copy previous s2 linktable to new project directory
                datapassDir = os.path.join(projDir,'datapass')
                lu.create_dir(datapassDir)
                projDir1 = os.path.join(outputDir,'iter1Proj')
                datapassDirIter1 = os.path.join(projDir1,'datapass')
                s2LinktableIter1 = os.path.join(datapassDirIter1 ,'linkTable_s2.csv')
                s2LinkTable = os.path.join(datapassDir ,'linkTable_s2.csv')
                shutil.copyfile(s2LinktableIter1, s2LinkTable)

            
            # Run Linkage Mapper
            distFile = os.path.join(outputDir, coreFC + '_dists.txt') # Copy distances text file from earlier LM run to the output directory- speeds things up!
            if not os.path.exists(distFile):
                if iter == 1:
                    gprint('Will calculate distance file.')
                    distFile = '#'
                else:
                    projDir1 = os.path.join(outputDir,'iter1Proj')
                    distFile1 = os.path.join(projDir1, coreFC + '_dists.txt')
                    shutil.copyfile(distFile1,distFile) # Put a copy here for future runs
                    
            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB

            argv = ('lm_master.py', projDir, coreFCPath, coreFN, resRast,
                    doStep1, doStep2, 'Cost-Weighted & Euclidean', distFile,
                    'true', 'true', 'false', '4', 'Cost-Weighted', 'true',
                    doStep5, 'true', '200000', '10000', '#', '#', '#', '#') 
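            # The leading arguments follow the variable names above (project
            # directory, core feature class path, core ID field, resistance
            # raster, step 1 and 2 flags); the remaining literals and flags
            # are the Linkage Mapper settings chosen for this analysis.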
            gprint('Running ' + str(argv))
            cfg.lm_configured = False  # Ensures lm_master uses current argv
            lm_master.lm_master(argv)    #xxx
            doStep1 = 'false' # Can skip for future iterations
            doStep2 = 'false' # Can skip for future iterations        
            doStep5 = 'false' # Skipping for future iterations
            
            startRadius = str(radius)
            endRadius = str(radius)
            radiusStep = '0'
            saveRadiusRasters= 'false'
            writePctRasters = 'false'

            argv = ('barrier_master.py', projDir, resRast, startRadius, endRadius, radiusStep, barrierCombineMethod,
                    saveRadiusRasters, writePctRasters, cwdThresh)
            gprint('Running ' + str(argv))
            barrier_master.bar_master(argv) #xxx

            arcpy.env.cellSize = resRast # Some env settings get changed by linkage mapper and must be reset here
            arcpy.env.extent = resRast
            arcpy.env.snapRaster = resRast
            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB
            
            gprint('Finding restoration circles with max barrier score / ROI')
            # Find points with max ROI
            PREFIX = os.path.basename(projDir)
            if barrierCombineMethod == 'Sum':
                sumSuffix = 'Sum'
            else:
                sumSuffix = ''
            barrierFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" + str(radius))
            barrierRaster = os.path.join(projDir,'output','barriers.gdb',barrierFN)
            if not arcpy.Exists(barrierRaster):
                msg = ('Error: cannot find barrier output: '+barrierRaster)
                lu.raise_error(msg)

            # arcpy.env.cellSize = agMaskRaster
            # arcpy.env.extent = agMaskRaster

            if iter > 1:
                gprint('Creating mask for previously restored areas')
                inNeighborhood = NbrCircle(radius, "MAP")
                arcpy.env.extent = allRestoredAreasRaster
                outFocalStats = arcpy.sa.FocalStatistics(allRestoredAreasRaster,inNeighborhood, "MEAN","DATA")
                allRestoredFocalRaster = os.path.join(outputGDB,'allRestFocRas_iter'+str(iter))
                outFocalStats.save(allRestoredFocalRaster) # Anything > 0 includes a restored area and is masked out below
                arcpy.env.extent = resRast
                restMaskRaster = os.path.join(outputGDB,'restMaskRaster_iter'+str(iter))
                minval = 0
                outCon = arcpy.sa.Con((Raster(allRestoredFocalRaster) == float(minval)), 1)
                outCon.save(restMaskRaster)
                
            # Candidate areas have not been restored, have cost data, meet
            # minimum improvement score criteria, and have enough ag in them
            candidateBarrierRaster = os.path.join(outputGDB, 'candidateBarrierRaster' + '_iter'+str(iter))
            if iter > 1:
                gprint('Creating candidate restoration raster using barrier results, previous restorations, and selection criteria')
                outCalc = (Raster(costMaskRaster) * Raster(agMaskRaster) * Raster(barrierRaster) * Raster(restMaskRaster) * (radius * 2)) # ROI scores will be in terms of total improvement (= score * diameter)
            else:
                outCalc = (Raster(costMaskRaster) * Raster(agMaskRaster) * Raster(barrierRaster) * radius * 2)
            
            minBarrierScore = minImprovementVal * radius * 2
            if restoredResistanceVal != 1:
                outCalc2 = (outCalc - (2 * radius * (restoredResistanceVal - 1)))
                outCon = arcpy.sa.Con((outCalc2 >= float(minBarrierScore)), outCalc2)
            else:
                outCon = arcpy.sa.Con((outCalc >= float(minBarrierScore)), outCalc)
            outCon.save(candidateBarrierRaster)
            lu.build_stats(candidateBarrierRaster)
            
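            # ROI rasters: barrier improvement score divided by cost.
            # purchaseRoiRaster uses land purchase cost only; totalRoiRaster
            # uses purchase plus restoration cost.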
            purchaseRoiRaster = os.path.join(outputGDB, 'purchaseRoiRaster' + '_iter'+str(iter))
            outCalc = Raster(candidateBarrierRaster) / Raster(purchCostRaster) 
            outCalc.save(purchaseRoiRaster)
            lu.build_stats(purchaseRoiRaster)
            
            totalRoiRaster = os.path.join(outputGDB, 'purchaseRestRoiRaster' + '_iter'+str(iter))
            outCalc = Raster(candidateBarrierRaster) / Raster(totalCostRaster)
            outCalc.save(totalRoiRaster)
            lu.build_stats(totalRoiRaster)

            maxBarrier = arcpy.GetRasterProperties_management(candidateBarrierRaster,"MAXIMUM")
            gprint('Maximum barrier improvement score: '+str(maxBarrier.getOutput(0)))
            if float(maxBarrier.getOutput(0)) < 0:
                arcpy.AddWarning("\nNo barriers found that meet CWD or Ag threshold criteria.")
            
            maxPurchROI = arcpy.GetRasterProperties_management(purchaseRoiRaster,"MAXIMUM")
            gprint('Maximum purchase ROI score: '+str(maxPurchROI.getOutput(0)))

            maxROI = arcpy.GetRasterProperties_management(totalRoiRaster,"MAXIMUM")
            gprint('Maximum total ROI score: '+str(maxROI.getOutput(0)))

            if restoreMaxROI:
                outPoint = os.path.join(outputGDB, 'maxRoiPoint'+'_iter'+str(iter))
                gprint('Choosing circle with maximum ROI to restore')
                outCon = arcpy.sa.Con((Raster(totalRoiRaster) >= float(maxROI.getOutput(0))), totalRoiRaster)
                maxRoiRaster = os.path.join(outputGDB, 'maxRoiRaster')
                outCon.save(maxRoiRaster)    
                # Save max ROI to point
                try:
                    arcpy.RasterToPoint_conversion(maxRoiRaster, outPoint)
                except:
                    msg = ('Error: it looks like there are no viable restoration candidates.')
                    lu.raise_error(msg)
        
            else: #Restoring strongest barrier instead
                outPoint = os.path.join(outputGDB, 'maxBarrierPoint'+'_iter'+str(iter))
                gprint('Choosing circle with maximum BARRIER IMPROVEMENT SCORE to restore')
                outCon = arcpy.sa.Con((Raster(candidateBarrierRaster) >= float(maxBarrier.getOutput(0))), candidateBarrierRaster)
                maxBarrierRaster = os.path.join(outputGDB, 'maxBarrierRaster')
                outCon.save(maxBarrierRaster)            
                # Save max barrier to point
                try:
                    arcpy.RasterToPoint_conversion(maxBarrierRaster, outPoint)
                except:
                    msg = ('Error: it looks like there are no viable restoration candidates.')
                    lu.raise_error(msg)            
            
            gprint('Done evaluating candidate restorations')
            result = int(arcpy.GetCount_management(outPoint).getOutput(0)) 
            if result > 1:
                arcpy.AddWarning('Deleting points with identical ROI/improvement score values') # Would be better to retain point with max barrier score when we have multiple points with same ROI
                arcpy.DeleteIdentical_management(outPoint, "grid_code", 0.1, 0.1)            
            arcpy.sa.ExtractMultiValuesToPoints(outPoint, 
                [[candidateBarrierRaster, "barrierScore"],[purchCostRaster, "purchCost"],
                [totalCostRaster, "totalCost"],[purchaseRoiRaster, "purchaseROI"],
                [totalRoiRaster, "totalROI"]], "NONE")
            arcpy.AddField_management(outPoint, "restorationNumber", "SHORT")
            arcpy.CalculateField_management(outPoint, "restorationNumber", iter)        
            arcpy.AddField_management(outPoint, "radius", "DOUBLE")
            arcpy.CalculateField_management(outPoint, "radius", radius)        
            arcpy.AddField_management(outPoint, "barrierScore_per_m", "DOUBLE")
            arcpy.CalculateField_management(outPoint, "barrierScore_per_m", "(float(!barrierScore!) / (!radius! * 2))", "PYTHON")        

            gprint('\nCreating restoration circles')
            if restoreMaxROI:
                circleFC = os.path.join(outputGDB, 'maxRoiCircle'+'_iter'+str(iter))
            else:
                circleFC = os.path.join(outputGDB, 'maxBarrierCircle'+'_iter'+str(iter))
            arcpy.Buffer_analysis(outPoint, circleFC, radius)
            gprint('Rasterizing restoration circles')
            if restoreMaxROI:
                circleRas = os.path.join(outputGDB, 'maxRoiCircleRas'+'_iter'+str(iter))
            else:
                circleRas = os.path.join(outputGDB, 'maxBarrierCircleRas'+'_iter'+str(iter))
            arcpy.FeatureToRaster_conversion(circleFC, 'totalROI', circleRas, arcpy.env.cellSize)    

            # restore raster
            gprint('Digitally restoring resistance raster')
            resRastRestored = os.path.join(outputGDB, 'resRastRestored'+'_iter'+str(iter))
            outCon = arcpy.sa.Con(IsNull(circleRas), resRast, restoredResistanceVal)
            outCon.save(resRastRestored)

            allRestoredAreasRaster = os.path.join(outputGDB, 'allRestoredAreas_iter'+str(iter))
            PrevRestoredAreasRaster= os.path.join(outputGDB, 'allRestoredAreas_iter'+str(iter-1))
            if iter == 1:
                outCon = arcpy.sa.Con(IsNull(circleRas), 0, 1)
            else:
                outCon = arcpy.sa.Con(IsNull(circleRas), PrevRestoredAreasRaster, 1) # Add this restoration to areas restored
            outCon.save(allRestoredAreasRaster)
            
            lu.delete_data(circleRas)
            resRast = resRastRestored # Use for next iteration resistance raster
            
            #Add circle into feature class with all circles
            if restoreMaxROI:
                allCirclesFC = os.path.join(outputGDB,"allCirclesMaxROI")
            else:
                allCirclesFC = os.path.join(outputGDB,"allCirclesMaxBarriers")
            if iter == 1:
                arcpy.CopyFeatures_management(circleFC, allCirclesFC)
            else: 
                arcpy.Append_management(circleFC, allCirclesFC, "TEST")           
            gprint('Finished iteration #'+str(iter))
            start_time1 = lu.elapsed_time(start_time1)    

        gprint('\nDone with iterations.')
        start_time = lu.elapsed_time(start_time)    
        gprint('Outputs saved in: '+outputGDB)
        gprint('Back up your project directories if you want to save corridor/barrier results.')

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)        
Exemplo n.º 12
0
def lm_master(argv=None):
    """Main function for linkage mapper.

    Called by ArcMap with parameters or run from command line with parameters
    entered in script below.  Calls functions in dedicated scripts for each of
    5 processing steps.

    """
    # Setup global variables
    if cfg.TOOL != cfg.TOOL_CC:
        if argv is None:
            argv = sys.argv
        cfg.configure(cfg.TOOL_LM, argv)

    gprint = lu.gprint

    try:
        # Move results from earlier versions to new directory structure
        lu.move_old_results()
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # Create output directories if they don't exist
        lu.create_dir(cfg.OUTPUTDIR)
        lu.create_dir(cfg.LOGDIR)
        lu.create_dir(cfg.MESSAGEDIR)
        lu.create_dir(cfg.DATAPASSDIR)
        if cfg.TOOL != cfg.TOOL_CC:
            lu.delete_dir(cfg.SCRATCHDIR)
            lu.create_dir(cfg.SCRATCHDIR)
        lu.create_dir(cfg.ARCSCRATCHDIR)
        if cfg.TOOL == cfg.TOOL_LM:
            cfg.logFilePath = lu.create_log_file(cfg.PARAM_NAMES, argv)
            lu.write_custom_to_log(cfg.LMCUSTSETTINGS)
            lu.log_metadata(cfg.COREFC, [cfg.RESRAST_IN])

        lu.print_drive_warning()

        if cfg.CONNECTFRAGS:
            lu.dashline(1)
            lu.warn('Custom mode: will run steps 1-2 ONLY to cluster core polygons within ')
            lu.warn('the maximum Euclidean corridor distance from one another ')
            lu.warn('into polygons with a single cluster_ID value.')
            lu.warn('Make sure you have set a Maximum Euclidean corridor distance.')
            lu.dashline(2)
            cfg.STEP3 = False
            cfg.STEP4 = False
            cfg.STEP5 = False
            if cfg.MAXEUCDIST is None:
                raise RuntimeError('Maximum Euclidean distance required '
                                   'for custom cluster mode.')

        # Set data frame spatial reference to coordinate system of input data
        # Problems arise in this script (core raster creation) and in S2
        # (generate near table) if they differ.
        lu.set_dataframe_sr()

        # Check core ID field and project directory name.
        lu.check_cores(cfg.COREFC, cfg.COREFN)
        lu.check_project_dir()

        # Identify first step cleanup link tables from that point
        lu.dashline(1)
        if cfg.STEP1:
            gprint('Starting at step 1.')
            firststep = 1
        elif cfg.STEP2:
            gprint('Starting at step 2.')
            firststep = 2
        elif cfg.STEP3:
            gprint('Starting at step 3.')
            firststep = 3
            linkTableFile = lu.get_prev_step_link_table(step=3)  # Check exists
        elif cfg.STEP4:
            gprint('Starting at step 4.')
            firststep = 4
            linkTableFile = lu.get_prev_step_link_table(step=4)  # Check exists
        elif cfg.STEP5:
            gprint('Starting at step 5.')
            firststep = 5
            linkTableFile = lu.get_prev_step_link_table(step=5)  # Check exists
        lu.clean_up_link_tables(firststep)

        # Make a local grid copy of resistance raster for cwd runs-
        # will run faster than gdb.
        # Don't know if raster is in a gdb if entered from TOC
        lu.delete_data(cfg.RESRAST)
        gprint('\nMaking temporary copy of resistance raster for this run.')
        arcpy.env.outputCoordinateSystem = arcpy.Describe(cfg.COREFC).SpatialReference
        arcpy.env.extent = arcpy.Describe(cfg.RESRAST_IN).Extent
        arcpy.env.snapRaster = cfg.RESRAST_IN
        arcpy.env.cellSize = arcpy.Describe(cfg.RESRAST_IN).MeanCellHeight
        try:
            arcpy.CopyRaster_management(cfg.RESRAST_IN, cfg.RESRAST)
        except Exception:
            msg = ('ERROR: Could not make a copy of your resistance raster. ' +
                    'Try re-starting ArcMap to release the file lock.')
            lu.raise_error(msg)

        if (cfg.STEP1) or (cfg.STEP3):
            # Make core raster file
            gprint('\nMaking temporary raster of core file for this run.')
            lu.delete_data(cfg.CORERAS)
            arcpy.FeatureToRaster_conversion(cfg.COREFC, cfg.COREFN,
                          cfg.CORERAS, arcpy.Describe(cfg.RESRAST).MeanCellHeight)

        def delete_final_gdb(finalgdb):
            """Deletes final geodatabase"""
            if arcpy.Exists(finalgdb) and cfg.STEP5:
                try:
                    lu.clean_out_workspace(finalgdb)
                except Exception:
                    lu.dashline(1)
                    msg = ('ERROR: Could not remove contents of geodatabase ' +
                           finalgdb + '. \nIs it open in ArcMap? You may '
                           'need to re-start ArcMap to release the file lock.')
                    lu.raise_error(msg)
                lu.delete_dir(finalgdb)

        # Delete final output geodatabases
        delete_final_gdb(cfg.OUTPUTGDB_OLD)
        delete_final_gdb(cfg.OUTPUTGDB)
        delete_final_gdb(cfg.EXTRAGDB)
        delete_final_gdb(cfg.LINKMAPGDB)


        # Run linkage mapper processing steps
        if cfg.STEP1:
            s1.STEP1_get_adjacencies()
        if cfg.STEP2:
            s2.STEP2_build_network()
        if cfg.STEP3:
            s3.STEP3_calc_cwds()
        if cfg.STEP4:
            s4.STEP4_refine_network()
        if cfg.STEP5:
            s5.STEP5_calc_lccs()
            lu.dashline()
            gprint('Results from this run can be found in your output '
                    'directory:')
            gprint(cfg.OUTPUTDIR)

        # Clean up
        lu.delete_dir(cfg.SCRATCHDIR)

        arcpy.AddMessage('\nDone with linkage mapping.\n')


    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.exit_with_python_error(_SCRIPT_NAME)

    finally:
        lu.dashline()
        gprint('A record of run settings and messages can be found in your '
               'log directory:')
        gprint(cfg.MESSAGEDIR)
        lu.dashline(2)
        lu.close_log_file()
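
lm_master is normally launched from the ArcMap toolbox, but, as the "Iterate Barriers" example later in this listing shows, it can also be called directly with an argv-style tuple. Below is a minimal, hypothetical driver along those lines; the positional order is copied from that later example, may differ between Linkage Mapper versions, and every path is a placeholder.

# Hypothetical driver sketch -- not part of the tool itself.
import sys
sys.path.append(r"..\toolbox\scripts")    # assumed location of the LM scripts
import lm_master

argv = (
    "lm_master.py",
    r"C:\lm_demo\proj",                   # project directory (placeholder)
    r"C:\lm_demo\inputs.gdb\cores",       # core area feature class (placeholder)
    "core_ID",                            # core area ID field (placeholder)
    r"C:\lm_demo\inputs.gdb\resistance",  # resistance raster (placeholder)
    "true", "true",                       # run steps 1 and 2
    "Cost-Weighted & Euclidean",
    "#",                                  # let step 2 generate the distance file
    "true", "true", "false", "4",
    "Cost-Weighted", "true",
    "true",                               # run step 5
    "10000", "#", "#",                    # remaining options, as in the later example
)
lm_master.lm_master(argv)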
Exemplo n.º 13
0
def STEP7_calc_centrality():
    """ Analyze network centrality using Circuitscape
        given Linkage Mapper outputs

    """
    try:
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)

        arcpy.env.workspace = cfg.SCRATCHDIR

        # Check for valid LCP shapefile
        prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep = 7)
        if not arcpy.Exists(prevLcpShapefile):
            msg = ('Cannot find an LCP shapefile from step 5.  Please '
                    'rerun that step and any previous ones if necessary.')
            lu.raise_error(msg)

        # Remove lcp shapefile from this step if run previously
        lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s7.shp")
        lu.delete_data(lcpShapefile)

        csPath = lu.get_cs_path()

        invalidFNs = ['fid','id','oid','shape']
        if cfg.COREFN.lower() in invalidFNs:
        #if cfg.COREFN == 'FID' or cfg.COREFN == 'ID':
            lu.dashline(1)
            msg = ('ERROR: Core area field names ID, FID, SHAPE, and OID are'
                    ' reserved for ArcGIS. \nPlease choose another field- must'
                    ' be a positive integer.')
            lu.raise_error(msg)

        lu.dashline(1)
        gprint('Mapping centrality of network cores and links'
                '\nusing Circuitscape....')
        lu.dashline(0)

        # set the analysis extent and cell size to that of the resistance
        # surface
        coreCopy =  path.join(cfg.SCRATCHDIR, 'cores.shp')

        arcpy.CopyFeatures_management(cfg.COREFC, coreCopy)
        arcpy.AddField_management(coreCopy, "CF_Central", "DOUBLE", "10", "2")

        inLinkTableFile = lu.get_prev_step_link_table(step=7)
        linkTable = lu.load_link_table(inLinkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg =('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        if linkTable.shape[1] < 16: # If linktable has no entries from prior
                                    # centrality or pinchpoint analyses
            extraCols = npy.zeros((numLinks, 6), dtype="float64")
            linkTable = linkTable[:,0:10]
            linkTable = npy.append(linkTable, extraCols, axis=1)
            linkTable[:, cfg.LTB_LCPLEN] = -1
            linkTable[:, cfg.LTB_CWDEUCR] = -1
            linkTable[:, cfg.LTB_CWDPATHR] = -1
            linkTable[:, cfg.LTB_EFFRESIST] = -1
            linkTable[:, cfg.LTB_CWDTORR] = -1
            del extraCols

        linkTable[:, cfg.LTB_CURRENT] = -1

        coresToProcess = npy.unique(linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)
        del coresToProcess

        lu.dashline(0)

        coreList = linkTable[:,cfg.LTB_CORE1:cfg.LTB_CORE2+1]
        coreList = npy.sort(coreList)
        #gprint('There are ' + str(len(npy.unique(coreList))) + ' core areas.')

        # set up directory for centrality
        INCENTRALITYDIR = cfg.CENTRALITYBASEDIR
        OUTCENTRALITYDIR = path.join(cfg.CENTRALITYBASEDIR,
                                     cfg.CIRCUITOUTPUTDIR_NM)
        CONFIGDIR = path.join(INCENTRALITYDIR, cfg.CIRCUITCONFIGDIR_NM)

        # Set Circuitscape options and write config file
        options = lu.setCircuitscapeOptions()
        options['data_type']='network'
        options['habitat_file'] = path.join(INCENTRALITYDIR,
                                            'Circuitscape_graph.txt')
        # Setting point file equal to graph to do all pairs in Circuitscape
        options['point_file'] = path.join(INCENTRALITYDIR,
                                          'Circuitscape_graph.txt')
        outputFN = 'Circuitscape_network.out'
        options['output_file'] = path.join(OUTCENTRALITYDIR, outputFN)
        configFN = 'Circuitscape_network.ini'
        outConfigFile = path.join(CONFIGDIR, configFN)
        lu.writeCircuitscapeConfigFile(outConfigFile, options)

        delRows = npy.asarray(npy.where(linkTable[:,cfg.LTB_LINKTYPE] < 1))
        delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
        delRowsVector[:] = delRows[0, :]
        LT = lu.delete_row(linkTable, delRowsVector)
        del delRows
        del delRowsVector
        graphList = npy.zeros((LT.shape[0],3), dtype="float64")
        graphList[:,0] = LT[:,cfg.LTB_CORE1]
        graphList[:,1] = LT[:,cfg.LTB_CORE2]
        graphList[:,2] = LT[:,cfg.LTB_CWDIST]

        write_graph(options['habitat_file'] ,graphList)
        gprint('\nCalculating current flow centrality using Circuitscape...')
        #subprocess.call([csPath, outConfigFile], shell=True)   
        
        memFlag = call_circuitscape(csPath, outConfigFile)        
        
        outputFN = 'Circuitscape_network_branch_currents_cum.txt'
        currentList = path.join(OUTCENTRALITYDIR, outputFN)

        if not arcpy.Exists(currentList):
            write_graph(options['habitat_file'] ,graphList)
            gprint('\nCalculating current flow centrality using Circuitscape '
                   '(2nd try)...')
            # subprocess.call([csPath, outConfigFile], shell=True)  
            memFlag = call_circuitscape(csPath, outConfigFile)                    
            if not arcpy.Exists(currentList):
                lu.dashline(1)
                msg = ('ERROR: No Circuitscape output found.\n'
                       'It looks like Circuitscape failed.')
                arcpy.AddError(msg)
                lu.write_log(msg)
                exit(1)

                
        currents = load_graph(currentList,graphType='graph/network',
                              datatype='float64')

        numLinks = currents.shape[0]
        for x in range(0,numLinks):
            corex = currents[x,0]
            corey = currents[x,1]

            #linkId = LT[x,cfg.LTB_LINKID]
            row = lu.get_links_from_core_pairs(linkTable, corex, corey)
            #row = lu.get_linktable_row(linkId, linkTable)
            linkTable[row,cfg.LTB_CURRENT] = currents[x,2]

        coreCurrentFN = 'Circuitscape_network_node_currents_cum.txt'
        nodeCurrentList = path.join(OUTCENTRALITYDIR, coreCurrentFN)
        nodeCurrents = load_graph(nodeCurrentList,graphType='graph/network',
                              datatype='float64')

        numNodeCurrents = nodeCurrents.shape[0]
        # Write node (core) currents into the CF_Central field of the core copy
        rows = arcpy.UpdateCursor(coreCopy)
        for row in rows:
            coreID = row.getValue(cfg.COREFN)
            for i in range(0, numNodeCurrents):
                if coreID == nodeCurrents[i, 0]:
                    row.setValue("CF_Central", nodeCurrents[i, 1])
                    break
            rows.updateRow(row)
        del row, rows
        gprint('Done with centrality calculations.')

        finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=5,
                                                  thisStep=7)
        linkTableFile = path.join(cfg.DATAPASSDIR, "linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFile, inLinkTableFile)
        linkTableFinalFile = path.join(cfg.OUTPUTDIR,
                                       cfg.PREFIX + "_linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable,
                            linkTableFinalFile, inLinkTableFile)
        gprint('Copy of final linkTable written to '+
                          linkTableFinalFile)

        finalCoreFile = path.join(cfg.CORECENTRALITYGDB,
                                     cfg.PREFIX + '_Cores')
        #copy core area map to gdb.
        if not arcpy.Exists(cfg.CORECENTRALITYGDB):
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                             path.basename(cfg.CORECENTRALITYGDB))
        arcpy.CopyFeatures_management(coreCopy, finalCoreFile)

        gprint('Creating shapefiles with linework for links.')
        lu.write_link_maps(linkTableFinalFile, step=7)

        # Copy final link maps to gdb and clean up.
        lu.copy_final_link_maps(step=7)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
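
Step 7 above hands the actual current-flow computation to Circuitscape in network mode, feeding it (core1, core2, cost-weighted distance) triples and reading back branch and node currents. As a purely conceptual analogue (not the tool's method), the same kind of centrality can be sketched with networkx, treating the reciprocal of cost-weighted distance as conductance; the numbers below are invented.

# Conceptual sketch only -- requires networkx (and scipy); values are toy data.
import networkx as nx

graph_list = [            # (core1, core2, cost-weighted distance)
    (1, 2, 15000.0),
    (2, 3, 22000.0),
    (1, 3, 40000.0),
    (3, 4, 18000.0),
]

G = nx.Graph()
for core_a, core_b, cwd in graph_list:
    # Shorter cost-weighted distance -> higher conductance (stronger link)
    G.add_edge(int(core_a), int(core_b), conductance=1.0 / cwd)

# Node analogue of the CF_Central field written to the core shapefile above
node_centrality = nx.current_flow_betweenness_centrality(G, weight="conductance")

# Edge analogue of the per-link currents read back from Circuitscape
edge_centrality = nx.edge_current_flow_betweenness_centrality(G, weight="conductance")

for core in sorted(node_centrality):
    print("core %d current-flow centrality: %.3f" % (core, node_centrality[core]))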
Exemplo n.º 14
0
def bar_master(argv=None):
    """Detect barriers using CWD outputs from Linkage Mapper tool."""
    if argv is None:
        argv = sys.argv
    cfg.configure(cfg.TOOL_BM, argv)
    gprint = lu.gprint

    try:

        lu.create_dir(cfg.LOGDIR)
        lu.create_dir(cfg.MESSAGEDIR)

        cfg.logFilePath = lu.create_log_file(cfg.PARAM_NAMES, argv)
        lu.log_metadata(rasters=[cfg.RESRAST_IN])

        lu.print_drive_warning()

        # Move adj and cwd results from earlier versions to datapass directory
        lu.move_old_results()

        lu.create_dir(cfg.OUTPUTDIR)
        lu.delete_dir(cfg.SCRATCHDIR)
        lu.create_dir(cfg.SCRATCHDIR)
        lu.create_dir(cfg.ARCSCRATCHDIR)
        lu.dashline(1)
        gprint('\nMaking local copy of resistance raster.')
        lu.delete_data(cfg.RESRAST)

        desc = arcpy.Describe(cfg.RESRAST_IN)
        arcpy.env.extent = desc.Extent
        if hasattr(desc, "catalogPath"):
            cfg.RESRAST_IN = desc.catalogPath
        try:
            arcpy.CopyRaster_management(cfg.RESRAST_IN, cfg.RESRAST)
        except arcpy.ExecuteError:
            msg = ('ERROR: Could not make a copy of your resistance raster. '
                   'Try re-starting ArcMap to release the file lock.')
            lu.raise_error(msg)

        arcpy.env.snapRaster = cfg.RESRAST

        if cfg.BARRIER_METH_MAX:
            cfg.SUM_BARRIERS = False
            lu.dashline(1)
            gprint('Calculating MAXIMUM barrier effects across core area '
                   'pairs')
            s6.step6_calc_barriers()

        if cfg.BARRIER_METH_SUM:
            cfg.SUM_BARRIERS = True
            gprint('')
            lu.dashline()
            gprint('Calculating SUM of barrier effects across core area pairs')
            s6.step6_calc_barriers()

        # Clean up
        lu.delete_dir(cfg.SCRATCHDIR)
        if not cfg.SAVEBARRIERDIR:
            lu.delete_dir(cfg.BARRIERBASEDIR)
        gprint('\nDone with barrier mapping.\n')

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.exit_with_python_error(_SCRIPT_NAME)
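
bar_master can run the barrier analysis twice: once combining per-pair improvement scores with a MAXIMUM across core-area pairs, and once with a SUM (cfg.SUM_BARRIERS). Below is a toy numpy sketch of how those two rules differ on per-pair improvement grids; the arrays are invented, and real grids come from step6_calc_barriers().

# Toy sketch of MAX vs SUM combination of per-pair barrier-improvement grids.
import numpy as np

pair_ab = np.array([[0.0, 2.0], [5.0, 0.0]])   # improvement scores for pair A-B
pair_ac = np.array([[1.0, 4.0], [3.0, 0.0]])   # improvement scores for pair A-C

# BARRIER_METH_MAX: each cell keeps its best improvement for any single pair
max_combined = np.maximum(pair_ab, pair_ac)    # [[1. 4.] [5. 0.]]

# BARRIER_METH_SUM: a cell helping several corridors accumulates credit
sum_combined = pair_ab + pair_ac               # [[1. 6.] [8. 0.]]

print(max_combined)
print(sum_combined)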
Exemplo n.º 15
0
def run_analysis():
    """Run main Linkage Priority analysis."""
    lm_util.gprint("Checking inputs")

    # check that LM finished with steps 3 and 5
    if (not os.path.exists(os.path.join(lm_env.DATAPASSDIR, "linkTable_s3.csv"))
            or not os.path.exists(os.path.join(lm_env.DATAPASSDIR,
                                               "linkTable_s5.csv"))):
        msg = ("ERROR: Project directory must contain a successful Linkage "
               "Mapper run with Steps 3 and 5.")
        raise Exception(msg)

    # check/create gdb for scratch
    if not os.path.isdir(lm_env.SCRATCHDIR):
        os.makedirs(lm_env.SCRATCHDIR)
    if not arcpy.Exists(os.path.join(lm_env.SCRATCHDIR, "scratch.gdb")):
        arcpy.CreateFileGDB_management(lm_env.SCRATCHDIR, "scratch.gdb")
    arcpy.env.scratchWorkspace = os.path.join(lm_env.SCRATCHDIR, "scratch.gdb")

    # check/create gdb for intermediate
    if lp_env.KEEPINTERMEDIATE:
        if not arcpy.Exists(os.path.join(lm_env.SCRATCHDIR,
                                         "intermediate.gdb")):
            arcpy.CreateFileGDB_management(lm_env.SCRATCHDIR,
                                           "intermediate.gdb")

    # set key dataset locations
    lcp_lines = os.path.join(lm_env.OUTPUTDIR, "link_maps.gdb",
                             lm_env.PREFIX + "_LCPs")
    cpv_raster = os.path.join(lm_env.OUTPUTGDB, lm_env.PREFIX + "_CPV")
    rci_raster = os.path.join(lm_env.OUTPUTGDB, lm_env.PREFIX + "_RCI")
    cutoff_text = str(lm_env.CWDTHRESH)
    if cutoff_text[-6:] == "000000":
        cutoff_text = cutoff_text[0:-6] + "m"
    elif cutoff_text[-3:] == "000":
        cutoff_text = cutoff_text[0:-3] + "k"
    trunc_raster = os.path.join(lm_env.OUTPUTGDB,
                                lm_env.PREFIX + "_corridors_truncated_at_" +
                                cutoff_text)
    norm_trunc_raster = os.path.join(lm_env.OUTPUTGDB,
                                     lm_env.PREFIX + "_NORMTRUNC")
    lp_raster = os.path.join(lm_env.OUTPUTGDB,
                             lm_env.PREFIX + "_linkage_priority")
    bp_raster = os.path.join(lm_env.OUTPUTGDB,
                             lm_env.PREFIX + "_blended_priority")

    # calc permeability
    calc_permeability(lcp_lines)

    # calc relative closeness
    calc_closeness(lcp_lines)

    # invert and normalize each corridor
    inv_norm()

    # calc Core Area Value (CAV) and its components for each core
    cav()

    # normalize Expert Corridor Importance Value (ECIV)
    eciv()

    # calc climate envelope
    if lp_env.CCERAST_IN:
        clim_env()

    # calc Corridor Specific Priority (CSP)
    prev_ws = arcpy.env.workspace
    sum_rasters = []
    count_non_null_cells_rasters = []
    max_rasters = []
    csp(sum_rasters, count_non_null_cells_rasters, max_rasters, lcp_lines)

    # calc Corridor Priority Value (CPV)
    cpv(sum_rasters, count_non_null_cells_rasters, max_rasters, cpv_raster)
    arcpy.env.workspace = prev_ws

    # calc Relative Corridor Importance (RCI)
    rci(cpv_raster, rci_raster)

    # calc Linkage Priority (LP)
    if lp_env.CALCLP:
        linkage_priority(rci_raster, trunc_raster, lp_raster)

    # calc Blended Priority (BP)
    if lp_env.CALCBP:
        if not lm_env.WRITETRUNCRASTER:
            msg = "When CALCBP = True, set WRITETRUNCRASTER = True in Linkage Mapper"
            lm_util.raise_error(msg)
        if not lp_env.CALCLP:
            msg = "When CALCBP = True, set CALCLP = True"
            lm_util.raise_error(msg)
        norm_trunc(trunc_raster, norm_trunc_raster)
        blended_priority(norm_trunc_raster, lp_raster, bp_raster)

    # save a copy of Cores as the "Output for ModelBuilder Precondition"
    if lp_env.OUTPUTFORMODELBUILDER:
        arcpy.CopyFeatures_management(lp_env.COREFC,
                                      lp_env.OUTPUTFORMODELBUILDER)
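
The truncated-corridor raster name built in run_analysis abbreviates the CWD threshold by swapping trailing zeros for a unit suffix. A small standalone version of that string logic, with illustrative values:

# Standalone version of the cutoff-label logic used in the raster name above.
def cutoff_label(cwd_thresh):
    """Abbreviate a CWD threshold, e.g. 200000 -> '200k', 5000000 -> '5m'."""
    text = str(cwd_thresh)
    if text.endswith("000000"):
        return text[:-6] + "m"
    if text.endswith("000"):
        return text[:-3] + "k"
    return text

assert cutoff_label(200000) == "200k"
assert cutoff_label(5000000) == "5m"
assert cutoff_label(123456) == "123456"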
Exemplo n.º 16
0
def STEP2_build_network():
    """Generates initial version of linkTable.csv based on euclidean distances
    and adjacencies of core areas.

    """
    try:
        lu.dashline(1)
        gprint('Running script ' + _SCRIPT_NAME)
        outlinkTableFile = lu.get_this_step_link_table(step=2)

        # Warning flag for missing distances in conefor file
        # dropFlag = False

        # ------------------------------------------------------------------
        # adjacency file created from s1_getAdjacencies.py
        if cfg.S2ADJMETH_EU and not path.exists(cfg.EUCADJFILE):
            msg = ('\nERROR: Euclidean adjacency file required from '
                  'Step 1: ' + cfg.EUCADJFILE)
            lu.raise_error(msg)

        # ------------------------------------------------------------------
        # adjacency file created from s1_getAdjacencies.py
        if cfg.S2ADJMETH_CW and not path.exists(cfg.CWDADJFILE):
            msg = ('\nERROR: Cost-weighted adjacency file required from '
                   'Step 1: ' + cfg.CWDADJFILE)
            lu.raise_error(msg)
        #----------------------------------------------------------------------

        # Load eucDists matrix from file and npy.sort
        if cfg.S2EUCDISTFILE is None:
            eucdist_file = generate_distance_file()
        else:
            eucdist_file = cfg.S2EUCDISTFILE

        eucDists_in = npy.loadtxt(eucdist_file, dtype='float64', comments='#')

        if eucDists_in.size == 3:  # If just one line in file
            eucDists = npy.zeros((1, 3), dtype='float64')
            eucDists[0, :] = eucDists_in
            numDists = 1

        else:
            eucDists = eucDists_in
            numDists = eucDists.shape[0]
        del eucDists_in
        eucDists[:, 0:2] = npy.sort(eucDists[:, 0:2])
        # Sort eucDists by 1st column, then by 2nd, then by 3rd
        ind = npy.lexsort((eucDists[:, 2], eucDists[:, 1], eucDists[:, 0]))
        eucDists = eucDists[ind]
        gprint('Core area distance list loaded.')
        gprint('number of pairwise distances = ' + str(numDists))

        #----------------------------------------------------------------------
        # Get rid of duplicate pairs of cores, retaining MINIMUM distance
        # between them
        numDistsOld = numDists
        for x in range(numDists - 2, -1, -1):
            if (eucDists[x, 0] == eucDists[x + 1, 0]
                and (eucDists[x, 1] == eucDists[x + 1, 1])):
                eucDists[x + 1, 0] = 0
        delRows = npy.asarray(npy.where(eucDists[:, 0] == 0))
        delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
        delRowsVector[:] = delRows[0, :]
        eucDists = lu.delete_row(eucDists, delRowsVector)
        del delRows
        del delRowsVector
        numDists = eucDists.shape[0]

        lu.dashline(1)
        gprint('Removed ' + str(numDistsOld - numDists) +
                          ' duplicate core pairs in Euclidean distance table.'
                          '\n')
        maxEucDistID = max(eucDists[:, 1])
        gprint('After removing duplicates and distances that exceed'
                          ' maximum, \nthere are ' + str(numDists) +
                          ' pairwise distances.  Max core ID number is ' +
                          str(int(maxEucDistID)) + '.')

        # Begin creating and manipulating linktables
        # zeros and many other array functions are imported from numpy
        linkTable = npy.zeros((len(eucDists), 10), dtype='int32')
        linkTable[:, 1:3] = eucDists[:, 0:2]
        linkTable[:, cfg.LTB_EUCDIST] = eucDists[:, 2]

        #----------------------------------------------------------------------
        # Get adjacencies using adj files from step 1.
        if cfg.S2ADJMETH_CW or cfg.S2ADJMETH_EU:  # Keep ALL links
            cwdAdjList = []
            eucAdjList = []
            if cfg.S2ADJMETH_CW:
                cwdAdjTable = get_adj_list(cfg.CWDADJFILE)
                cwdAdjList = []
                for i in range(0, len(cwdAdjTable)):
                    listEntry = (str(cwdAdjTable[i, 0]) + '_' + str(cwdAdjTable[i, 1]))
                    cwdAdjList.append(listEntry)
                gprint('Cost-weighted adjacency file loaded.')
                maxCwdAdjCoreID = max(cwdAdjTable[:, 1])
                del cwdAdjTable

            if cfg.S2ADJMETH_EU:
                eucAdjTable = get_adj_list(cfg.EUCADJFILE)
                eucAdjList = []
                for i in range(0, len(eucAdjTable)):
                    listEntry = (str(eucAdjTable[i, 0]) + '_' + str(eucAdjTable[i, 1]))
                    eucAdjList.append(listEntry)
                maxEucAdjCoreID = max(eucAdjTable[:, 1])
                del eucAdjTable

        # maxCoreId = max(maxEucAdjCoreID, maxCwdAdjCoreID, maxEucDistID)

        del eucDists

        gprint('Creating link table')
        linkTable[:, cfg.LTB_CWDADJ] = -1  # Adjacency not yet evaluated
        linkTable[:, cfg.LTB_EUCADJ] = -1
        if cfg.S2ADJMETH_CW or cfg.S2ADJMETH_EU:  
            for x in range(0, linkTable.shape[0]):
                listEntry = (str(linkTable[x, cfg.LTB_CORE1]) + '_' +
                             str(linkTable[x, cfg.LTB_CORE2]))
                if listEntry in cwdAdjList:
                    linkTable[x, cfg.LTB_CWDADJ] = 1
                else:
                    linkTable[x, cfg.LTB_CWDADJ] = 0
                if listEntry in eucAdjList:
                    linkTable[x, cfg.LTB_EUCADJ] = 1
                else:
                    linkTable[x, cfg.LTB_EUCADJ] = 0

        if cfg.S2ADJMETH_CW and cfg.S2ADJMETH_EU:  # "Keep all adjacent links"
            gprint("\nKeeping all adjacent links\n")
            rows = []
            for row in range(0, linkTable.shape[0]):
                if (linkTable[row, cfg.LTB_EUCADJ] == 0
                    and linkTable[row, cfg.LTB_CWDADJ] == 0):
                    rows.append(row)
            linkTable = lu.delete_row(linkTable, rows)

        elif cfg.S2ADJMETH_CW:
            gprint("\nKeeping cost-weighted adjacent links\n")
            delRows = npy.asarray(npy.where(linkTable[:, cfg.LTB_CWDADJ] == 0))
            delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
            delRowsVector[:] = delRows[0, :]
            linkTable = lu.delete_row(linkTable, delRowsVector)

        elif cfg.S2ADJMETH_EU:
            gprint("\nKeeping Euclidean adjacent links\n")
            delRows = npy.asarray(npy.where(linkTable[:, cfg.LTB_EUCADJ] == 0))
            delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
            delRowsVector[:] = delRows[0, :]
            linkTable = lu.delete_row(linkTable, delRowsVector)

        else:  # For Climate Corridor tool
            gprint("\nIgnoring adjacency and keeping all links\n")

        # if dropFlag:
            # lu.dashline(1)
            # gprint('NOTE: At least one adjacent link was dropped '
                          # 'because there was no Euclidean ')
            # gprint('distance value in the input distance file from '
                          # 'Conefor extension.')
            # lu.dashline(2)

        linkTable[:, cfg.LTB_CLUST1] = -1  # No clusters until later steps
        linkTable[:, cfg.LTB_CLUST2] = -1

        # not evaluated yet. May eventually have ability to get lcdistances
        # for adjacent cores from s1_getAdjacencies.py
        linkTable[:, cfg.LTB_CWDIST] = -1

        # Get list of core IDs, based on core area shapefile.
        coreList = lu.get_core_list(cfg.COREFC, cfg.COREFN)
        if len(npy.unique(coreList[:, 1])) < 2:
            lu.dashline(1)
            msg = ('\nERROR: There are fewer than two core '
                   'areas.\nThis means there is nothing to connect '
                   'with linkages. Bailing.')
            lu.raise_error(msg)

        # Set cfg.LTB_LINKTYPE to valid corridor code
        linkTable[:, cfg.LTB_LINKTYPE] = cfg.LT_CORR
        # Make sure linkTable is sorted
        ind = npy.lexsort((linkTable[:, cfg.LTB_CORE2],
              linkTable[:, cfg.LTB_CORE1]))
        if len(linkTable) == 0:
            msg = ('\nERROR: There are no valid core area '
                            'pairs. This can happen when core area numbers in '
                            'your Conefor distances text file do not match '
                            'those in your core area feature class.')
            lu.raise_error(msg)

        linkTable = linkTable[ind]

        # Assign link IDs in order
        for x in range(len(linkTable)):
            linkTable[x, cfg.LTB_LINKID] = x + 1

        #----------------------------------------------------------------------

        if cfg.CONNECTFRAGS:               
            connect_clusters(linkTable)     
        else:
            # Drop links that are too long
            gprint('\nChecking for corridors that are too long to map.')
            DISABLE_LEAST_COST_NO_VAL = False
            linkTable, numDroppedLinks = lu.drop_links(linkTable, cfg.MAXEUCDIST,
                                                       0, cfg.MINEUCDIST, 0,
                                                       DISABLE_LEAST_COST_NO_VAL)
            if numDroppedLinks > 0:
                lu.dashline(1)
                gprint('Removed ' + str(numDroppedLinks) +
                                  ' links that were too long in Euclidean '
                                  'distance.')

            # Write linkTable to disk
            gprint('Writing ' + outlinkTableFile)
            lu.write_link_table(linkTable, outlinkTableFile)
            linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s2.csv")
            lu.write_link_table(linkTable, linkTableLogFile)
            lu.report_links(linkTable)

            gprint('Creating shapefiles with linework for links.\n')
            try:
                lu.write_link_maps(outlinkTableFile, step=2)
            except:
                lu.write_link_maps(outlinkTableFile, step=2)
            gprint('Linework shapefiles written.')

            # if dropFlag:
                # print_conefor_warning()
            
    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
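
The duplicate-removal block in step 2 keeps only the shortest Euclidean distance for each core pair: each pair is ordered (low ID, high ID), the rows are lexsorted by core1, core2, then distance, and the later (longer) duplicates are flagged and deleted. A self-contained numpy illustration of that pattern with made-up distances:

# Toy numpy illustration of the duplicate-pair removal above (invented numbers).
import numpy as np

dists = np.array([
    [2.0, 1.0,  500.0],    # same pair listed twice with different distances
    [1.0, 2.0,  350.0],
    [1.0, 3.0, 1200.0],
])

dists[:, 0:2] = np.sort(dists[:, 0:2])                  # order each pair (low, high)
order = np.lexsort((dists[:, 2], dists[:, 1], dists[:, 0]))
dists = dists[order]                                    # sort by core1, core2, distance

keep = np.ones(len(dists), dtype=bool)
for x in range(len(dists) - 2, -1, -1):
    if dists[x, 0] == dists[x + 1, 0] and dists[x, 1] == dists[x + 1, 1]:
        keep[x + 1] = False                             # drop the longer duplicate

print(dists[keep])    # rows kept: (1, 2, 350) and (1, 3, 1200)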
Exemplo n.º 17
0
def main():
    """Iterates over LM, BM, and restoration tasks"""

    ## USER SETTINGS ######################################################
    ## Restoration Settings
    ## ALL input data must be in the same projection
    start_time = time.clock()
    restoreMaxROI = False  # Set to True to restore highest ROI
    # Set to False to restore strongest barrier
    restoredResistanceVal = 1  # Resistance value of restored habitat.  Must be 1 or greater.
    restorationDataGDB = (
        "C:\\barrierClassAnalysis\\RestorationINPUTS_July2013.gdb"
    )  # No spaces or special chars in paths or gdb names
    outputDir = "C:\\barrierClassAnalysis\\output"  # No spaces in path, avoid using dropbox or network drive
    # Project directories will be created in this (iter1, iter2...)
    # as will an output geodatabase
    resistanceRaster = "URWA_resis"  # Resistance raster.  Should be in input GDB
    coreFC = "URWA_HCAs_Doug_Grant"  # Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant'
    coreFN = "HCA_ID"  # Core area field name

    radius = 450  # restoration radius in meters
    iterations = 13  # number of restorations to perform
    minAgThreshold = 0.75  # if less than this proportion of ag in circle, don't consider restoring circle
    minImprovementVal = (
        0
    )  # Don't consider barriers below this improvement score (average improvement per meter diameter restored)
    parcelCostRaster = (
        "DougGrantParcelCost_m2_projected_90m"
    )  # Average per-m2 parcel cost per pixel. Snapped to resistance raster.
    restorationCostRaster = "restCostPer_m2"  # Right now this is just a raster with all pixels set to 0.113174
    agRaster = "ARESmaskp_projected"  # 1=Ag, 0 = not Ag
    barrierCombineMethod = "Maximum"  # Some restorations benefit multiple corridors.
    # 'Maximum' takes the greatest improvement across core area pairs
    # 'Sum' adds improvement scores across all pairs.
    cwdThresh = None  # Use cwdThresh = None for no threshold. Use cwdThresh = X to not consider
    # restorations more than X map units away from each core area.
    ## END USER SETTINGS ######################################################
    try:
        # Setup path and create directories
        gprint("Hey! Make sure everything is in the same projection!\n")
        gprint("Setting up paths and creating directories")
        sys.path.append("..\\toolbox\\scripts")
        resRast = os.path.join(restorationDataGDB, resistanceRaster)
        coreFCPath = os.path.join(restorationDataGDB, coreFC)

        # Set up a NEW output gdb (leave previous ones on drive)
        for i in range(1, 200):
            outputGDB = "restorationOutput" + str(i) + ".gdb"
            if not arcpy.Exists(os.path.join(outputDir, outputGDB)):
                break
            gprint("Previous output GDB " + outputGDB + " exists.  Delete to save disk space.")
        arcpy.CreateFileGDB_management(outputDir, outputGDB)
        outputGDB = os.path.join(outputDir, outputGDB)
        logFile = os.path.join(outputGDB, "Iterate Barriers" + str(i) + ".py")
        shutil.copyfile(__file__, logFile)  # write a copy of this file to output dir as a record of settings

        arcpy.env.cellSize = resRast
        arcpy.env.extent = resRast
        arcpy.env.snapRaster = resRast
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = outputGDB
        arcpy.env.workspace = outputGDB

        spatialref = arcpy.Describe(resRast).spatialReference
        mapunits = spatialref.linearUnitName
        gprint("Cell size = " + str(arcpy.env.cellSize) + " " + mapunits + "s")

        # Calculate fraction of ag within radius of each pixel
        gprint("Calculating purchase cost, fraction of ag, etc within radius of each pixel.")
        agRaster = os.path.join(restorationDataGDB, agRaster)
        inNeighborhood = NbrCircle(radius, "MAP")
        arcpy.env.extent = agRaster
        outFocalStats = arcpy.sa.FocalStatistics(agRaster, inNeighborhood, "MEAN", "NODATA")
        proportionAgRaster = os.path.join(outputGDB, "proportionAgRas")
        outFocalStats.save(proportionAgRaster)
        arcpy.env.extent = resRast

        # Calculate purchase cost of circles
        parcelCostRaster = os.path.join(restorationDataGDB, parcelCostRaster)
        arcpy.env.extent = parcelCostRaster
        outFocalStats = arcpy.sa.FocalStatistics(parcelCostRaster, inNeighborhood, "MEAN", "DATA")
        costFocalStatsRaster = os.path.join(outputGDB, "costFocalStatsRaster")
        outFocalStats.save(costFocalStatsRaster)
        arcpy.env.extent = resRast

        circleArea = float(npy.pi * radius * radius)
        outras = Raster(costFocalStatsRaster) * circleArea
        purchCostRaster = os.path.join(outputGDB, "purchaseCostRaster")
        outras.save(purchCostRaster)
        lu.delete_data(costFocalStatsRaster)

        # restCost = npy.pi * radius * radius * restCostPer_m2
        restorationCostRaster = os.path.join(restorationDataGDB, restorationCostRaster)
        outras = Raster(purchCostRaster) + (Raster(restorationCostRaster) * radius * radius * npy.pi)
        totalCostRaster = os.path.join(outputGDB, "totalCostRaster")
        outras.save(totalCostRaster)
        # lu.build_stats(totalCostRaster)

        # Create mask to remove areas without cost data
        arcpy.env.extent = totalCostRaster
        costMaskRaster = os.path.join(outputGDB, "costMaskRaster")
        costThresh = 0
        outCon = arcpy.sa.Con((Raster(totalCostRaster) > float(costThresh)), 1)
        outCon.save(costMaskRaster)
        arcpy.env.extent = resRast

        # Create mask to remove areas below ag threshold
        outCon = arcpy.sa.Con((Raster(proportionAgRaster) > float(minAgThreshold)), 1)
        agMaskRaster = os.path.join(outputGDB, "agMaskRaster")
        outCon.save(agMaskRaster)

        doStep1 = "true"
        doStep2 = "true"
        doStep5 = "false"
        for iter in range(1, iterations + 1):  # xxx
            start_time1 = time.clock()
            arcpy.env.cellSize = resRast  # Some env settings get changed by linkage mapper and must be reset here
            arcpy.env.extent = resRast
            arcpy.env.snapRaster = resRast
            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB

            lu.dashline(1)
            gprint("Running iteration number " + str(iter))
            projDir = os.path.join(outputDir, "iter" + str(iter) + "Proj")
            lu.create_dir(outputDir)
            lu.delete_dir(projDir)  # xxx
            lu.create_dir(projDir)
            if iter > 1:  # Copy previous s2 linktable to new project directory
                datapassDir = os.path.join(projDir, "datapass")
                lu.create_dir(datapassDir)
                projDir1 = os.path.join(outputDir, "iter1Proj")
                datapassDirIter1 = os.path.join(projDir1, "datapass")
                s2LinktableIter1 = os.path.join(datapassDirIter1, "linkTable_s2.csv")
                s2LinkTable = os.path.join(datapassDir, "linkTable_s2.csv")
                shutil.copyfile(s2LinktableIter1, s2LinkTable)

            # Run Linkage Mapper
            distFile = os.path.join(
                outputDir, coreFC + "_dists.txt"
            )  # Copy distances text file from earlier LM run to the output directory- speeds things up!
            if not os.path.exists(distFile):
                if iter == 1:
                    gprint("Will calculate distance file.")
                    distFile = "#"
                else:
                    projDir1 = os.path.join(outputDir, "iter1Proj")
                    distFile1 = os.path.join(projDir1, coreFC + "_dists.txt")
                    shutil.copyfile(distFile1, distFile)  # Put a copy here for future runs

            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB

            argv = (
                "lm_master.py",
                projDir,
                coreFCPath,
                coreFN,
                resRast,
                doStep1,
                doStep2,
                "Cost-Weighted & Euclidean",
                distFile,
                "true",
                "true",
                "false",
                "4",
                "Cost-Weighted",
                "true",
                doStep5,
                "10000",
                "#",
                "#",
            )
            gprint("Running " + str(argv))
            import lm_master  # xxx

            lm_master.lm_master(argv)  # xxx
            doStep1 = "false"  # Can skip for future iterations
            doStep2 = "false"  # Can skip for future iterations
            doStep5 = "false"  # Skipping for future iterations

            startRadius = str(radius)
            endRadius = str(radius)
            radiusStep = "0"
            saveRadiusRasters = "false"
            writePctRasters = "false"

            argv = (
                "barrier_master.py",
                projDir,
                resRast,
                startRadius,
                endRadius,
                radiusStep,
                barrierCombineMethod,
                saveRadiusRasters,
                writePctRasters,
                cwdThresh,
            )
            gprint("Running " + str(argv))
            import barrier_master  # xxx

            barrier_master.bar_master(argv)  # xxx

            arcpy.env.cellSize = resRast  # Some env settings get changed by linkage mapper and must be reset here
            arcpy.env.extent = resRast
            arcpy.env.snapRaster = resRast
            arcpy.env.overwriteOutput = True
            arcpy.env.scratchWorkspace = outputGDB
            arcpy.env.workspace = outputGDB

            gprint("Finding restoration circles with max barrier score / ROI")
            # Find points with max ROI
            PREFIX = os.path.basename(projDir)
            if barrierCombineMethod == "Sum":
                sumSuffix = "Sum"
            else:
                sumSuffix = ""
            barrierFN = PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" + str(radius)
            barrierRaster = os.path.join(projDir, "output", "barriers.gdb", barrierFN)
            if not arcpy.Exists(barrierRaster):
                msg = "Error: cannot find barrier output: " + barrierRaster
                lu.raise_error(msg)

            # arcpy.env.cellSize = agMaskRaster
            # arcpy.env.extent = agMaskRaster

            if iter > 1:
                gprint("Creating mask for previously restored areas")
                inNeighborhood = NbrCircle(radius, "MAP")
                arcpy.env.extent = allRestoredAreasRaster
                outFocalStats = arcpy.sa.FocalStatistics(allRestoredAreasRaster, inNeighborhood, "MEAN", "DATA")
                allRestoredFocalRaster = os.path.join(outputGDB, "allRestFocRas_iter" + str(iter))
                outFocalStats.save(allRestoredFocalRaster)  # Anything > 0 includes a previously restored area and is masked out below
                arcpy.env.extent = resRast
                restMaskRaster = os.path.join(outputGDB, "restMaskRaster_iter" + str(iter))
                minval = 0
                outCon = arcpy.sa.Con((Raster(allRestoredFocalRaster) == float(minval)), 1)
                outCon.save(restMaskRaster)

            # Candidate areas have not been restored, have cost data, meet
            # minimum improvement score criteria, and have enough ag in them
            candidateBarrierRaster = os.path.join(outputGDB, "candidateBarrierRaster" + "_iter" + str(iter))
            if iter > 1:
                gprint(
                    "Creating candidate restoration raster using barrier results, previous restorations, and selection criteria"
                )
                outCalc = (
                    Raster(costMaskRaster)
                    * Raster(agMaskRaster)
                    * Raster(barrierRaster)
                    * Raster(restMaskRaster)
                    * (radius * 2)
                )  # ROI scores will be in terms of total improvement (= score * diameter)
            else:
                outCalc = Raster(costMaskRaster) * Raster(agMaskRaster) * Raster(barrierRaster) * radius * 2

            minBarrierScore = minImprovementVal * radius * 2
            if restoredResistanceVal != 1:
                outCalc2 = outCalc - (2 * radius * (restoredResistanceVal - 1))
                outCon = arcpy.sa.Con((outCalc2 >= float(minBarrierScore)), outCalc2)
            else:
                outCon = arcpy.sa.Con((outCalc >= float(minBarrierScore)), outCalc)
            outCon.save(candidateBarrierRaster)
            lu.build_stats(candidateBarrierRaster)

            purchaseRoiRaster = os.path.join(outputGDB, "purchaseRoiRaster" + "_iter" + str(iter))
            outCalc = Raster(candidateBarrierRaster) / Raster(purchCostRaster)
            outCalc.save(purchaseRoiRaster)
            lu.build_stats(purchaseRoiRaster)

            totalRoiRaster = os.path.join(outputGDB, "purchaseRestRoiRaster" + "_iter" + str(iter))
            outCalc = Raster(candidateBarrierRaster) / Raster(totalCostRaster)
            outCalc.save(totalRoiRaster)
            lu.build_stats(totalRoiRaster)

            maxBarrier = arcpy.GetRasterProperties_management(candidateBarrierRaster, "MAXIMUM")
            gprint("Maximum barrier improvement score: " + str(maxBarrier.getOutput(0)))
            if float(maxBarrier.getOutput(0)) < 0:
                arcpy.AddWarning("\nNo barriers found that meet CWD or Ag threshold criteria.")

            maxPurchROI = arcpy.GetRasterProperties_management(purchaseRoiRaster, "MAXIMUM")
            gprint("Maximum purchase ROI score: " + str(maxPurchROI.getOutput(0)))

            maxROI = arcpy.GetRasterProperties_management(totalRoiRaster, "MAXIMUM")
            gprint("Maximum total ROI score: " + str(maxROI.getOutput(0)))

            if restoreMaxROI:
                outPoint = os.path.join(outputGDB, "maxRoiPoint" + "_iter" + str(iter))
                gprint("Choosing circle with maximum ROI to restore")
                outCon = arcpy.sa.Con((Raster(totalRoiRaster) >= float(maxROI.getOutput(0))), totalRoiRaster)
                maxRoiRaster = os.path.join(outputGDB, "maxRoiRaster")
                outCon.save(maxRoiRaster)
                # Save max ROI to point
                try:
                    arcpy.RasterToPoint_conversion(maxRoiRaster, outPoint)
                except:
                    msg = "Error: it looks like there are no viable restoration candidates."
                    lu.raise_error(msg)

            else:  # Restoring strongest barrier instead
                outPoint = os.path.join(outputGDB, "maxBarrierPoint" + "_iter" + str(iter))
                gprint("Choosing circle with maximum BARRIER IMPROVEMENT SCORE to restore")
                outCon = arcpy.sa.Con(
                    (Raster(candidateBarrierRaster) >= float(maxBarrier.getOutput(0))), candidateBarrierRaster
                )
                maxBarrierRaster = os.path.join(outputGDB, "maxBarrierRaster")
                outCon.save(maxBarrierRaster)
                # Save max barrier to point
                try:
                    arcpy.RasterToPoint_conversion(maxBarrierRaster, outPoint)
                except:
                    msg = "Error: it looks like there are no viable restoration candidates."
                    lu.raise_error(msg)

            gprint("Done evaluating candidate restorations")
            result = int(arcpy.GetCount_management(outPoint).getOutput(0))
            if result > 1:
                arcpy.AddWarning(
                    "Deleting points with identical ROI/improvement score values"
                )  # Would be better to retain point with max barrier score when we have multiple points with same ROI
                arcpy.DeleteIdentical_management(outPoint, "grid_code", 0.1, 0.1)
            arcpy.sa.ExtractMultiValuesToPoints(
                outPoint,
                [
                    [candidateBarrierRaster, "barrierScore"],
                    [purchCostRaster, "purchCost"],
                    [totalCostRaster, "totalCost"],
                    [purchaseRoiRaster, "purchaseROI"],
                    [totalRoiRaster, "totalROI"],
                ],
                "NONE",
            )
            arcpy.AddField_management(outPoint, "restorationNumber", "SHORT")
            arcpy.CalculateField_management(outPoint, "restorationNumber", iter)
            arcpy.AddField_management(outPoint, "radius", "DOUBLE")
            arcpy.CalculateField_management(outPoint, "radius", radius)
            arcpy.AddField_management(outPoint, "barrierScore_per_m", "DOUBLE")
            arcpy.CalculateField_management(
                outPoint, "barrierScore_per_m", "(float(!barrierScore!) / (!radius! * 2))", "PYTHON"
            )

            gprint("\nCreating restoration circles")
            if restoreMaxROI:
                circleFC = os.path.join(outputGDB, "maxRoiCircle" + "_iter" + str(iter))
            else:
                circleFC = os.path.join(outputGDB, "maxBarrierCircle" + "_iter" + str(iter))
            arcpy.Buffer_analysis(outPoint, circleFC, radius)
            gprint("Rasterizing restoration circles")
            if restoreMaxROI:
                circleRas = os.path.join(outputGDB, "maxRoiCircleRas" + "_iter" + str(iter))
            else:
                circleRas = os.path.join(outputGDB, "maxBarrierCircleRas" + "_iter" + str(iter))
            arcpy.FeatureToRaster_conversion(circleFC, "totalROI", circleRas, arcpy.env.cellSize)

            # restore raster
            gprint("Digitally restoring resistance raster")
            resRastRestored = os.path.join(outputGDB, "resRastRestored" + "_iter" + str(iter))
            outCon = arcpy.sa.Con(IsNull(circleRas), resRast, restoredResistanceVal)
            outCon.save(resRastRestored)

            allRestoredAreasRaster = os.path.join(outputGDB, "allRestoredAreas_iter" + str(iter))
            PrevRestoredAreasRaster = os.path.join(outputGDB, "allRestoredAreas_iter" + str(iter - 1))
            if iter == 1:
                outCon = arcpy.sa.Con(IsNull(circleRas), 0, 1)
            else:
                outCon = arcpy.sa.Con(
                    IsNull(circleRas), PrevRestoredAreasRaster, 1
                )  # Add this restoration to areas restored
            outCon.save(allRestoredAreasRaster)

            lu.delete_data(circleRas)
            resRast = resRastRestored  # Use for next iteration resistance raster

            # Add circle into feature class with all circles
            if restoreMaxROI:
                allCirclesFC = os.path.join(outputGDB, "allCirclesMaxROI")
            else:
                allCirclesFC = os.path.join(outputGDB, "allCirclesMaxBarriers")
            if iter == 1:
                arcpy.CopyFeatures_management(circleFC, allCirclesFC)
            else:
                arcpy.Append_management(circleFC, allCirclesFC, "TEST")
            gprint("Finished iteration #" + str(iter))
            start_time1 = lu.elapsed_time(start_time1)

        gprint("\nDone with iterations.")
        start_time = lu.elapsed_time(start_time)
        gprint("Outputs saved in: " + outputGDB)
        gprint("Back up your project directories if you want to save corridor/barrier results.")

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint("****Iteration script failed. Details follow.****")
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint("****Iteration script failed. Details follow.****")
        lu.exit_with_python_error(_SCRIPT_NAME)
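
Each candidate restoration circle in the iteration above is priced as (focal-mean parcel cost per m2 x circle area) plus (restoration cost per m2 x circle area), and scored as total barrier improvement (improvement-per-metre score x circle diameter) divided by that cost. Plugging illustrative numbers into the same arithmetic for one 450 m circle (the parcel cost and barrier score below are made up):

# Worked arithmetic for one candidate circle (illustrative numbers only).
import math

radius = 450.0                        # metres, as in the settings above
circle_area = math.pi * radius ** 2   # ~636,173 m2

mean_parcel_cost_per_m2 = 0.50        # hypothetical focal-mean parcel cost
restoration_cost_per_m2 = 0.113174    # value noted for restCostPer_m2 above

purchase_cost = mean_parcel_cost_per_m2 * circle_area                 # ~318,086
total_cost = purchase_cost + restoration_cost_per_m2 * circle_area    # ~390,084

barrier_score = 2500.0                # hypothetical improvement per metre of diameter
improvement = barrier_score * radius * 2

print("purchase ROI: %.3f" % (improvement / purchase_cost))
print("total ROI:    %.3f" % (improvement / total_cost))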
Exemplo n.º 18
0
def calc_lccs(normalize):
    try:  
        if normalize:
            mosaicBaseName = "_corridors"
            writeTruncRaster = cfg.WRITETRUNCRASTER
            outputGDB = cfg.OUTPUTGDB
            if cfg.CALCNONNORMLCCS:
                SAVENORMLCCS = False
            else:
                SAVENORMLCCS = cfg.SAVENORMLCCS
        else:
            mosaicBaseName = "_NON_NORMALIZED_corridors"
            SAVENORMLCCS = False
            outputGDB = cfg.EXTRAGDB
            writeTruncRaster = False

        lu.dashline(1)
        gprint('Running script ' + _SCRIPT_NAME)
        linkTableFile = lu.get_prev_step_link_table(step=5)
        if cfg.useArcpy:
            arcpy.env.workspace = cfg.SCRATCHDIR
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
            arcpy.env.overwriteOutput = True  
            arcpy.env.compression = "NONE"
        else:        
            gp.workspace = cfg.SCRATCHDIR
            gp.scratchWorkspace = cfg.ARCSCRATCHDIR
            gp.OverwriteOutput = True            

        if cfg.MAXEUCDIST is not None:
            gprint('Max Euclidean distance between cores')
            gprint('for linkage mapping set to ' +
                              str(cfg.MAXEUCDIST))

        if cfg.MAXCOSTDIST is not None:
            gprint('Max cost-weighted distance between cores')
            gprint('for linkage mapping set to ' +
                              str(cfg.MAXCOSTDIST))


        # set the analysis extent and cell size to that of the resistance
        # surface
        if cfg.useArcpy:
            arcpy.env.Extent = cfg.RESRAST
            arcpy.env.cellSize = cfg.RESRAST
            arcpy.env.snapRaster = cfg.RESRAST
            arcpy.env.mask = cfg.RESRAST
        else:
            gp.Extent = (gp.Describe(cfg.RESRAST)).Extent 
            gp.cellSize = gp.Describe(cfg.RESRAST).MeanCellHeight
            gp.mask = cfg.RESRAST
            gp.snapraster = cfg.RESRAST

        linkTable = lu.load_link_table(linkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg =('\nThere are no corridors to map. Bailing.')
            lu.raise_error(msg)


        if not cfg.STEP3 and not cfg.STEP4:
            # re-check for links that are too long or in case script run out of
            # sequence with more stringent settings
            gprint('Double-checking for corridors that are too long to map.')
            DISABLE_LEAST_COST_NO_VAL = True
            linkTable,numDroppedLinks = lu.drop_links(
                linkTable, cfg.MAXEUCDIST, cfg.MINEUCDIST, cfg.MAXCOSTDIST,
                cfg.MINCOSTDIST, DISABLE_LEAST_COST_NO_VAL)

        # Added to try to speed up:
        gp.pyramid = "NONE"
        gp.rasterstatistics = "NONE"

        # set up directories for normalized lcc and mosaic grids
        dirCount = 0
        gprint("Creating output folder: " + cfg.LCCBASEDIR)
        lu.delete_dir(cfg.LCCBASEDIR)
        gp.CreateFolder_management(path.dirname(cfg.LCCBASEDIR),
                                       path.basename(cfg.LCCBASEDIR))
        gp.CreateFolder_management(cfg.LCCBASEDIR, cfg.LCCNLCDIR_NM)
        clccdir = path.join(cfg.LCCBASEDIR, cfg.LCCNLCDIR_NM)
        # mosaicGDB = path.join(cfg.LCCBASEDIR, "mosaic.gdb")
        # gp.createfilegdb(cfg.LCCBASEDIR, "mosaic.gdb")
        #mosaicRaster = mosaicGDB + '\\' + "nlcc_mos" # Full path
        gprint("")
        if normalize:
            gprint('Normalized least-cost corridors will be written '
                          'to ' + clccdir + '\n')
        PREFIX = cfg.PREFIX

        # Add CWD layers for core area pairs to produce NORMALIZED LCC layers
        numGridsWritten = 0
        coreList = linkTable[:,cfg.LTB_CORE1:cfg.LTB_CORE2+1]
        coreList = npy.sort(coreList)

        x = 0
        linkCount = 0
        endIndex = numLinks
        while x < endIndex:
            if (linkTable[x, cfg.LTB_LINKTYPE] < 1): # If not a valid link
                x = x + 1
                continue
                
            linkCount = linkCount + 1
            start_time = time.clock() 
            
            linkId = str(int(linkTable[x, cfg.LTB_LINKID]))

            # source and target cores
            corex=int(coreList[x,0])
            corey=int(coreList[x,1])

            # Get cwd rasters for source and target cores
            cwdRaster1 = lu.get_cwd_path(corex)
            cwdRaster2 = lu.get_cwd_path(corey)

            if not gp.Exists(cwdRaster1):
                msg = ('\nError: cannot find cwd raster:\n' + cwdRaster1)
                lu.raise_error(msg)
            if not gp.Exists(cwdRaster2):
                msg = ('\nError: cannot find cwd raster:\n' + cwdRaster2)
                lu.raise_error(msg)

            
            lccNormRaster = path.join(clccdir, str(corex) + "_" +
                                      str(corey))# + ".tif")
            if cfg.useArcpy: 
                arcpy.env.Extent = "MINOF"
            else:
                gp.Extent = "MINOF"

            # FIXME: need to check for this?:
            # if exists already, don't re-create
            #if not gp.Exists(lccRaster):

            link = lu.get_links_from_core_pairs(linkTable, corex, corey)

            offset = 10000 

            # Normalized lcc rasters are created by adding cwd rasters and
            # subtracting the least cost distance between them.
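            # Illustrative (hypothetical) numbers: if CWD to core A is 4200 at
            # a cell, CWD to core B is 6300, and the least-cost distance
            # between A and B is 10500, the raw normalized value is
            # 4200 + 6300 - 10500 = 0 on the best path and grows as cells lie
            # farther off it. The 10000 'offset' below is folded into lcDist
            # so stored values stay positive (best path ends up near 10000).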
            count = 0
            if arcpyAvailable:
                cfg.useArcpy = True # Fixes Canran Liu's bug with lcDist
            if cfg.useArcpy:
                
                lcDist = (float(linkTable[link,cfg.LTB_CWDIST]) - offset) 
                
                if normalize:
                    statement = ('outras = Raster(cwdRaster1) + Raster('
                        'cwdRaster2) - lcDist; outras.save(lccNormRaster)') 
                                                
                else:
                    statement = ('outras =Raster(cwdRaster1) + Raster('
                                'cwdRaster2); outras.save(lccNormRaster)')
            else:
                if normalize:
                    lcDist = str(linkTable[link,cfg.LTB_CWDIST] - offset)  
                    expression = (cwdRaster1 + " + " + cwdRaster2 + " - " 
                                  + lcDist)
                else:
                    expression = (cwdRaster1 + " + " + cwdRaster2) 
                statement = ('gp.SingleOutputMapAlgebra_sa(expression, '
                     'lccNormRaster)')
            count = 0
            while True:
                try:
                    exec(statement)
                    randomerror()
                except:
                    count, tryAgain = lu.retry_arc_error(count, statement)
                    if not tryAgain:
                        exec(statement)
                else:
                    break
            cfg.useArcpy = False  # End fix for Canran Liu's bug with lcDist
            
            if normalize and cfg.useArcpy: 
                try: 
                    minObject = gp.GetRasterProperties(lccNormRaster, "MINIMUM") 
                    rasterMin = float(str(minObject.getoutput(0)))
                except:
                    gp.AddWarning('\n------------------------------------------------')
                    gp.AddWarning('WARNING: Raster minimum check failed in step 5. \n'
                        'This may mean the output rasters are corrupted. Please \n'
                        'be sure to check for valid rasters in '+ outputGDB)
                    rasterMin = 0
                tolerance = (float(gp.cellSize) * -10) + offset
                if rasterMin < tolerance:
                    lu.dashline(1)
                    msg = ('WARNING: Minimum value of a corridor #' + str(x+1) 
                           + ' is much less than zero ('+str(rasterMin)+').'
                           '\nThis could mean that BOUNDING CIRCLE BUFFER DISTANCES '
                           'were too small and a corridor passed outside of a '
                           'bounding circle, or that a corridor passed outside of the '
                           'resistance map. \n')
                    gp.AddWarning(msg)

            
            if cfg.useArcpy: 
                arcpy.env.Extent = cfg.RESRAST
            else:
                gp.Extent = (gp.Describe(cfg.RESRAST)).Extent

            mosaicDir = path.join(cfg.LCCBASEDIR,'mos'+str(x+1))  
            lu.create_dir(mosaicDir) 
            mosFN = 'mos'  # TODO: change to '.tif' and move
            mosaicRaster = path.join(mosaicDir,mosFN) 
                       
            if numGridsWritten == 0 and dirCount == 0:
                #If this is the first grid then copy rather than mosaic
                arcObj.CopyRaster_management(lccNormRaster, mosaicRaster) 
            else:
                
                rasterString = '"'+lccNormRaster+";"+lastMosaicRaster+'"'
                statement = ('arcObj.MosaicToNewRaster_management('
                            'rasterString,mosaicDir,mosFN, "", '
                            '"32_BIT_FLOAT", gp.cellSize, "1", "MINIMUM", '
                            '"MATCH")') 
                # statement = ('arcpy.Mosaic_management(lccNormRaster, '
                                 # 'mosaicRaster, "MINIMUM", "MATCH")') 
                
                count = 0
                while True:
                    try:
                        lu.write_log('Executing mosaic for link #'+str(linkId))
                        exec statement
                        lu.write_log('Done with mosaic.')
                        randomerror()
                    except:
                        count,tryAgain = lu.retry_arc_error(count,statement)
                        lu.delete_data(mosaicRaster)
                        lu.delete_dir(mosaicDir)
                        # Try a new directory
                        mosaicDir = path.join(cfg.LCCBASEDIR,'mos'+str(x+1)+ '_' + str(count))
                        lu.create_dir(mosaicDir)
                        mosaicRaster = path.join(mosaicDir,mosFN)                        
                        if not tryAgain:    
                            exec statement
                    else: break
            endTime = time.clock()
            processTime = round((endTime - start_time), 2)

            if normalize == True:
                printText = "Normalized and mosaicked "
            else:
                printText = "Mosaicked NON-normalized "
            gprint(printText + "corridor for link ID #" + str(linkId) +
                    " connecting core areas " + str(corex) +
                    " and " + str(corey)+ " in " +
                    str(processTime) + " seconds. " + str(int(linkCount)) +
                    " out of " + str(int(numCorridorLinks)) + " links have been "
                    "processed.")

            # temporarily disable links in linktable - don't want to mosaic
            # them twice
            for y in range (x+1,numLinks):
                corex1 = int(coreList[y,0])
                corey1 = int(coreList[y,1])
                if corex1 == corex and corey1 == corey:
                    linkTable[y,cfg.LTB_LINKTYPE] = (
                        linkTable[y,cfg.LTB_LINKTYPE] + 1000)
                elif corex1==corey and corey1==corex:
                    linkTable[y,cfg.LTB_LINKTYPE] = (
                            linkTable[y,cfg.LTB_LINKTYPE] + 1000)

            numGridsWritten = numGridsWritten + 1
            if not SAVENORMLCCS:
                lu.delete_data(lccNormRaster)
                lu.delete_dir(clccdir)
                lu.create_dir(clccdir)  
            else:
                if numGridsWritten == 100:
                    # We only write up to 100 grids to any one folder
                    # because otherwise Arc slows to a crawl
                    dirCount = dirCount + 1
                    numGridsWritten = 0
                    clccdir = path.join(cfg.LCCBASEDIR,
                                        cfg.LCCNLCDIR_NM + str(dirCount))
                    gprint("Creating output folder: " + clccdir)
                    gp.CreateFolder_management(cfg.LCCBASEDIR,
                                               path.basename(clccdir))

            if numGridsWritten > 1 or dirCount > 0:                                       
                lu.delete_data(lastMosaicRaster)
                lu.delete_dir(path.dirname(lastMosaicRaster))

            lastMosaicRaster = mosaicRaster
            x = x + 1
            
        #rows that were temporarily disabled
        rows = npy.where(linkTable[:,cfg.LTB_LINKTYPE]>1000)
        linkTable[rows,cfg.LTB_LINKTYPE] = (
            linkTable[rows,cfg.LTB_LINKTYPE] - 1000)
        # ---------------------------------------------------------------------

        # Create output geodatabase
        if not gp.exists(outputGDB):
            gp.createfilegdb(cfg.OUTPUTDIR, path.basename(outputGDB))

        if cfg.useArcpy:
            arcpy.env.workspace = outputGDB
        else:        
            gp.workspace = outputGDB

        gp.pyramid = "NONE"
        gp.rasterstatistics = "NONE"

        
        # Copy mosaic raster to output geodatabase
        saveFloatRaster = False
        if saveFloatRaster == True:
            floatRaster = outputGDB + '\\' + PREFIX + mosaicBaseName + '_flt' # Full path 
            statement = 'arcObj.CopyRaster_management(mosaicRaster, floatRaster)'
            try:
                exec statement
            except:
                pass
                

        # ---------------------------------------------------------------------
        # convert mosaic raster to integer
        intRaster = path.join(outputGDB,PREFIX + mosaicBaseName)
        if cfg.useArcpy:
            statement = ('outras = Int(Raster(mosaicRaster) - offset + 0.5); ' 
                        'outras.save(intRaster)')
        else:
            expression = "int(" + mosaicRaster + " - " + str(offset) + " + 0.5)"
            statement = 'gp.SingleOutputMapAlgebra_sa(expression, intRaster)'
        count = 0
        while True:
            try: 
                exec statement
                randomerror()
            except:
                count,tryAgain = lu.retry_arc_error(count,statement)
                if not tryAgain: exec statement
            else: break
        # ---------------------------------------------------------------------       
        

        if writeTruncRaster:
            # -----------------------------------------------------------------
            # Set anything beyond cfg.CWDTHRESH to NODATA.
            if arcpyAvailable:
                cfg.useArcpy = True # For Alissa Pump's error with 10.1
            cutoffText = str(cfg.CWDTHRESH)
            if cutoffText[-6:] == '000000':
                cutoffText = cutoffText[0:-6]+'m' 
            elif cutoffText[-3:] == '000':
                cutoffText = cutoffText[0:-3]+'k' 
            
            truncRaster = (outputGDB + '\\' + PREFIX + mosaicBaseName + 
                           '_truncated_at_' + cutoffText)

            count = 0
            if cfg.useArcpy:
                statement = ('outRas = Raster(intRaster) * '
                            '(Con(Raster(intRaster) <= cfg.CWDTHRESH,1)); '
                            'outRas.save(truncRaster)')
            else:
                expression = ("(" + intRaster + " * (con(" + intRaster + "<= " 
                              + str(cfg.CWDTHRESH) + ",1)))")
                statement = ('gp.SingleOutputMapAlgebra_sa(expression, '
                                                          'truncRaster)')
            count = 0
            while True:
                try: 
                    exec statement
                    randomerror()
                except:
                    count,tryAgain = lu.retry_arc_error(count,statement)
                    if not tryAgain: exec statement
                else: break
            cfg.useArcpy = False # End fix for Alissa Pump's error with 10.1                
        # ---------------------------------------------------------------------
        # Check for unreasonably low minimum NLCC values    
        try:
            mosaicGrid = path.join(cfg.LCCBASEDIR,'mos') 
            # Copy to grid to test
            arcObj.CopyRaster_management(mosaicRaster, mosaicGrid)
            minObject = gp.GetRasterProperties(mosaicGrid, "MINIMUM") 
            rasterMin = float(str(minObject.getoutput(0)))
        except:
            gp.AddWarning('\n------------------------------------------------')
            gp.AddWarning('WARNING: Raster minimum check failed in step 5. \n'
                'This may mean the output rasters are corrupted. Please \n'
                'be sure to check for valid rasters in '+ outputGDB)
            rasterMin = 0
        tolerance = (float(gp.cellSize) * -10)
           
        if rasterMin < tolerance:
            lu.dashline(1)
            msg = ('WARNING: Minimum value of mosaicked corridor map is ' 
                   'much less than zero ('+str(rasterMin)+').'
                   '\nThis could mean that BOUNDING CIRCLE BUFFER DISTANCES '
                   'were too small and a corridor passed outside of a '
                   'bounding circle, or that a corridor passed outside of the '
                   'resistance map. \n')
            gp.AddWarning(msg) 
                            

        gprint('\nWriting final LCP maps...')
        if cfg.STEP4:
            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=4,
                                                     thisStep=5)
        elif cfg.STEP3:
            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=3,
                                                     thisStep=5)
        else:
            # Don't know if step 4 was run, since this is started at step 5.
            # Use presence of previous linktable files to figure this out.
            # Linktable name includes step number.
            prevLinkTableFile = lu.get_prev_step_link_table(step=5)
            prevStepInd = len(prevLinkTableFile) - 5
            lastStep = prevLinkTableFile[prevStepInd]

            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep,
                                                     thisStep=5)

        outlinkTableFile = lu.get_this_step_link_table(step=5)
        gprint('Updating ' + outlinkTableFile)
        lu.write_link_table(linkTable, outlinkTableFile)

        linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s5.csv")
        lu.write_link_table(linkTable, linkTableLogFile)

        linkTableFinalFile = path.join(cfg.OUTPUTDIR, PREFIX +
                                       "_linkTable_s5.csv")
        lu.write_link_table(finalLinkTable, linkTableFinalFile)
        gprint('Copy of final linkTable written to '+
                          linkTableFinalFile)

        gprint('Creating shapefiles with linework for links.')
        try:
            lu.write_link_maps(outlinkTableFile, step=5)
        except:
            lu.write_link_maps(outlinkTableFile, step=5)

        # Create final linkmap files in output directory, and remove files from
        # scratch.
        lu.copy_final_link_maps(step=5)

        if not SAVENORMLCCS:
            lu.delete_dir(cfg.LCCBASEDIR)

        # Build statistics for corridor rasters
        gp.addmessage('\nBuilding output statistics and pyramids '
                          'for corridor raster')
        lu.build_stats(intRaster)

        if writeTruncRaster:
            gp.addmessage('Building output statistics '
                              'for truncated corridor raster')
            lu.build_stats(truncRaster)

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 5. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 5. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
Exemplo n.º 19
0
def circuitscape_master(argv=None):
    """

    """
    gprint = lu.gprint
    gwarn = arcpy.AddWarning

    if argv is None:
        argv = sys.argv    
    
    cfg.configure(cfg.TOOL_CS, argv)
    gp = cfg.gp

    
    try:
        lu.create_dir(cfg.LOGDIR)
        lu.create_dir(cfg.MESSAGEDIR)
        cfg.logFilePath=lu.create_log_file(cfg.MESSAGEDIR, cfg.TOOL, 
                                           cfg.PARAMS) 

        CSPATH = lu.get_cs_path()
        if CSPATH == None:
            msg = ('Cannot find an installation of Circuitscape 3.5.5'
                    '\nor greater in your Program Files directory.')
            arcpy.AddError(msg)
            lu.write_log(msg)
            exit(1)
            
        try:
            csDir, fn = path.split(CSPATH)
            if 'flush' not in open(path.join(csDir,'cs_compute.py')).read():
                gwarn('\n---------------------------------------------')
                gwarn('Your version of Circuitscape is out of date. ')
                gwarn('---------------------------------------------\n')
                gwarn('Please get the latest from www.circuitscape.org.')
                gwarn('The new version interacts more smoothly with ArcMap.')
                gprint('Proceeding...\n')
        except: pass
        
        lu.print_drive_warning()
        # Check core ID field.
        lu.check_cores(cfg.COREFC, cfg.COREFN)

        gp.OutputCoordinateSystem = gp.describe(cfg.COREFC).SpatialReference
        # Set data frame spatial reference to coordinate system of input data 
        lu.set_dataframe_sr()
        
        gp.pyramid = "NONE"
        gp.rasterstatistics = "NONE"

        # Move adj and cwd results from earlier versions to datapass directory
        lu.move_old_results()
    
        if cfg.CWDCUTOFF > 0:
            lu.delete_dir(cfg.SCRATCHDIR)

        # restart code- in progress
        if cfg.CWDCUTOFF < 0:
            cfg.CWDCUTOFF = cfg.CWDCUTOFF * -1
            
        if not cfg.DOPINCH and not cfg.DOCENTRALITY:
            msg = ('ERROR: Please choose at least one option: pinch point or\n'
                    'network centrality analysis.')
            lu.raise_error(msg)

        lu.create_dir(cfg.SCRATCHDIR)
        lu.create_dir(cfg.ARCSCRATCHDIR)

        if cfg.DO_ALLPAIRS:
            #  Fixme: move raster path to config
            S5CORRIDORRAS = path.join(cfg.OUTPUTGDB,cfg.PREFIX + "_corridors")
            if not gp.Exists(S5CORRIDORRAS):
                S5CORRIDORRAS = path.join(cfg.OUTPUTGDB, cfg.PREFIX +
                                         "_lcc_mosaic_int")
            if not gp.Exists(S5CORRIDORRAS):
                msg = ('ERROR: Corridor raster created in step 5 is required'
                        '\nfor all-pair analyses, but was not found.')
                lu.raise_error(msg)
        if cfg.DOPINCH:
            if cfg.CWDCUTOFF == '#' or cfg.CWDCUTOFF == 0:
                msg = ('ERROR: CWD cutoff distance is required for pinch point'
                        ' analyses.')
                lu.raise_error(msg)
            
            # Make a local grid copy of resistance raster-
            # will run faster than gdb.
            lu.delete_data(cfg.RESRAST)
            if not gp.Exists(cfg.RESRAST_IN):
                msg = ('ERROR: Resistance raster is required for pinch point'
                        ' analyses, but was not found.')
                lu.raise_error(msg)
            
            desc = arcpy.Describe(cfg.RESRAST_IN)
            if hasattr(desc, "catalogPath"):
                cfg.RESRAST_IN = arcpy.Describe(cfg.RESRAST_IN).catalogPath
            
            arcpy.env.extent = cfg.RESRAST_IN
            arcpy.env.snapRaster = cfg.RESRAST_IN
            gprint('\nMaking local copy of resistance raster.')
            gp.CopyRaster_management(cfg.RESRAST_IN, cfg.RESRAST)

        if cfg.DOCENTRALITY:
            gprint("Creating output folder: " + cfg.CENTRALITYBASEDIR)
            if path.exists(cfg.CENTRALITYBASEDIR):
                shutil.rmtree(cfg.CENTRALITYBASEDIR)
            lu.create_dir(cfg.CENTRALITYBASEDIR)
            gp.CreateFolder_management(cfg.CENTRALITYBASEDIR,
                                        cfg.CIRCUITOUTPUTDIR_NM)
            gp.CreateFolder_management(cfg.CENTRALITYBASEDIR,
                                        cfg.CIRCUITCONFIGDIR_NM)
            lu.clean_out_workspace(cfg.CORECENTRALITYGDB)

            s7.STEP7_calc_centrality()
            if not cfg.SAVECENTRALITYDIR:
                lu.delete_dir(cfg.CENTRALITYBASEDIR)

        if cfg.DOPINCH:
            if cfg.CWDCUTOFF > 0: # Negative values mean we're restarting
                gprint("Creating output folder: " + cfg.CIRCUITBASEDIR)
                lu.delete_dir(cfg.CIRCUITBASEDIR)
                lu.create_dir(cfg.CIRCUITBASEDIR)
                gp.CreateFolder_management(cfg.CIRCUITBASEDIR,
                                        cfg.CIRCUITOUTPUTDIR_NM)
                gp.CreateFolder_management(cfg.CIRCUITBASEDIR,
                                        cfg.CIRCUITCONFIGDIR_NM)

            s8.STEP8_calc_pinchpoints()            
            
            if not cfg.SAVE_TEMP_CIRCUIT_FILES:
                lu.delete_dir(cfg.SCRATCHDIR)
            if not cfg.SAVECIRCUITDIR:
                lu.delete_dir(cfg.CIRCUITBASEDIR)

        gprint('\nDONE!\n')

    # Return GEOPROCESSING specific errors
    except arcgisscripting.ExecuteError:
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.exit_with_python_error(_SCRIPT_NAME)
Exemplo n.º 20
0
def STEP8_calc_pinchpoints():
    """ Maps pinch points in Linkage Mapper corridors using Circuitscape
        given CWD calculations from s3_calcCwds.py.

    """
    try:
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)

        restartFlag = False
        if cfg.CWDCUTOFF < 0:
            cfg.CWDCUTOFF = cfg.CWDCUTOFF * -1
            restartFlag = True  # Restart code in progress

        CSPATH = lu.get_cs_path()
        outputGDB = path.join(cfg.OUTPUTDIR, path.basename(cfg.PINCHGDB))

        arcpy.OverWriteOutput = True
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterstatistics = "NONE"

        # set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST
        arcpy.snapraster = cfg.RESRAST

        resRaster = cfg.RESRAST
        arcpy.env.extent = "MINOF"

        minObject = arcpy.GetRasterProperties_management(resRaster, "MINIMUM")
        rasterMin = float(str(minObject.getOutput(0)))
        if rasterMin <= 0:
            msg = (
                'Error: resistance raster cannot have 0 or negative values.')
            lu.raise_error(msg)

        if cfg.DO_ADJACENTPAIRS:
            prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep=8)
            if not arcpy.Exists(prevLcpShapefile):
                msg = ('Cannot find an LCP shapefile from step 5.  Please '
                       'rerun that step and any previous ones if necessary.')
                lu.raise_error(msg)

            # Remove lcp shapefile
            lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s8.shp")
            lu.delete_data(lcpShapefile)

        inLinkTableFile = lu.get_prev_step_link_table(step=8)
        linkTable = lu.load_link_table(inLinkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg = ('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        if linkTable.shape[1] < 16:  # If linktable has no entries from prior
            # centrality or pinchpint analyses
            extraCols = npy.zeros((numLinks, 6), dtype="float64")
            linkTable = linkTable[:, 0:10]
            linkTable = npy.append(linkTable, extraCols, axis=1)
            linkTable[:, cfg.LTB_LCPLEN] = -1
            linkTable[:, cfg.LTB_CWDEUCR] = -1
            linkTable[:, cfg.LTB_CWDPATHR] = -1
            linkTable[:, cfg.LTB_EFFRESIST] = -1
            linkTable[:, cfg.LTB_CWDTORR] = -1
            linkTable[:, cfg.LTB_CURRENT] = -1
            del extraCols

        # set up directories for circuit and circuit mosaic grids
        # Create output geodatabase
        if not arcpy.Exists(cfg.PINCHGDB):
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                           path.basename(cfg.PINCHGDB))

        mosaicRaster = path.join(cfg.CIRCUITBASEDIR, "current_mos" + tif)
        coresToProcess = npy.unique(linkTable[:,
                                              cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)
        del coresToProcess

        lu.dashline(0)
        coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        coreList = npy.sort(coreList)
        #gprint('There are ' + str(len(npy.unique(coreList))) ' core areas.')

        INCIRCUITDIR = cfg.CIRCUITBASEDIR
        OUTCIRCUITDIR = path.join(cfg.CIRCUITBASEDIR, cfg.CIRCUITOUTPUTDIR_NM)
        CONFIGDIR = path.join(INCIRCUITDIR, cfg.CIRCUITCONFIGDIR_NM)

        # Cutoff value text to append to filenames
        cutoffText = str(cfg.CWDCUTOFF)
        if cutoffText[-6:] == '000000':
            cutoffText = cutoffText[0:-6] + 'm'
        elif cutoffText[-3:] == '000':
            cutoffText = cutoffText[0:-3] + 'k'

        if cfg.SQUARERESISTANCES:
            # Square resistance values
            squaredRaster = path.join(cfg.SCRATCHDIR, 'res_sqr')
            arcpy.env.workspace = cfg.SCRATCHDIR
            arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
            outRas = Raster(resRaster) * Raster(resRaster)
            outRas.save(squaredRaster)
            resRaster = squaredRaster

        if cfg.DO_ADJACENTPAIRS:
            linkLoop = 0
            lu.dashline(1)
            gprint('Mapping pinch points in individual corridors \n'
                   'using Circuitscape.')
            lu.dashline(1)
            gprint('If you try to cancel your run and the Arc dialog hangs, ')
            gprint('you can kill Circuitscape by opening Windows Task Manager')
            gprint('and ending the cs_run.exe process.')
            lu.dashline(2)

            for x in range(0, numLinks):
                linkId = str(int(linkTable[x, cfg.LTB_LINKID]))
                if not (linkTable[x, cfg.LTB_LINKTYPE] > 0):
                    continue
                linkLoop = linkLoop + 1
                linkDir = path.join(cfg.SCRATCHDIR, 'link' + linkId)
                if restartFlag == True and path.exists(linkDir):
                    gprint('continuing')
                    continue
                restartFlag = False
                lu.create_dir(linkDir)
                start_time1 = time.clock()

                # source and target cores
                corex = int(coreList[x, 0])
                corey = int(coreList[x, 1])

                # Get cwd rasters for source and target cores
                cwdRaster1 = lu.get_cwd_path(corex)
                cwdRaster2 = lu.get_cwd_path(corey)

                lccNormRaster = path.join(linkDir, 'lcc_norm')
                arcpy.env.extent = "MINOF"

                link = lu.get_links_from_core_pairs(linkTable, corex, corey)
                lcDist = float(linkTable[link, cfg.LTB_CWDIST])

                # Normalized lcc rasters are created by adding cwd rasters
                # and subtracting the least cost distance between them.
                outRas = Raster(cwdRaster1) + Raster(cwdRaster2) - lcDist
                outRas.save(lccNormRaster)

                #create raster mask
                resMaskRaster = path.join(linkDir, 'res_mask' + tif)

                #create raster mask
                outCon = arcpy.sa.Con(
                    Raster(lccNormRaster) <= cfg.CWDCUTOFF, 1)
                outCon.save(resMaskRaster)

                # Convert to poly.  Use as mask to clip resistance raster.
                resMaskPoly = path.join(linkDir, 'res_mask_poly.shp')
                arcpy.RasterToPolygon_conversion(resMaskRaster, resMaskPoly,
                                                 "NO_SIMPLIFY")
                arcpy.env.extent = resMaskPoly

                # Includes 0 values in some cases with CP LI model if tif
                # so using ESRI Grid format
                resClipRasterMasked = path.join(linkDir, 'res_clip_m')
                # Extract masked resistance raster.
                # Needs to be float to get export to npy to work.
                outRas = arcpy.sa.ExtractByMask(resRaster, resMaskPoly) + 0.0
                outRas.save(resClipRasterMasked)

                resNpyFN = 'resistances_link_' + linkId + '.npy'
                resNpyFile = path.join(INCIRCUITDIR, resNpyFN)
                numElements, numResistanceNodes = export_ras_to_npy(
                    resClipRasterMasked, resNpyFile)

                totMem, availMem = lu.get_mem()
                # gprint('Total memory: str(totMem))
                if numResistanceNodes / availMem > 2000000:
                    lu.dashline(1)
                    lu.warn('Warning:')
                    lu.warn('Circuitscape can only solve 2-3 million nodes')
                    lu.warn(
                        'per gigabyte of available RAM. \nTotal physical RAM'
                        ' on your machine is ~' + str(totMem) +
                        ' GB. \nAvailable memory is ~' + str(availMem) +
                        ' GB. \nYour resistance raster has ' +
                        str(numResistanceNodes) + ' nodes.')
                    lu.dashline(2)
                corePairRaster = path.join(linkDir, 'core_pairs' + tif)
                arcpy.env.extent = resClipRasterMasked

                # Next result needs to be floating pt for numpy export
                outCon = arcpy.sa.Con(
                    Raster(cwdRaster1) == 0, corex,
                    arcpy.sa.Con(Raster(cwdRaster2) == 0, corey + 0.0))
                outCon.save(corePairRaster)

                coreNpyFN = 'cores_link_' + linkId + '.npy'
                coreNpyFile = path.join(INCIRCUITDIR, coreNpyFN)
                numElements, numNodes = export_ras_to_npy(
                    corePairRaster, coreNpyFile)

                arcpy.env.extent = "MINOF"

                # Set circuitscape options and call
                options = lu.setCircuitscapeOptions()
                if cfg.WRITE_VOLT_MAPS == True:
                    options['write_volt_maps'] = True
                options['habitat_file'] = resNpyFile

                # if int(linkId) > 2:
                # options['habitat_file'] = 'c:\\test.dummy'

                options['point_file'] = coreNpyFile
                options['set_focal_node_currents_to_zero'] = True
                outputFN = 'Circuitscape_link' + linkId + '.out'
                options['output_file'] = path.join(OUTCIRCUITDIR, outputFN)
                if numElements > 250000:
                    options['print_timings'] = True
                configFN = 'pinchpoint_config' + linkId + '.ini'

                outConfigFile = path.join(CONFIGDIR, configFN)
                lu.writeCircuitscapeConfigFile(outConfigFile, options)
                gprint('Processing link ID #' + str(linkId) +
                       '. Resistance map'
                       ' has ' + str(int(numResistanceNodes)) + ' nodes.')

                memFlag = call_circuitscape(CSPATH, outConfigFile)

                currentFN = ('Circuitscape_link' + linkId + '_cum_curmap.npy')
                currentMap = path.join(OUTCIRCUITDIR, currentFN)

                if not arcpy.Exists(currentMap):
                    print_failure(numResistanceNodes, memFlag, 10)
                    numElements, numNodes = export_ras_to_npy(
                        resClipRasterMasked, resNpyFile)
                    memFlag = call_circuitscape(CSPATH, outConfigFile)

                    currentFN = ('Circuitscape_link' + linkId +
                                 '_cum_curmap.npy')
                    currentMap = path.join(OUTCIRCUITDIR, currentFN)

                if not arcpy.Exists(currentMap):
                    msg = (
                        '\nCircuitscape failed. See error information above.')
                    arcpy.AddError(msg)
                    lu.write_log(msg)
                    exit(1)

                # Either set core areas to nodata in current map or
                # divide each by its radius
                currentRaster = path.join(linkDir, "current" + tif)
                import_npy_to_ras(currentMap, corePairRaster, currentRaster)

                if cfg.WRITE_VOLT_MAPS == True:
                    voltFN = ('Circuitscape_link' + linkId + '_voltmap_' +
                              str(corex) + '_' + str(corey) + '.npy')
                    voltMap = path.join(OUTCIRCUITDIR, voltFN)
                    voltRaster = path.join(
                        outputGDB, cfg.PREFIX + "_voltMap_" + str(corex) +
                        '_' + str(corey))
                    import_npy_to_ras(voltMap, corePairRaster, voltRaster)
                    gprint('Building output statistics and pyramids '
                           'for voltage raster\n')
                    lu.build_stats(voltRaster)

                arcpy.env.extent = currentRaster

                if SETCORESTONULL:
                    # Set core areas to NoData in current map for color ramping
                    currentRaster2 = currentRaster + '2' + tif
                    outCon = arcpy.sa.Con(
                        arcpy.sa.IsNull(Raster(corePairRaster)),
                        Raster(currentRaster))
                    outCon.save(currentRaster2)
                    currentRaster = currentRaster2
                arcpy.env.extent = "MAXOF"
                if linkLoop == 1:
                    lu.delete_data(mosaicRaster)

                    @retry(10)
                    def copyRas2():
                        arcpy.CopyRaster_management(currentRaster,
                                                    mosaicRaster)

                    copyRas2()
                else:

                    @retry(10)
                    def mosaicRas():
                        arcpy.Mosaic_management(currentRaster, mosaicRaster,
                                                "MAXIMUM", "MATCH")

                    mosaicRas()

                resistancesFN = ('Circuitscape_link' + linkId +
                                 '_resistances_3columns.out')

                resistancesFile = path.join(OUTCIRCUITDIR, resistancesFN)
                resistances = npy.loadtxt(resistancesFile,
                                          dtype='Float64',
                                          comments='#')

                resistance = float(str(arcpy.env.cellSize)) * resistances[2]
                linkTable[link, cfg.LTB_EFFRESIST] = resistance

                # Ratio
                if not cfg.SQUARERESISTANCES:
                    linkTable[link, cfg.LTB_CWDTORR] = (
                        linkTable[link, cfg.LTB_CWDIST] /
                        linkTable[link, cfg.LTB_EFFRESIST])
                # Clean up
                if cfg.SAVE_TEMP_CIRCUIT_FILES == False:
                    lu.delete_file(coreNpyFile)
                    coreNpyBase, extension = path.splitext(coreNpyFile)
                    lu.delete_data(coreNpyBase + '.hdr')
                    lu.delete_file(resNpyFile)
                    resNpyBase, extension = path.splitext(resNpyFile)
                    lu.delete_data(resNpyBase + '.hdr')
                    lu.delete_file(currentMap)
                    curMapBase, extension = path.splitext(currentMap)
                    lu.delete_data(curMapBase + '.hdr')
                    lu.delete_data(currentRaster)
                    lu.clean_out_workspace(linkDir)
                    lu.delete_dir(linkDir)
                gprint('Finished with link ID #' + str(linkId) + '. ' +
                       str(linkLoop) + ' out of ' + str(numCorridorLinks) +
                       ' links have been processed.')
                start_time1 = lu.elapsed_time(start_time1)

            outputRaster = path.join(
                outputGDB, cfg.PREFIX + "_current_adjacentPairs_" + cutoffText)
            lu.delete_data(outputRaster)

            @retry(10)
            def copyRas():
                arcpy.CopyRaster_management(mosaicRaster, outputRaster)

            copyRas()

            gprint('Building output statistics and pyramids '
                   'for corridor pinch point raster\n')
            lu.build_stats(outputRaster)

            finalLinkTable = lu.update_lcp_shapefile(linkTable,
                                                     lastStep=5,
                                                     thisStep=8)

            linkTableFile = path.join(cfg.DATAPASSDIR, "linkTable_s5_plus.csv")
            lu.write_link_table(finalLinkTable, linkTableFile, inLinkTableFile)
            linkTableFinalFile = path.join(
                cfg.OUTPUTDIR, cfg.PREFIX + "_linkTable_s5_plus.csv")
            lu.write_link_table(finalLinkTable, linkTableFinalFile,
                                inLinkTableFile)
            gprint('Copy of linkTable written to ' + linkTableFinalFile)
            #fixme: update sticks?

            gprint('Creating shapefiles with linework for links.')
            lu.write_link_maps(linkTableFinalFile, step=8)

            # Copy final link maps to gdb.
            lu.copy_final_link_maps(step=8)

            lu.delete_data(mosaicRaster)

        if not cfg.DO_ALLPAIRS:
            # Clean up temporary files
            if not cfg.SAVECURRENTMAPS:
                lu.delete_dir(OUTCIRCUITDIR)
            return

        lu.dashline(1)
        gprint('Mapping global pinch points among all\n'
               'core area pairs using Circuitscape.')

        if cfg.ALL_PAIR_SCENARIO == 'pairwise':
            gprint('Circuitscape will be run in PAIRWISE mode.')

        else:
            gprint('Circuitscape will be run in ALL-TO-ONE mode.')
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST

        S8CORE_RAS = "s8core_ras"
        s8CoreRasPath = path.join(cfg.SCRATCHDIR, S8CORE_RAS)

        arcpy.FeatureToRaster_conversion(cfg.COREFC, cfg.COREFN, s8CoreRasPath,
                                         arcpy.env.cellSize)
        binaryCoreRaster = path.join(cfg.SCRATCHDIR, "core_ras_bin")

        # The following commands cause file lock problems on save.  using gp
        # instead.
        # outCon = arcpy.sa.Con(S8CORE_RAS, 1, "#", "VALUE > 0")
        # outCon.save(binaryCoreRaster)
        # gp.Con_sa(s8CoreRasPath, 1, binaryCoreRaster, "#", "VALUE > 0")
        outCon = arcpy.sa.Con(Raster(s8CoreRasPath) > 0, 1)
        outCon.save(binaryCoreRaster)
        s5corridorRas = path.join(cfg.OUTPUTGDB, cfg.PREFIX + "_corridors")

        if not arcpy.Exists(s5corridorRas):
            s5corridorRas = path.join(cfg.OUTPUTGDB,
                                      cfg.PREFIX + "_lcc_mosaic_int")

        outCon = arcpy.sa.Con(
            Raster(s5corridorRas) <= cfg.CWDCUTOFF, Raster(resRaster),
            arcpy.sa.Con(Raster(binaryCoreRaster) > 0, Raster(resRaster)))

        resRasClipPath = path.join(cfg.SCRATCHDIR, 'res_ras_clip')
        outCon.save(resRasClipPath)

        arcpy.env.cellSize = resRasClipPath
        arcpy.env.extent = resRasClipPath
        s8CoreRasClipped = s8CoreRasPath + '_c'

        # Produce core raster with same extent as clipped resistance raster
        # added to ensure correct data type- nodata values were positive for
        # cores otherwise
        outCon = arcpy.sa.Con(arcpy.sa.IsNull(Raster(s8CoreRasPath)), -9999,
                              Raster(s8CoreRasPath))
        outCon.save(s8CoreRasClipped)

        resNpyFN = 'resistances.npy'
        resNpyFile = path.join(INCIRCUITDIR, resNpyFN)
        numElements, numResistanceNodes = export_ras_to_npy(
            resRasClipPath, resNpyFile)

        totMem, availMem = lu.get_mem()
        # gprint('Total memory: str(totMem))
        if numResistanceNodes / availMem > 2000000:
            lu.dashline(1)
            lu.warn('Warning:')
            lu.warn('Circuitscape can only solve 2-3 million nodes')
            lu.warn('per gigabyte of available RAM. \nTotal physical RAM '
                    'on your machine is ~' + str(totMem) +
                    ' GB. \nAvailable memory is ~' + str(availMem) +
                    ' GB. \nYour resistance raster has ' +
                    str(numResistanceNodes) + ' nodes.')
            lu.dashline(0)

        coreNpyFN = 'cores.npy'
        coreNpyFile = path.join(INCIRCUITDIR, coreNpyFN)
        numElements, numNodes = export_ras_to_npy(s8CoreRasClipped,
                                                  coreNpyFile)

        arcpy.env.extent = "MINOF"

        options = lu.setCircuitscapeOptions()
        options['scenario'] = cfg.ALL_PAIR_SCENARIO
        options['habitat_file'] = resNpyFile
        options['point_file'] = coreNpyFile
        options['set_focal_node_currents_to_zero'] = True
        outputFN = 'Circuitscape.out'
        options['output_file'] = path.join(OUTCIRCUITDIR, outputFN)
        options['print_timings'] = True
        configFN = 'pinchpoint_allpair_config.ini'
        outConfigFile = path.join(CONFIGDIR, configFN)
        lu.writeCircuitscapeConfigFile(outConfigFile, options)
        gprint('\nResistance map has ' + str(int(numResistanceNodes)) +
               ' nodes.')
        lu.dashline(1)
        gprint('If you try to cancel your run and the Arc dialog hangs, ')
        gprint('you can kill Circuitscape by opening Windows Task Manager')
        gprint('and ending the cs_run.exe process.')
        lu.dashline(0)

        call_circuitscape(CSPATH, outConfigFile)
        # test = subprocess.call([CSPATH, outConfigFile],
        # creationflags = subprocess.CREATE_NEW_CONSOLE)

        if options['scenario'] == 'pairwise':
            rasterSuffix = "_current_allPairs_" + cutoffText

        else:
            rasterSuffix = "_current_allToOne_" + cutoffText

        currentFN = 'Circuitscape_cum_curmap.npy'
        currentMap = path.join(OUTCIRCUITDIR, currentFN)
        outputRaster = path.join(outputGDB, cfg.PREFIX + rasterSuffix)
        currentRaster = path.join(cfg.SCRATCHDIR, "current")

        try:
            import_npy_to_ras(currentMap, resRasClipPath, outputRaster)
        except:
            lu.dashline(1)
            msg = ('ERROR: Circuitscape failed. \n'
                   'Note: Circuitscape can only solve 2-3 million nodes'
                   '\nper gigabyte of available RAM. The resistance '
                   '\nraster for the last corridor had ' +
                   str(numResistanceNodes) + ' nodes.\n\nResistance '
                   'raster values that vary by >6 orders of \nmagnitude'
                   ' can also cause failures, as can a mismatch in '
                   '\ncore area and resistance raster extents.')
            arcpy.AddError(msg)
            lu.write_log(msg)
            exit(1)

        #set core areas to nodata
        if SETCORESTONULL:
            # Set core areas to NoData in current map for color ramping
            outputRasterND = outputRaster + '_noDataCores'
            outCon = arcpy.sa.SetNull(
                Raster(s8CoreRasClipped) > 0, Raster(outputRaster))
            outCon.save(outputRasterND)

        gprint('\nBuilding output statistics and pyramids '
               'for centrality raster.')
        lu.build_stats(outputRaster)
        lu.build_stats(outputRasterND)

        # Clean up temporary files
        if not cfg.SAVECURRENTMAPS:
            lu.delete_dir(OUTCIRCUITDIR)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 8. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 8. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
Exemplo n.º 21
0
def main(argv=None):
    """Iterate over LM, BM, and restoration tasks."""
    if argv is None:
        argv = sys.argv  # Get parameters from ArcGIS tool dialog

    start_time = time.clock()

    # USER SETTINGS ######################################################

    # Restoration Settings
    # ALL input data must be in the same projection

    # Set to True to restore highest ROI. Set to False to restore strongest
    # barrier
    restore_max_roi = argv[1]

    # Resistance value of restored habitat.  Must be 1 or greater.
    restored_resistance_val = argv[2]

    # No spaces or special chars in paths or gdb names
    restoration_data_gdb = argv[3]

    # No spaces in path, avoid using dropbox or network drive
    # Project directories will be created in this (iter1, iter2...) as will an
    # output geodatabase
    output_dir = argv[4]

    # Resistance raster. Should be in input GDB
    resistance_ras = argv[5]
    # Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant'
    core_fc = argv[6]

    core_fn = argv[7]  # Core area field name

    radius = argv[8]  # Restoration radius in meters
    iterations = argv[9]  # Number of restorations to perform

    # If less than this proportion of ag in circle, don't consider restoring
    # circle
    min_ag_threshold = argv[10]

    # Don't consider barriers below this improvement score (average improvement
    # per meter diameter restored)
    min_improvement_val = argv[11]

    # Average per-m2 parcel cost per pixel. Snapped to resistance raster.
    parcel_cost_ras = argv[12]

    # Right now this is just a raster with all pixels set to 0.113174
    restoration_cost_ras = argv[13]

    ag_ras = argv[14]  # 1=Ag, 0=Not Ag

    # Some restorations benefit multiple corridors.
    # 'Maximum' takes the greatest improvement across core area pairs
    # 'Sum' adds improvement scores acreoss all pairs.
    barrier_combine_method = argv[15]

    # Use cwd_thresh = None for no threshold. Use cwd_thresh = X to not
    # consider restorations more than X map units away from each core area.
    cwd_thresh = argv[16]

    # END USER SETTINGS ######################################################

    try:
        # Setup path and create directories
        gprint('Hey! Make sure everything is in the same projection!\n')
        gprint('Setting up paths and creating directories')
        sys.path.append('..\\toolbox\\scripts')
        res_ras = os.path.join(restoration_data_gdb, resistance_ras)
        core_fc_path = os.path.join(restoration_data_gdb, core_fc)

        # Set up a NEW output gdb (leave previous ones on drive)
        i = None
        for i in range(1, 200):
            output_gdb = 'restorationOutput' + str(i) + '.gdb'
            if not arcpy.Exists(os.path.join(output_dir, output_gdb)):
                break
            gprint('Previous output GDB ' + output_gdb + ' exists.  '
                   'Delete to save disk space.')
        arcpy.CreateFileGDB_management(output_dir, output_gdb)
        output_gdb = os.path.join(output_dir, output_gdb)
        log_file = os.path.join(output_gdb,
                                'Iterate Barriers' + str(i) + '.py')

        # Write a copy of this file to output dir as a record of settings
        shutil.copyfile(__file__, log_file)

        arcpy.env.cellSize = res_ras
        arcpy.env.extent = res_ras
        arcpy.env.snapRaster = res_ras
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = output_gdb
        arcpy.env.workspace = output_gdb

        spatialref = arcpy.Describe(res_ras).spatialReference
        mapunits = spatialref.linearUnitName
        gprint('Cell size = ' + str(arcpy.env.cellSize) + ' ' + mapunits + 's')

        # Calculate fraction of ag within radius of each pixel
        gprint('Calculating purchase cost, fraction of ag, etc within radius '
               'of each pixel.')
        ag_ras = os.path.join(restoration_data_gdb, ag_ras)
        in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
        arcpy.env.extent = ag_ras
        out_focal_stats = arcpy.sa.FocalStatistics(ag_ras, in_neighborhood,
                                                   "MEAN", "NODATA")
        proportion_ag_ras = os.path.join(output_gdb, 'proportionAgRas')
        out_focal_stats.save(proportion_ag_ras)
        arcpy.env.extent = res_ras

        # Calculate purchase cost of circles
        parcel_cost_ras = os.path.join(restoration_data_gdb, parcel_cost_ras)
        arcpy.env.extent = parcel_cost_ras
        out_focal_stats = arcpy.sa.FocalStatistics(parcel_cost_ras,
                                                   in_neighborhood, "MEAN",
                                                   "DATA")
        cost_focal_stats_ras = os.path.join(output_gdb, 'cost_focal_stats_ras')
        out_focal_stats.save(cost_focal_stats_ras)
        arcpy.env.extent = res_ras

        circle_area = float(npy.pi * radius * radius)
        outras = arcpy.sa.Raster(cost_focal_stats_ras) * circle_area
        purch_cost_ras = os.path.join(output_gdb, 'purchaseCostRaster')
        outras.save(purch_cost_ras)
        lu.delete_data(cost_focal_stats_ras)

        restoration_cost_ras = os.path.join(restoration_data_gdb,
                                            restoration_cost_ras)
        outras = (
            arcpy.sa.Raster(purch_cost_ras) +
            (arcpy.sa.Raster(restoration_cost_ras) * radius * radius * npy.pi))
        total_cost_ras = os.path.join(output_gdb, 'totalCostRaster')
        outras.save(total_cost_ras)

        # Create mask to remove areas without cost data
        arcpy.env.extent = total_cost_ras
        cost_mask_ras = os.path.join(output_gdb, 'costMaskRaster')
        cost_thresh = 0
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(total_cost_ras) > float(cost_thresh)), 1)
        out_con.save(cost_mask_ras)
        arcpy.env.extent = res_ras

        # Create mask to remove areas below ag threshold
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(proportion_ag_ras) > float(min_ag_threshold)), 1)
        ag_mask_ras = os.path.join(output_gdb, 'agMaskRaster')
        out_con.save(ag_mask_ras)

        do_step_1 = 'true'
        do_step_2 = 'true'
        do_step_5 = 'false'
        all_restored_areas_ras = ''

        for cur_iter in range(1, iterations + 1):
            start_time1 = time.clock()

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            lu.dashline(1)
            gprint('Running iteration number ' + str(cur_iter))
            proj_dir = os.path.join(output_dir,
                                    'iter' + str(cur_iter) + 'Proj')
            lu.create_dir(output_dir)
            lu.delete_dir(proj_dir)
            lu.create_dir(proj_dir)
            if cur_iter > 1:  # Copy previous s2 linktable to new project dir
                datapass_dir = os.path.join(proj_dir, 'datapass')
                lu.create_dir(datapass_dir)
                proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                datapass_dir_iter1 = os.path.join(proj_dir1, 'datapass')
                s2_link_tbl_iter1 = os.path.join(datapass_dir_iter1,
                                                 'linkTable_s2.csv')
                s2_link_tbl = os.path.join(datapass_dir, 'linkTable_s2.csv')
                shutil.copyfile(s2_link_tbl_iter1, s2_link_tbl)

            # Run Linkage Mapper

            # Copy distances text file from earlier LM run to the output
            # directory- speeds things up!
            dist_file = os.path.join(output_dir, core_fc + '_dists.txt')

            if not os.path.exists(dist_file):
                if cur_iter == 1:
                    gprint('Will calculate distance file.')
                    dist_file = '#'
                else:
                    proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                    dist_file1 = os.path.join(proj_dir1,
                                              core_fc + '_dists.txt')
                    # Put a copy here for future runs
                    shutil.copyfile(dist_file1, dist_file)

            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            argv = ('lm_master.py', proj_dir, core_fc_path, core_fn, res_ras,
                    do_step_1, do_step_2, 'Cost-Weighted & Euclidean',
                    dist_file, 'true', 'true', 'false', '4', 'Cost-Weighted',
                    'true', do_step_5, 'true', '200000', '10000', '#', '#',
                    '#', '#')
            gprint('Running ' + str(argv))
            lm_master.lm_master(argv)
            do_step_1 = 'false'  # Can skip for future iterations
            do_step_2 = 'false'  # Can skip for future iterations
            do_step_5 = 'false'  # Skipping for future iterations

            start_radius = str(radius)
            end_radius = str(radius)
            radius_step = '0'
            save_radius_ras = 'false'
            write_pct_ras = 'false'

            argv = ('barrier_master.py', proj_dir, res_ras, start_radius,
                    end_radius, radius_step, barrier_combine_method,
                    save_radius_ras, write_pct_ras, cwd_thresh)
            gprint('Running ' + str(argv))
            barrier_master.bar_master(argv)

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            gprint('Finding restoration circles with max barrier score / ROI')
            # Find points with max ROI
            prefix = os.path.basename(proj_dir)
            if barrier_combine_method == 'Sum':
                sum_suffix = 'Sum'
            else:
                sum_suffix = ''
            barrier_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad" +
                          str(radius))
            barrier_ras = os.path.join(proj_dir, 'output', 'barriers.gdb',
                                       barrier_fn)
            if not arcpy.Exists(barrier_ras):
                msg = ('Error: cannot find barrier output: ' + barrier_ras)
                lu.raise_error(msg)

            if cur_iter > 1:
                gprint('Creating mask for previously restored areas')
                in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
                arcpy.env.extent = all_restored_areas_ras
                out_focal_stats = arcpy.sa.FocalStatistics(
                    all_restored_areas_ras, in_neighborhood, "MEAN", "DATA")
                all_restored_focal_ras = os.path.join(
                    output_gdb, 'allRestFocRas_iter' + str(cur_iter))

                # Anything > 0 would include a restored area
                out_focal_stats.save(all_restored_focal_ras)
                arcpy.env.extent = res_ras
                rest_mask_ras = os.path.join(
                    output_gdb, 'restMaskRaster_iter' + str(cur_iter))
                minval = 0
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(all_restored_focal_ras) == float(minval)),
                    1)
                out_con.save(rest_mask_ras)

            # Candidate areas have not been restored, have cost data, meet
            # minimum improvement score criteria, and have enough ag in them
            candidate_barrier_ras = os.path.join(
                output_gdb, 'candidateBarrierRaster' + '_iter' + str(cur_iter))
            if cur_iter > 1:
                gprint('Creating candidate restoration raster using barrier '
                       'results, previous restorations, and selection '
                       'criteria')

                # ROI scores will be in terms of total improvement
                # (= score * diameter)
                out_calc = (arcpy.sa.Raster(cost_mask_ras) *
                            arcpy.sa.Raster(ag_mask_ras) *
                            arcpy.sa.Raster(barrier_ras) *
                            arcpy.sa.Raster(rest_mask_ras) * (radius * 2))
            else:
                out_calc = (arcpy.sa.Raster(cost_mask_ras) *
                            arcpy.sa.Raster(ag_mask_ras) *
                            arcpy.sa.Raster(barrier_ras) * radius * 2)

            min_barrier_score = min_improvement_val * radius * 2
            if restored_resistance_val != 1:
                out_calc_2 = (out_calc - (2 * radius *
                                          (restored_resistance_val - 1)))
                out_con = arcpy.sa.Con(
                    (out_calc_2 >= float(min_barrier_score)), out_calc_2)
            else:
                out_con = arcpy.sa.Con((out_calc >= float(min_barrier_score)),
                                       out_calc)
            out_con.save(candidate_barrier_ras)
            lu.build_stats(candidate_barrier_ras)

            purchase_roi_ras = os.path.join(
                output_gdb, 'purchaseRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras) /
                        arcpy.sa.Raster(purch_cost_ras))
            out_calc.save(purchase_roi_ras)
            lu.build_stats(purchase_roi_ras)

            total_roi_ras = os.path.join(
                output_gdb, 'purchaseRestRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras) /
                        arcpy.sa.Raster(total_cost_ras))
            out_calc.save(total_roi_ras)
            lu.build_stats(total_roi_ras)

            max_barrier = float(
                arcpy.GetRasterProperties_management(candidate_barrier_ras,
                                                     "MAXIMUM").getOutput(0))
            gprint('Maximum barrier improvement score: ' + str(max_barrier))
            if max_barrier < 0:
                arcpy.AddWarning("\nNo barriers found that meet CWD or Ag "
                                 "threshold criteria.")

            max_purch_roi = arcpy.GetRasterProperties_management(
                purchase_roi_ras, "MAXIMUM")
            gprint('Maximum purchase ROI score: ' +
                   str(max_purch_roi.getOutput(0)))

            max_roi = arcpy.GetRasterProperties_management(
                total_roi_ras, "MAXIMUM")
            gprint('Maximum total ROI score: ' + str(max_roi.getOutput(0)))

            if restore_max_roi:
                out_point = os.path.join(
                    output_gdb, 'maxRoiPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum ROI to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(total_roi_ras) >= float(
                        max_roi.getOutput(0))), total_roi_ras)
                max_roi_ras = os.path.join(output_gdb, 'max_roi_ras')
                out_con.save(max_roi_ras)
                # Save max ROI to point
                try:
                    arcpy.RasterToPoint_conversion(max_roi_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)

            else:  # Restoring strongest barrier instead
                out_point = os.path.join(
                    output_gdb, 'maxBarrierPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum BARRIER IMPROVEMENT SCORE'
                       ' to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(candidate_barrier_ras) >= max_barrier),
                    candidate_barrier_ras)
                max_barrier_ras = os.path.join(output_gdb, 'maxBarrierRaster')
                out_con.save(max_barrier_ras)
                # Save max barrier to point
                try:
                    arcpy.RasterToPoint_conversion(max_barrier_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)

            gprint('Done evaluating candidate restorations')
            result = int(arcpy.GetCount_management(out_point).getOutput(0))
            if result > 1:
                # Would be better to retain point with max barrier score when
                # we have multiple points with same ROI
                arcpy.AddWarning('Deleting points with identical '
                                 'ROI/improvement score values')

                arcpy.DeleteIdentical_management(out_point, "grid_code", 0.1,
                                                 0.1)

            arcpy.sa.ExtractMultiValuesToPoints(
                out_point,
                [[candidate_barrier_ras, "barrierScore"],
                 [purch_cost_ras, "purchCost"], [total_cost_ras, "totalCost"],
                 [purchase_roi_ras, "purchaseROI"],
                 [total_roi_ras, "totalROI"]], "NONE")

            arcpy.AddField_management(out_point, "restorationNumber", "SHORT")
            arcpy.CalculateField_management(out_point, "restorationNumber",
                                            cur_iter, "PYTHON_9.3")
            arcpy.AddField_management(out_point, "radius", "DOUBLE")
            arcpy.CalculateField_management(out_point, "radius", radius,
                                            "PYTHON_9.3")
            arcpy.AddField_management(out_point, "barrierScore_per_m",
                                      "DOUBLE")
            arcpy.CalculateField_management(
                out_point, "barrierScore_per_m",
                "(float(!barrierScore!) / (!radius! * 2))", "PYTHON_9.3")

            gprint('\nCreating restoration circles')
            if restore_max_roi:
                circle_fc = os.path.join(
                    output_gdb, 'maxRoiCircle' + '_iter' + str(cur_iter))
            else:
                circle_fc = os.path.join(
                    output_gdb, 'maxBarrierCircle' + '_iter' + str(cur_iter))
            arcpy.Buffer_analysis(out_point, circle_fc, radius)
            gprint('Rasterizing restoration circles')
            if restore_max_roi:
                circle_ras = os.path.join(
                    output_gdb, 'maxRoicircle_ras' + '_iter' + str(cur_iter))
            else:
                circle_ras = os.path.join(
                    output_gdb,
                    'maxBarrierCircleRas' + '_iter' + str(cur_iter))
            arcpy.FeatureToRaster_conversion(circle_fc, 'totalROI', circle_ras,
                                             arcpy.env.cellSize)

            # restore raster
            gprint('Digitally restoring resistance raster')
            res_ras_restored = os.path.join(
                output_gdb, 'resRastRestored' + '_iter' + str(cur_iter))
            out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), res_ras,
                                   restored_resistance_val)
            out_con.save(res_ras_restored)

            all_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter))
            prev_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter - 1))
            if cur_iter == 1:
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), 0, 1)
            else:
                # Add this restoration to areas restored
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras),
                                       prev_restored_areas_ras, 1)
            out_con.save(all_restored_areas_ras)

            lu.delete_data(circle_ras)

            # Use for next iteration resistance raster
            res_ras = res_ras_restored

            # Add circle into feature class with all circles
            if restore_max_roi:
                all_circles_fc = os.path.join(output_gdb, "allCirclesMaxROI")
            else:
                all_circles_fc = os.path.join(output_gdb,
                                              "allCirclesMaxBarriers")
            if cur_iter == 1:
                arcpy.CopyFeatures_management(circle_fc, all_circles_fc)
            else:
                arcpy.Append_management(circle_fc, all_circles_fc, "TEST")
            gprint('Finished iteration #' + str(cur_iter))
            start_time1 = lu.elapsed_time(start_time1)

        gprint('\nDone with iterations.')
        start_time = lu.elapsed_time(start_time)
        gprint('Outputs saved in: ' + output_gdb)
        gprint('Back up your project directories if you want to save '
               'corridor/barrier results.')

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
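A note on the raster algebra above: the ROI surface is simply the candidate
barrier improvement divided by cost, and the chosen restoration point is
wherever that ratio peaks. A minimal NumPy sketch of the same idea (the array
names are hypothetical stand-ins, not objects from the script):

import numpy as np

# Stand-ins for the candidate-barrier and total-cost rasters (NaN = NoData)
improvement = np.array([[2.0, 5.0, np.nan],
                        [1.0, 8.0, 4.0]])
cost = np.array([[4.0, 5.0, 1.0],
                 [2.0, 2.0, 8.0]])

roi = improvement / cost               # analogous to Raster(barrier) / Raster(totalCost)
best = np.nanmax(roi)                  # analogous to GetRasterProperties "MAXIMUM"
best_cells = np.argwhere(roi == best)  # analogous to Con(roi >= max) -> RasterToPoint
print(best, best_cells)                # 4.0 at cell (1, 1)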
Exemplo n.º 22
0
def STEP6_calc_barriers():
    """Detects influential barriers given CWD calculations from
       s3_calcCwds.py.

    """
    
    try:
        arcpy.CheckOutExtension("spatial")
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)
        
        if cfg.BARRIER_CWD_THRESH is not None:
            lu.dashline(1)
            gprint('Invoking CWD Threshold of ' + str(cfg.BARRIER_CWD_THRESH) + ' map units.')
        
        if cfg.SUM_BARRIERS:
            sumSuffix = '_Sum'
            cfg.BARRIERBASEDIR = cfg.BARRIERBASEDIR + sumSuffix
            baseName, extension = path.splitext(cfg.BARRIERGDB)
            cfg.BARRIERGDB = baseName + sumSuffix + extension

            gprint('\nBarrier scores will be SUMMED across core pairs.')
        else:
            sumSuffix = ''

        # Delete contents of final output geodatabase
        # lu.clean_out_workspace(cfg.BARRIERGDB) #xxx try not doing this to allow multiple radii to be analyzed in separate runs
        if not arcpy.Exists(cfg.BARRIERGDB):
            # Create output geodatabase
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                           path.basename(cfg.BARRIERGDB))             
                                           
        startRadius = int(cfg.STARTRADIUS)
        endRadius = int(cfg.ENDRADIUS)
        radiusStep = int(cfg.RADIUSSTEP)
        if radiusStep == 0:
            endRadius = startRadius # Calculate at just one radius value
            radiusStep = 1
        linkTableFile = lu.get_prev_step_link_table(step=6)
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.RefreshCatalog(cfg.PROJECTDIR)
        PREFIX = path.basename(cfg.PROJECTDIR)
        # For speed:
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.OverWriteOutput = True
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = cfg.RESRAST
        arcpy.env.snapRaster = cfg.RESRAST
        spatialref = arcpy.Describe(cfg.RESRAST).spatialReference                
        mapUnits = (str(spatialref.linearUnitName)).lower()
        if len(mapUnits) > 1 and mapUnits[-1] != 's':
            mapUnits = mapUnits + 's'

        if float(arcpy.env.cellSize) > startRadius or startRadius > endRadius:
            msg = ('Error: minimum detection radius must be greater than '
                    'cell size (' + str(arcpy.env.cellSize) +
                    ') \nand less than or equal to maximum detection radius.')
            lu.raise_error(msg)

        linkTable = lu.load_link_table(linkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg =('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)
        
        # set up directories for barrier and barrier mosaic grids
        dirCount = 0
        gprint("Creating intermediate output folder: " + cfg.BARRIERBASEDIR)
        lu.delete_dir(cfg.BARRIERBASEDIR)
        lu.create_dir(cfg.BARRIERBASEDIR)
        arcpy.CreateFolder_management(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)
        cbarrierdir = path.join(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)

        coresToProcess = npy.unique(linkTable
                                    [:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)

        # Set up focal directories.
        # To keep there from being > 100 grids in any one directory,
        # outputs are written to:
        # barrier\focalX_ for cores 1-99 at radius X
        # barrier\focalX_1 for cores 100-199
        # etc.
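        # (Equivalently, the subfolder index below is the core ID // 100:
        #  core 57 -> focalX_, core 142 -> focalX_1, core 305 -> focalX_3.)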
        lu.dashline(0)

        for radius in range(startRadius, endRadius + 1, radiusStep):
            core1path = lu.get_focal_path(1,radius)
            path1, dir1 = path.split(core1path)
            path2, dir2 = path.split(path1)
            arcpy.CreateFolder_management(path.dirname(path2),
                                          path.basename(path2))
            arcpy.CreateFolder_management(path.dirname(path1),
                                         path.basename(path1))

            if maxCoreNum > 99:
                gprint('Creating subdirectories for ' + str(radius) + ' ' + 
                       str(mapUnits) + ' radius analysis scale.')
                maxDirCount = int(maxCoreNum/100)
                focalDirBaseName = dir2

                cp100 = (coresToProcess.astype('int32'))/100
                ind = npy.where(cp100 > 0)
                dirNums = npy.unique(cp100[ind])
                for dirNum in dirNums:
                    focalDir = focalDirBaseName + str(dirNum)
                    gprint('...' + focalDir)
                    arcpy.CreateFolder_management(path2, focalDir)                        
        
        # Create resistance raster with filled-in Nodata values for later use
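        # NoData cells get a prohibitively large value so they never look
        # restorable; (resistance - 1) is the most CWD a single cell could
        # save if restored to a resistance of 1, which caps the trim rasters.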
        arcpy.env.extent = cfg.RESRAST
        resistFillRaster = path.join(cfg.SCRATCHDIR, "resist_fill")
        output = arcpy.sa.Con(IsNull(cfg.RESRAST), 1000000000, 
                              Raster(cfg.RESRAST) - 1)
        output.save(resistFillRaster)

        coreList = linkTable[:,cfg.LTB_CORE1:cfg.LTB_CORE2+1]
        coreList = npy.sort(coreList)

        # Loop through each search radius to calculate barriers in each link
        import time
        radId = 0 #keep track of number of radii processed- used for temp dir naming
        for radius in range (startRadius, endRadius + 1, radiusStep):
            radId = radId + 1
            linkTableTemp = linkTable.copy()
            @retry(10)
            #can't pass vars in and modify them. 
            def doRadiusLoop():
                linkTable = linkTableTemp.copy()
                startTime = time.clock()
                randomerror()
                linkLoop = 0
                pctDone = 0
                gprint('\nMapping barriers at a radius of ' + str(radius) +
                       ' ' + str(mapUnits))             
                if cfg.SUM_BARRIERS:  
                    gprint('using SUM method')
                else:
                    gprint('using MAXIMUM method')                   
                if numCorridorLinks > 1:
                    gprint('0 percent done')
                lastMosaicRaster = None
                lastMosaicRasterPct = None
                for x in range(0,numLinks):
                    pctDone = lu.report_pct_done(linkLoop, numCorridorLinks,
                                                pctDone)
                    linkId = str(int(linkTable[x,cfg.LTB_LINKID]))
                    if ((linkTable[x,cfg.LTB_LINKTYPE] > 0) and
                       (linkTable[x,cfg.LTB_LINKTYPE] < 1000)):
                        linkLoop = linkLoop + 1
                        # source and target cores
                        corex=int(coreList[x,0])
                        corey=int(coreList[x,1])

                        # Get cwd rasters for source and target cores
                        cwdRaster1 = lu.get_cwd_path(corex)
                        cwdRaster2 = lu.get_cwd_path(corey)
                        
                        # Mask out areas above CWD threshold
                        cwdTemp1 = None
                        cwdTemp2 = None
                        if cfg.BARRIER_CWD_THRESH is not None:
                            if x == 1:
                                lu.dashline(1)
                                gprint('  Using CWD threshold of ' + str(cfg.BARRIER_CWD_THRESH) + ' map units.')
                            arcpy.env.extent = cfg.RESRAST
                            arcpy.env.cellSize = cfg.RESRAST
                            arcpy.env.snapRaster = cfg.RESRAST
                            cwdTemp1 = path.join(cfg.SCRATCHDIR, "tmp"+str(corex))
                            outCon = arcpy.sa.Con(cwdRaster1 < float(cfg.BARRIER_CWD_THRESH),cwdRaster1)
                            outCon.save(cwdTemp1)
                            cwdRaster1 = cwdTemp1
                            cwdTemp2 = path.join(cfg.SCRATCHDIR, "tmp"+str(corey))
                            outCon = arcpy.sa.Con(cwdRaster2 < float(cfg.BARRIER_CWD_THRESH),cwdRaster2)
                            outCon.save(cwdTemp2)
                            cwdRaster2 = cwdTemp2                        
                        
                        focalRaster1 = lu.get_focal_path(corex,radius)
                        focalRaster2 = lu.get_focal_path(corey,radius)
                                                                     
                        link = lu.get_links_from_core_pairs(linkTable,
                                                            corex, corey)
                        lcDist = float(linkTable[link,cfg.LTB_CWDIST])
                        
                        # Detect barriers at radius using neighborhood stats
                        # Create the Neighborhood Object
                        innerRadius = radius - 1
                        outerRadius = radius

                        dia = 2 * radius
                        InNeighborhood = ("ANNULUS " + str(innerRadius) + " " +
                                         str(outerRadius) + " MAP")

                        @retry(10)
                        def execFocal():
                            randomerror()
                            # Execute FocalStatistics
                            if not path.exists(focalRaster1):
                                arcpy.env.extent = cwdRaster1
                                outFocalStats = arcpy.sa.FocalStatistics(cwdRaster1,
                                                    InNeighborhood, "MINIMUM","DATA")
                                if setCoresToNull:                    
                                    outFocalStats2 = arcpy.sa.Con(outFocalStats > 0, outFocalStats) # Set areas overlapping cores to NoData xxx
                                    outFocalStats2.save(focalRaster1) #xxx
                                else:
                                    outFocalStats.save(focalRaster1) #xxx
                                arcpy.env.extent = cfg.RESRAST

                            if not path.exists(focalRaster2):
                                arcpy.env.extent = cwdRaster2
                                outFocalStats = arcpy.sa.FocalStatistics(cwdRaster2,
                                                InNeighborhood, "MINIMUM","DATA")
                                if setCoresToNull:                    
                                    outFocalStats2 = arcpy.sa.Con(outFocalStats > 0, outFocalStats) # Set areas overlapping cores to NoData xxx
                                    outFocalStats2.save(focalRaster2)#xxx
                                else:
                                    outFocalStats.save(focalRaster2) #xxx

                                arcpy.env.extent = cfg.RESRAST
                        execFocal()
                                                
                        lu.delete_data(cwdTemp1)
                        lu.delete_data(cwdTemp2)
                        
                        barrierRaster = path.join(cbarrierdir, "b" + str(radius)
                              + "_" + str(corex) + "_" +
                              str(corey)+'.tif') 
                             
                        if cfg.SUM_BARRIERS: # Need to set nulls to 0, also 
                                             # create trim rasters as we go

                            outRas = ((lcDist - Raster(focalRaster1) - 
                                      Raster(focalRaster2) - dia) / dia)
                            outCon = arcpy.sa.Con(IsNull(outRas),0,outRas)
                            outCon2 = arcpy.sa.Con(outCon<0,0,outCon)                            
                            outCon2.save(barrierRaster)
                            
                            # Execute FocalStatistics to fill out search radii                            
                            InNeighborhood = "CIRCLE " + str(outerRadius) + " MAP"
                            fillRaster = path.join(cbarrierdir, "b" + str(radius)
                            + "_" + str(corex) + "_" + str(corey) +"_fill.tif")
                            outFocalStats = arcpy.sa.FocalStatistics(barrierRaster,
                                                  InNeighborhood, "MAXIMUM","DATA")
                            outFocalStats.save(fillRaster)                            

                            if cfg.WRITE_TRIM_RASTERS:                            
                                trmRaster = path.join(cbarrierdir, "b" + 
                                                      str(radius)
                                + "_" + str(corex) + "_" + str(corey) +"_trim.tif")
                                rasterList = [fillRaster, resistFillRaster]
                                outCellStatistics = arcpy.sa.CellStatistics(
                                                            rasterList, "MINIMUM")
                                outCellStatistics.save(trmRaster)
                               
                        else:
                            #Calculate potential benefit per map unit restored
                            @retry(10)
                            def calcBen():
                                randomerror()
                                outRas = ((lcDist - Raster(focalRaster1)
                                      - Raster(focalRaster2) - dia) / dia)
                                outRas.save(barrierRaster)
                            calcBen()

                        if cfg.WRITE_PCT_RASTERS:
                            #Calculate PERCENT potential benefit per unit restored                        
                            barrierRasterPct = path.join(cbarrierdir, "b" + 
                                                    str(radius)
                                                    + "_" + str(corex) + "_" +
                                                    str(corey)+'_pct.tif') 
                            @retry(10)
                            def calcBenPct():                            
                                randomerror()
                                outras = (100 * (Raster(barrierRaster) / lcDist))
                                outras.save(barrierRasterPct)
                            calcBenPct()
                            
                        # Mosaic barrier results across core area pairs                    
                        mosaicDir = path.join(cfg.SCRATCHDIR,'mos'+str(radId)+'_'+str(x+1)) 
                        lu.create_dir(mosaicDir)
                        
                        mosFN = 'mos_temp'
                        tempMosaicRaster = path.join(mosaicDir,mosFN)
                        tempMosaicRasterTrim = path.join(mosaicDir,'mos_temp_trm')
                        arcpy.env.workspace = mosaicDir            
                        if linkLoop == 1:
                            #If this is the first grid then copy rather than mosaic
                            arcpy.CopyRaster_management(barrierRaster, 
                                                            tempMosaicRaster)
                            if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                                arcpy.CopyRaster_management(trmRaster, 
                                                            tempMosaicRasterTrim)                       
                            
                        else:                    
                            if cfg.SUM_BARRIERS:
                                outCon = arcpy.sa.Con(
                                    Raster(barrierRaster) < 0, lastMosaicRaster,
                                    Raster(barrierRaster) + Raster(lastMosaicRaster))
                                outCon.save(tempMosaicRaster)
                                if cfg.WRITE_TRIM_RASTERS:
                                    outCon = arcpy.sa.Con(
                                        Raster(trmRaster) < 0, lastMosaicRasterTrim,
                                        Raster(trmRaster) + Raster(lastMosaicRasterTrim))
                                    outCon.save(tempMosaicRasterTrim)
                                
                            else:
                                rasterString = ('"'+barrierRaster+";" + 
                                                lastMosaicRaster+'"')
                                @retry(10)
                                def mosaicToNew():
                                    randomerror()
                                    arcpy.MosaicToNewRaster_management(
                                        rasterString,mosaicDir,mosFN, "", 
                                        "32_BIT_FLOAT", arcpy.env.cellSize, "1", 
                                        "MAXIMUM", "MATCH")
                                mosaicToNew()
                                # gprint(str(corex)+'0'+str(corey))
                                
                                
                        if linkLoop>1: #Clean up from previous loop
                            lu.delete_data(lastMosaicRaster)
                            lastMosaicDir =path.dirname(lastMosaicRaster) 
                            lu.clean_out_workspace(lastMosaicDir)
                            lu.delete_dir(lastMosaicDir)
                            
                        lastMosaicRaster = tempMosaicRaster
                        if cfg.WRITE_TRIM_RASTERS:
                            lastMosaicRasterTrim = tempMosaicRasterTrim             
                        if cfg.WRITE_PCT_RASTERS:
                            mosPctFN = 'mos_temp_pct'
                            mosaicDirPct = path.join(cfg.SCRATCHDIR,'mosP'+str(radId)+'_'+str(x+1)) 
                            lu.create_dir(mosaicDirPct)                            
                            tempMosaicRasterPct = path.join(mosaicDirPct,mosPctFN)
                            if linkLoop == 1:
                                # If this is the first grid then copy 
                                # rather than mosaic
                                if cfg.SUM_BARRIERS:
                                    outCon = arcpy.sa.Con(Raster(barrierRasterPct) 
                                        < 0, 0, arcpy.sa.Con(IsNull(
                                        barrierRasterPct), 0, barrierRasterPct)) 
                                    outCon.save(tempMosaicRasterPct)
                                else:
                                    arcpy.CopyRaster_management(barrierRasterPct, 
                                                             tempMosaicRasterPct)
                                                
                            else:                
                                if cfg.SUM_BARRIERS:
                                    @retry(10)
                                    def sumBarriers():
                                        randomerror()
                                        outCon = arcpy.sa.Con(Raster(barrierRasterPct) < 0, 
                                            lastMosaicRasterPct, Raster(barrierRasterPct) + Raster(
                                            lastMosaicRasterPct))
                                        outCon.save(tempMosaicRasterPct)
                                    sumBarriers()
                                else:
                                    rasterString = ('"' + barrierRasterPct + ";" + 
                                                    lastMosaicRasterPct + '"')
                                    @retry(10)
                                    def maxBarriers():
                                        randomerror()
                                        arcpy.MosaicToNewRaster_management(
                                            rasterString,mosaicDirPct,mosPctFN, "", 
                                            "32_BIT_FLOAT", arcpy.env.cellSize, "1", 
                                            "MAXIMUM", "MATCH")
                                    maxBarriers()
                                    
                            if linkLoop>1: #Clean up from previous loop
                                lu.delete_data(lastMosaicRasterPct)
                                lastMosaicDirPct =path.dirname(lastMosaicRasterPct) 
                                lu.clean_out_workspace(lastMosaicDirPct)
                                lu.delete_dir(lastMosaicDirPct)
                            
                            # lu.delete_data(lastMosaicRasterPct)
                            lastMosaicRasterPct = tempMosaicRasterPct                    
                        
                        if not cfg.SAVEBARRIERRASTERS:
                            lu.delete_data(barrierRaster)
                            if cfg.WRITE_PCT_RASTERS:
                                lu.delete_data(barrierRasterPct)
                            if cfg.WRITE_TRIM_RASTERS:                                                    
                                lu.delete_data(trmRaster)                            
                            
                            
                        # Temporarily disable links in linktable -
                        # don't want to mosaic them twice
                        for y in range (x+1,numLinks):
                            corex1 = int(coreList[y,0])
                            corey1 = int(coreList[y,1])
                            if corex1 == corex and corey1 == corey:
                                linkTable[y,cfg.LTB_LINKTYPE] = (
                                    linkTable[y,cfg.LTB_LINKTYPE] + 1000)
                            elif corex1==corey and corey1==corex:
                                linkTable[y,cfg.LTB_LINKTYPE] = (
                                    linkTable[y,cfg.LTB_LINKTYPE] + 1000)               

                if numCorridorLinks > 1 and pctDone < 100:
                    gprint('100 percent done')
                gprint('Summarizing barrier data for search radius.')
                # Re-enable rows that were temporarily disabled above
                rows = npy.where(linkTable[:,cfg.LTB_LINKTYPE]>1000)
                linkTable[rows,cfg.LTB_LINKTYPE] = (
                    linkTable[rows,cfg.LTB_LINKTYPE] - 1000)

                # -----------------------------------------------------------------
                
                # Set negative values to null or zero and write geodatabase. 
                mosaicFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" + 
                           str(radius))
                mosaicRaster = path.join(cfg.BARRIERGDB, mosaicFN) 
                arcpy.env.extent = cfg.RESRAST
                
                # if setCoresToNull:                
                    # outCon = arcpy.sa.Con(Raster(tempMosaicRaster) < 0, 0, 
                                   # tempMosaicRaster) #xxx
                    # outCon.save(mosaicRaster) #xxx                            
                # else:
                outSetNull = arcpy.sa.SetNull(tempMosaicRaster, tempMosaicRaster,
                                              "VALUE < 0") #xxx orig
                outSetNull.save(mosaicRaster)
                
                lu.delete_data(tempMosaicRaster)
                
                if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    mosaicFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix + 
                                "_Rad" + str(radius))
                    mosaicRasterTrim = path.join(cfg.BARRIERGDB, mosaicFN)
                    arcpy.CopyRaster_management(tempMosaicRasterTrim, 
                                                            mosaicRasterTrim)
                    lu.delete_data(tempMosaicRasterTrim)
                            
                if cfg.WRITE_PCT_RASTERS:                        
                    # Do same for percent raster
                    mosaicPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix + 
                                   "_Rad" + str(radius))
                    arcpy.env.extent = cfg.RESRAST
                    outSetNull = arcpy.sa.SetNull(tempMosaicRasterPct, 
                                                  tempMosaicRasterPct, "VALUE < 0")
                    mosaicRasterPct = path.join(cfg.BARRIERGDB, mosaicPctFN)
                    outSetNull.save(mosaicRasterPct)
                    lu.delete_data(tempMosaicRasterPct)
                           
                
                # 'Grow out' maximum restoration gain to
                # neighborhood size for display
                InNeighborhood = "CIRCLE " + str(outerRadius) + " MAP"
                # Execute FocalStatistics
                fillRasterFN = "barriers_fill" + str(outerRadius) + tif
                fillRaster = path.join(cfg.BARRIERBASEDIR, fillRasterFN)
                outFocalStats = arcpy.sa.FocalStatistics(mosaicRaster,
                                                InNeighborhood, "MAXIMUM","DATA")
                outFocalStats.save(fillRaster)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    fillRasterPctFN = "barriers_fill_pct" + str(outerRadius) + tif
                    fillRasterPct = path.join(cfg.BARRIERBASEDIR, fillRasterPctFN)
                    outFocalStats = arcpy.sa.FocalStatistics(mosaicRasterPct,
                                                InNeighborhood, "MAXIMUM","DATA")
                    outFocalStats.save(fillRasterPct)
                

                #Place copies of filled rasters in output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                fillRasterFN = (PREFIX + "_BarrrierCircles" + sumSuffix + "_Rad" + 
                                str(outerRadius))
                arcpy.CopyRaster_management(fillRaster, fillRasterFN) 
                if cfg.WRITE_PCT_RASTERS:
                    fillRasterPctFN = (PREFIX + "_BarrrierCircles_Pct" + sumSuffix + 
                                      "_Rad" + str(outerRadius))
                    arcpy.CopyRaster_management(fillRasterPct, fillRasterPctFN) 

                if not cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    # Create pared-down version of filled raster- remove pixels 
                    # that don't need restoring by allowing a pixel to only 
                    # contribute its resistance value to restoration gain
                    outRasterFN = "barriers_trm" + str(outerRadius) + tif
                    outRaster = path.join(cfg.BARRIERBASEDIR,outRasterFN)
                    rasterList = [fillRaster, resistFillRaster]
                    outCellStatistics = arcpy.sa.CellStatistics(rasterList, 
                                                                "MINIMUM")
                    outCellStatistics.save(outRaster)

                    #SECOND ROUND TO CLIP BY DATA VALUES IN BARRIER RASTER
                    outRaster2FN = ("barriers_trm"  + sumSuffix + str(outerRadius) 
                                   + "_2" + tif)
                    outRaster2 = path.join(cfg.BARRIERBASEDIR,outRaster2FN)
                    output = arcpy.sa.Con(IsNull(fillRaster),fillRaster,outRaster)
                    output.save(outRaster2)
                    outRasterFN = (PREFIX + "_BarrierCircles_RBMin"  + sumSuffix + 
                                  "_Rad" + str(outerRadius))

                    outRasterPath= path.join(cfg.BARRIERGDB, outRasterFN)
                    arcpy.CopyRaster_management(outRaster2, outRasterFN)
                randomerror()
                startTime=lu.elapsed_time(startTime)
            
            # Call the above function
            doRadiusLoop()
        
        # Combine rasters across radii
        gprint('\nCreating summary rasters...')
        if startRadius != endRadius:
            radiiSuffix = ('_Rad' + str(int(startRadius)) + 'To' + str(int(
                            endRadius)) + 'Step' + str(int(radiusStep)))
            mosaicFN = "bar_radii" 
            mosaicPctFN = "bar_radii_pct" 
            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range (startRadius, endRadius + 1, radiusStep):
                #Fixme: run speed test with gdb mosaicking above and here
                radiusFN = (PREFIX + "_BarrierCenters" + sumSuffix + "_Rad" 
                           + str(radius))
                radiusRaster = path.join(cfg.BARRIERGDB, radiusFN)

                if radius == startRadius:
                #If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radiusRaster, mosaicFN)
                else:
                    mosaicRaster = path.join(cfg.BARRIERBASEDIR,mosaicFN)
                    arcpy.Mosaic_management(radiusRaster, mosaicRaster,
                                         "MAXIMUM", "MATCH")
            
                if cfg.WRITE_PCT_RASTERS:                         
                    radiusPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix + 
                                   "_Rad" + str(radius))
                    radiusRasterPct = path.join(cfg.BARRIERGDB, radiusPctFN)

                    if radius == startRadius:
                    #If this is the first grid then copy rather than mosaic
                        arcpy.CopyRaster_management(radiusRasterPct, 
                                                    mosaicPctFN)
                    else:
                        mosaicRasterPct = path.join(cfg.BARRIERBASEDIR,
                                                    mosaicPctFN)
                        arcpy.Mosaic_management(radiusRasterPct, 
                                                mosaicRasterPct,
                                                "MAXIMUM", "MATCH")
                                         
            # Copy results to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            mosaicFN = PREFIX + "_BarrierCenters" + sumSuffix + radiiSuffix
            arcpy.CopyRaster_management(mosaicRaster, mosaicFN)

            if cfg.WRITE_PCT_RASTERS:            
                mosaicPctFN = (PREFIX + "_BarrierCenters_Pct" + sumSuffix + 
                              radiiSuffix)
                arcpy.CopyRaster_management(mosaicRasterPct, mosaicPctFN)
                      
            
            #GROWN OUT rasters
            fillMosaicFN = "barriers_radii_fill" + tif
            fillMosaicPctFN = "barriers_radii_fill_pct" + tif
            fillMosaicRaster = path.join(cfg.BARRIERBASEDIR,fillMosaicFN)
            fillMosaicRasterPct = path.join(cfg.BARRIERBASEDIR,fillMosaicPctFN)
            
            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range (startRadius, endRadius + 1, radiusStep):
                radiusFN = "barriers_fill" + str(radius) + tif
                #fixme- do this when only a single radius too
                radiusRaster = path.join(cfg.BARRIERBASEDIR, radiusFN)
                if radius == startRadius:
                #If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radiusRaster, fillMosaicFN)
                else:
                    arcpy.Mosaic_management(radiusRaster, fillMosaicRaster,
                                         "MAXIMUM", "MATCH")
                                         
                if cfg.WRITE_PCT_RASTERS:
                    radiusPctFN = "barriers_fill_pct" + str(radius) + tif
                    #fixme- do this when only a single radius too
                    radiusRasterPct = path.join(cfg.BARRIERBASEDIR, 
                                                radiusPctFN)
                    if radius == startRadius:
                    #If this is the first grid then copy rather than mosaic
                        arcpy.CopyRaster_management(radiusRasterPct, 
                                                    fillMosaicPctFN)
                    else:
                        arcpy.Mosaic_management(radiusRasterPct, 
                                                fillMosaicRasterPct,
                                                "MAXIMUM", "MATCH")
                                         
            # Copy result to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            fillMosaicFN = PREFIX + "_BarrierCircles" + sumSuffix + radiiSuffix
            arcpy.CopyRaster_management(fillMosaicRaster, fillMosaicFN)
            if cfg.WRITE_PCT_RASTERS:
                fillMosaicPctFN = (PREFIX + "_BarrierCircles_Pct" + sumSuffix 
                                  + radiiSuffix)
                arcpy.CopyRaster_management(fillMosaicRasterPct, 
                                            fillMosaicPctFN)
            
            # if not cfg.SUM_BARRIERS:
            # GROWN OUT AND TRIMMED rasters (Can't do percent)
            if cfg.WRITE_TRIM_RASTERS:
                trimMosaicFN = "bar_radii_trm"
                arcpy.env.workspace = cfg.BARRIERBASEDIR
                trimMosaicRaster = path.join(cfg.BARRIERBASEDIR,trimMosaicFN)
                for radius in range (startRadius, endRadius + 1, radiusStep):
                    radiusFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix  
                            + "_Rad" + str(radius))
                    #fixme- do this when only a single radius too
                    radiusRaster = path.join(cfg.BARRIERGDB, radiusFN)

                    if radius == startRadius:
                    #If this is the first grid then copy rather than mosaic
                        arcpy.CopyRaster_management(radiusRaster, trimMosaicFN)
                    else:
                        arcpy.Mosaic_management(radiusRaster, trimMosaicRaster,
                                             "MAXIMUM", "MATCH")
                # Copy result to output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                trimMosaicFN = (PREFIX + "_BarrierCircles_RBMin" + sumSuffix 
                                + radiiSuffix)
                arcpy.CopyRaster_management(trimMosaicRaster, trimMosaicFN)
        
        if not cfg.SAVE_RADIUS_RASTERS:
            arcpy.env.workspace = cfg.BARRIERGDB
            rasters = arcpy.ListRasters()
            for raster in rasters:
                if 'rad' in raster.lower() and not 'step' in raster.lower():
                    lu.delete_data(raster)
                
                
                
        arcpy.env.workspace = cfg.BARRIERGDB
        rasters = arcpy.ListRasters()
        for raster in rasters:
            gprint('\nBuilding output statistics and pyramids\n'
                        'for raster ' + raster)
            lu.build_stats(raster)

        #Clean up temporary files and directories
        if not cfg.SAVEBARRIERRASTERS:
            lu.delete_dir(cbarrierdir)
            lu.delete_dir(cfg.BARRIERBASEDIR)

        if not cfg.SAVEFOCALRASTERS:
            for radius in range(startRadius, endRadius + 1, radiusStep):
                core1path = lu.get_focal_path(1,radius)
                path1, dir1 = path.split(core1path)
                path2, dir2 = path.split(path1)
                lu.delete_dir(path2)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
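The heart of the calculation above is the ratio saved in calcBen(): the link's
least-cost distance, minus the minimum CWD reachable from each core on the
annulus, minus the circle's diameter (the cost of crossing it if the restored
cells took a resistance of 1, which is how the formula treats that term), all
divided by the diameter. A small worked example with made-up numbers:

def improvement_score(lc_dist, focal_min1, focal_min2, radius):
    """Potential CWD reduction per map unit of restored diameter."""
    dia = 2 * radius
    return (lc_dist - focal_min1 - focal_min2 - dia) / dia

# A candidate cell on a 12,000-unit corridor:
print(improvement_score(lc_dist=12000, focal_min1=4000,
                        focal_min2=5000, radius=500))
# 2.0 -> about two units of CWD saved per map unit of restored diameter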
Exemplo n.º 23
0
def STEP7_calc_centrality():
    """ Analyze network centrality using Circuitscape
        given Linkage Mapper outputs

    """
    try:
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)

        arcpy.env.workspace = cfg.SCRATCHDIR

        # Check for valid LCP shapefile
        prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep=7)
        if not arcpy.Exists(prevLcpShapefile):
            msg = ('Cannot find an LCP shapefile from step 5.  Please '
                   'rerun that step and any previous ones if necessary.')
            lu.raise_error(msg)

        # Remove lcp shapefile from this step if run previously
        lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s7.shp")
        lu.delete_data(lcpShapefile)

        invalidFNs = ['fid', 'id', 'oid', 'shape']
        if cfg.COREFN.lower() in invalidFNs:
            #if cfg.COREFN == 'FID' or cfg.COREFN == 'ID':
            lu.dashline(1)
            msg = ('ERROR: Core area field names ID, FID, SHAPE, and OID are'
                   ' reserved for ArcGIS. \nPlease choose another field- must'
                   ' be a positive integer.')
            lu.raise_error(msg)

        lu.dashline(1)
        gprint('Mapping centrality of network cores and links'
               '\nusing Circuitscape....')
        lu.dashline(0)

        # set the analysis extent and cell size to that of the resistance
        # surface
        coreCopy = path.join(cfg.SCRATCHDIR, 'cores.shp')
        arcpy.CopyFeatures_management(cfg.COREFC, coreCopy)
        if not arcpy.ListFields(coreCopy, "CF_Central"):
            arcpy.AddField_management(coreCopy, "CF_Central", "DOUBLE")

        inLinkTableFile = lu.get_prev_step_link_table(step=7)
        linkTable = lu.load_link_table(inLinkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg = ('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        if linkTable.shape[1] < 16:  # If linktable has no entries from prior
            # centrality or pinchpoint analyses
            extraCols = npy.zeros((numLinks, 6), dtype=npy.float64)
            linkTable = linkTable[:, 0:10]
            linkTable = npy.append(linkTable, extraCols, axis=1)
            linkTable[:, cfg.LTB_LCPLEN] = -1
            linkTable[:, cfg.LTB_CWDEUCR] = -1
            linkTable[:, cfg.LTB_CWDPATHR] = -1
            linkTable[:, cfg.LTB_EFFRESIST] = -1
            linkTable[:, cfg.LTB_CWDTORR] = -1
            del extraCols

        linkTable[:, cfg.LTB_CURRENT] = -1

        coresToProcess = npy.unique(linkTable[:,
                                              cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)
        del coresToProcess

        lu.dashline(0)

        coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        coreList = npy.sort(coreList)

        # set up directory for centrality
        INCENTRALITYDIR = cfg.CENTRALITYBASEDIR
        OUTCENTRALITYDIR = path.join(cfg.CENTRALITYBASEDIR,
                                     cfg.CIRCUITOUTPUTDIR_NM)
        CONFIGDIR = path.join(INCENTRALITYDIR, cfg.CIRCUITCONFIGDIR_NM)

        # Set Circuitscape options and write config file
        options = lu.set_cs_options()
        options['data_type'] = 'network'
        options['habitat_file'] = path.join(INCENTRALITYDIR,
                                            'Circuitscape_graph.txt')
        # Setting point file equal to graph to do all pairs in Circuitscape
        options['point_file'] = path.join(INCENTRALITYDIR,
                                          'Circuitscape_graph.txt')
        outputFN = 'Circuitscape_network.out'
        options['output_file'] = path.join(OUTCENTRALITYDIR, outputFN)
        configFN = 'Circuitscape_network.ini'
        outConfigFile = path.join(CONFIGDIR, configFN)
        lu.write_cs_cfg_file(outConfigFile, options)

        delRows = npy.asarray(npy.where(linkTable[:, cfg.LTB_LINKTYPE] < 1))
        delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
        delRowsVector[:] = delRows[0, :]
        LT = lu.delete_row(linkTable, delRowsVector)
        del delRows
        del delRowsVector
        graphList = npy.zeros((LT.shape[0], 3), dtype=npy.float64)
        graphList[:, 0] = LT[:, cfg.LTB_CORE1]
        graphList[:, 1] = LT[:, cfg.LTB_CORE2]
        graphList[:, 2] = LT[:, cfg.LTB_CWDIST]
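        # Each row of graphList is one edge (core1, core2, cost-weighted
        # distance); this is the network Circuitscape solves for all-pairs
        # current flow.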

        write_graph(options['habitat_file'], graphList)
        gprint('\nCalculating current flow centrality using Circuitscape...')

        memFlag = lu.call_circuitscape(cfg.CSPATH, outConfigFile)

        outputFN = 'Circuitscape_network_branch_currents_cum.txt'
        currentList = path.join(OUTCENTRALITYDIR, outputFN)

        if not arcpy.Exists(currentList):
            write_graph(options['habitat_file'], graphList)
            gprint('\nCalculating current flow centrality using Circuitscape '
                   '(2nd try)...')
            memFlag = lu.call_circuitscape(cfg.CSPATH, outConfigFile)
            if not arcpy.Exists(currentList):
                lu.dashline(1)
                msg = ('ERROR: No Circuitscape output found.\n'
                       'It looks like Circuitscape failed.')
                arcpy.AddError(msg)
                lu.write_log(msg)
                exit(1)

        currents = load_graph(currentList,
                              graphType='graph/network',
                              datatype=npy.float64)

        numLinks = currents.shape[0]
        for x in range(0, numLinks):
            corex = currents[x, 0]
            corey = currents[x, 1]

            #linkId = LT[x,cfg.LTB_LINKID]
            row = lu.get_links_from_core_pairs(linkTable, corex, corey)
            #row = lu.get_linktable_row(linkId, linkTable)
            linkTable[row, cfg.LTB_CURRENT] = currents[x, 2]

        coreCurrentFN = 'Circuitscape_network_node_currents_cum.txt'
        nodeCurrentList = path.join(OUTCENTRALITYDIR, coreCurrentFN)
        nodeCurrents = load_graph(nodeCurrentList,
                                  graphType='graph/network',
                                  datatype=npy.float64)

        numNodeCurrents = nodeCurrents.shape[0]
        rows = arcpy.UpdateCursor(coreCopy)
        for row in rows:
            coreID = row.getValue(cfg.COREFN)
            for i in range(0, numNodeCurrents):
                if coreID == nodeCurrents[i, 0]:
                    row.setValue("CF_Central", nodeCurrents[i, 1])
                    break
            rows.updateRow(row)
            #row = rows.newRow()
        del row, rows
        gprint('Done with centrality calculations.')

        finalLinkTable = lu.update_lcp_shapefile(linkTable,
                                                 lastStep=5,
                                                 thisStep=7)
        linkTableFile = path.join(cfg.DATAPASSDIR, "linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFile, inLinkTableFile)
        linkTableFinalFile = path.join(cfg.OUTPUTDIR,
                                       cfg.PREFIX + "_linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFinalFile,
                            inLinkTableFile)
        gprint('Copy of final linkTable written to ' + linkTableFinalFile)

        finalCoreFile = path.join(cfg.CORECENTRALITYGDB, cfg.PREFIX + '_Cores')
        #copy core area map to gdb.
        if not arcpy.Exists(cfg.CORECENTRALITYGDB):
            arcpy.CreateFileGDB_management(
                cfg.OUTPUTDIR, path.basename(cfg.CORECENTRALITYGDB))
        arcpy.CopyFeatures_management(coreCopy, finalCoreFile)

        gprint('Creating shapefiles with linework for links.')
        lu.write_link_maps(linkTableFinalFile, step=7)

        # Copy final link maps to gdb and clean up.
        lu.copy_final_link_maps(step=7)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
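Circuitscape's cumulative branch-current output read above is a three-column
list of (node, node, current) that only needs matching back to core pairs;
that is all the loop over `currents` does. A stand-alone sketch of that
bookkeeping with hypothetical in-memory arrays (a plain boolean match stands
in for lu.get_links_from_core_pairs):

import numpy as np

# Stand-in for the branch-current table: core1, core2, cumulative current
currents = np.array([[1, 2, 0.75],
                     [2, 3, 1.10]])

# Stand-in link table: core1, core2, centrality column initialized to -1
link_table = np.array([[1., 2., -1.],
                       [2., 3., -1.],
                       [1., 3., -1.]])

for core_a, core_b, amps in currents:
    match = (link_table[:, 0] == core_a) & (link_table[:, 1] == core_b)
    link_table[match, 2] = amps

print(link_table)   # rows (1, 2) and (2, 3) now carry their branch currents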
Exemplo n.º 24
0
def step6_calc_barriers():
    """Detect influential barriers given CWD calculations from Step 3."""
    try:
        arcpy.CheckOutExtension("spatial")
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)

        if cfg.BARRIER_CWD_THRESH is not None:
            lu.dashline(1)
            gprint('Invoking CWD Threshold of ' + str(cfg.BARRIER_CWD_THRESH)
                   + ' map units.')

        if cfg.SUM_BARRIERS:
            sum_suffix = '_Sum'
            cfg.BARRIERBASEDIR = cfg.BARRIERBASEDIR + sum_suffix
            base_name, extension = path.splitext(cfg.BARRIERGDB)
            cfg.BARRIERGDB = base_name + sum_suffix + extension

            gprint('\nBarrier scores will be SUMMED across core pairs.')
        else:
            sum_suffix = ''

        if not arcpy.Exists(cfg.BARRIERGDB):
            # Create output geodatabase
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR,
                                           path.basename(cfg.BARRIERGDB))

        start_radius = int(cfg.STARTRADIUS)
        end_radius = int(cfg.ENDRADIUS)
        radius_step = int(cfg.RADIUSSTEP)
        if radius_step == 0:
            end_radius = start_radius  # Calculate at just one radius value
            radius_step = 1

        link_table_file = lu.get_prev_step_link_table(step=6)
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        prefix = path.basename(cfg.PROJECTDIR)
        # For speed:
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = arcpy.Describe(cfg.RESRAST).MeanCellHeight
        arcpy.env.snapRaster = cfg.RESRAST
        spatialref = arcpy.Describe(cfg.RESRAST).spatialReference
        map_units = (str(spatialref.linearUnitName)).lower()
        if len(map_units) > 1 and map_units[-1] != 's':
            map_units = map_units + 's'

        if (float(arcpy.env.cellSize) > start_radius
                or start_radius > end_radius):
            msg = ('Error: minimum detection radius must be greater than '
                   'cell size (' + arcpy.env.cellSize +
                   ') \nand less than or equal to maximum detection radius.')
            lu.raise_error(msg)

        link_table = lu.load_link_table(link_table_file)
        num_links = link_table.shape[0]
        num_corridor_links = lu.report_links(link_table)
        if num_corridor_links == 0:
            lu.dashline(1)
            msg = '\nThere are no linkages. Bailing.'
            lu.raise_error(msg)

        # set up directories for barrier and barrier mosaic grids
        gprint("Creating intermediate output folder: " + cfg.BARRIERBASEDIR)
        lu.delete_dir(cfg.BARRIERBASEDIR)
        lu.create_dir(cfg.BARRIERBASEDIR)
        arcpy.CreateFolder_management(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)
        cbarrierdir = path.join(cfg.BARRIERBASEDIR, cfg.BARRIERDIR_NM)

        cores_to_process = npy.unique(
            link_table[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        max_core_num = max(cores_to_process)

        # Set up focal directories.
        # To keep there from being > 100 grids in any one directory,
        # outputs are written to:
        # barrier\focalX_ for cores 1-99 at radius X
        # barrier\focalX_1 for cores 100-199
        # etc.
        lu.dashline(0)

        for radius in range(start_radius, end_radius + 1, radius_step):
            core1path = lu.get_focal_path(1, radius)
            path1 = path.split(core1path)[0]
            path2, dir2 = path.split(path1)
            arcpy.CreateFolder_management(path.dirname(path2),
                                          path.basename(path2))
            arcpy.CreateFolder_management(path.dirname(path1),
                                          path.basename(path1))

            if max_core_num > 99:
                gprint('Creating subdirectories for ' + str(radius) + ' ' +
                       str(map_units) + ' radius analysis scale.')
                focal_dir_base_name = dir2

                cp100 = cores_to_process.astype('int32') // 100
                ind = npy.where(cp100 > 0)
                dir_nums = npy.unique(cp100[ind])
                for dir_num in dir_nums:
                    focal_dir = focal_dir_base_name + str(dir_num)
                    gprint('...' + focal_dir)
                    arcpy.CreateFolder_management(path2, focal_dir)

        # Create resistance raster with filled-in Nodata values for later use
        arcpy.env.extent = cfg.RESRAST
        resist_fill_ras = path.join(cfg.SCRATCHDIR, "resist_fill")
        output = arcpy.sa.Con(arcpy.sa.IsNull(cfg.RESRAST), 1000000000,
                              arcpy.sa.Raster(cfg.RESRAST) - 1)
        output.save(resist_fill_ras)

        core_list = link_table[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        core_list = npy.sort(core_list)

        # Loop through each search radius to calculate barriers in each link
        rad_id = 0  # Keep track of no of radii processed - used for temp dir
        for radius in range(start_radius, end_radius + 1, radius_step):
            rad_id = rad_id + 1
            link_table_tmp = link_table.copy()

            @Retry(10)
            # Can't pass vars in and modify them.
            def do_radius_loop():
                """Do radius loop."""
                link_table = link_table_tmp.copy()
                start_time = time.clock()
                link_loop = 0
                pct_done = 0
                gprint('\nMapping barriers at a radius of ' + str(radius) +
                       ' ' + str(map_units))
                if cfg.SUM_BARRIERS:
                    gprint('using SUM method')
                else:
                    gprint('using MAXIMUM method')
                if num_corridor_links > 1:
                    gprint('0 percent done')
                last_mosaic_ras = None
                last_mosaic_ras_pct = None
                for x in range(0, num_links):
                    pct_done = lu.report_pct_done(
                        link_loop, num_corridor_links, pct_done)
                    if ((link_table[x, cfg.LTB_LINKTYPE] > 0) and
                            (link_table[x, cfg.LTB_LINKTYPE] < 1000)):
                        link_loop = link_loop + 1
                        # source and target cores
                        corex = int(core_list[x, 0])
                        corey = int(core_list[x, 1])

                        # Get cwd rasters for source and target cores
                        cwd_ras1 = lu.get_cwd_path(corex)
                        cwd_ras2 = lu.get_cwd_path(corey)

                        # Mask out areas above CWD threshold
                        cwd_tmp1 = None
                        cwd_tmp2 = None
                        if cfg.BARRIER_CWD_THRESH is not None:
                            if x == 1:
                                lu.dashline(1)
                                gprint('  Using CWD threshold of '
                                       + str(cfg.BARRIER_CWD_THRESH)
                                       + ' map units.')
                            arcpy.env.extent = cfg.RESRAST
                            arcpy.env.cellSize = cfg.RESRAST
                            arcpy.env.snapRaster = cfg.RESRAST
                            cwd_tmp1 = path.join(cfg.SCRATCHDIR,
                                                 "tmp" + str(corex))
                            out_con = arcpy.sa.Con(
                                cwd_ras1 < float(cfg.BARRIER_CWD_THRESH),
                                cwd_ras1)
                            out_con.save(cwd_tmp1)
                            cwd_ras1 = cwd_tmp1
                            cwd_tmp2 = path.join(cfg.SCRATCHDIR,
                                                 "tmp" + str(corey))
                            out_con = arcpy.sa.Con(
                                cwd_ras2 < float(cfg.BARRIER_CWD_THRESH),
                                cwd_ras2)
                            out_con.save(cwd_tmp2)
                            cwd_ras2 = cwd_tmp2

                        focal_ras1 = lu.get_focal_path(corex, radius)
                        focal_ras2 = lu.get_focal_path(corey, radius)

                        link = lu.get_links_from_core_pairs(link_table,
                                                            corex, corey)
                        lc_dist = float(link_table[link, cfg.LTB_CWDIST])

                        # Detect barriers at radius using neighborhood stats
                        # Create the Neighborhood Object
                        inner_radius = radius - 1
                        outer_radius = radius

                        dia = 2 * radius
                        in_neighborhood = ("ANNULUS " + str(inner_radius)
                                           + " " + str(outer_radius) + " MAP")

                        @Retry(10)
                        def exec_focal():
                            """Execute focal statistics."""
                            if not path.exists(focal_ras1):
                                arcpy.env.extent = cwd_ras1
                                out_focal_stats = arcpy.sa.FocalStatistics(
                                    cwd_ras1, in_neighborhood,
                                    "MINIMUM", "DATA")
                                if SET_CORES_TO_NULL:
                                    # Set areas overlapping cores to NoData
                                    out_focal_stats2 = arcpy.sa.Con(
                                        out_focal_stats > 0, out_focal_stats)
                                    out_focal_stats2.save(focal_ras1)
                                else:
                                    out_focal_stats.save(focal_ras1)
                                arcpy.env.extent = cfg.RESRAST

                            if not path.exists(focal_ras2):
                                arcpy.env.extent = cwd_ras2
                                out_focal_stats = arcpy.sa.FocalStatistics(
                                    cwd_ras2, in_neighborhood,
                                    "MINIMUM", "DATA")
                                if SET_CORES_TO_NULL:
                                    # Set areas overlapping cores to NoData
                                    out_focal_stats2 = arcpy.sa.Con(
                                        out_focal_stats > 0, out_focal_stats)
                                    out_focal_stats2.save(focal_ras2)
                                else:
                                    out_focal_stats.save(focal_ras2)
                                arcpy.env.extent = cfg.RESRAST
                        exec_focal()

                        lu.delete_data(cwd_tmp1)
                        lu.delete_data(cwd_tmp2)

                        barrier_ras = path.join(
                            cbarrierdir, "b" + str(radius) + "_" + str(corex)
                            + "_" + str(corey)+'.tif')

                        # Need to set nulls to 0,
                        # also create trim rasters as we go
                        if cfg.SUM_BARRIERS:
                            out_ras = ((lc_dist - arcpy.sa.Raster(focal_ras1) -
                                        arcpy.sa.Raster(focal_ras2) - dia)
                                       / dia)
                            out_con = arcpy.sa.Con(arcpy.sa.IsNull(out_ras),
                                                   0, out_ras)
                            out_con2 = arcpy.sa.Con(out_con < 0, 0, out_con)
                            out_con2.save(barrier_ras)

                            # Execute FocalStatistics to fill out search radii
                            in_neighborhood = ("CIRCLE " + str(outer_radius)
                                               + " MAP")
                            fill_ras = path.join(
                                cbarrierdir, "b" + str(radius) + "_"
                                + str(corex) + "_" + str(corey) + "_fill.tif")
                            out_focal_stats = arcpy.sa.FocalStatistics(
                                barrier_ras, in_neighborhood,
                                "MAXIMUM", "DATA")
                            out_focal_stats.save(fill_ras)

                            if cfg.WRITE_TRIM_RASTERS:
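                                # Cap the grown-out barrier value at the
                                # resistance actually available to remove at
                                # each cell (resist_fill_ras).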
                                trm_ras = path.join(
                                    cbarrierdir, "b" + str(radius) + "_"
                                    + str(corex) + "_" + str(corey)
                                    + "_trim.tif")
                                ras_list = [fill_ras, resist_fill_ras]
                                out_cell_statistics = arcpy.sa.CellStatistics(
                                    ras_list, "MINIMUM")
                                out_cell_statistics.save(trm_ras)

                        else:

                            @Retry(10)
                            def calc_ben():
                                """Calculate potential benefit.

                                Calculate potential benefit per map unit
                                restored.
                                """
                                out_ras = (
                                    (lc_dist - arcpy.sa.Raster(focal_ras1)
                                     - arcpy.sa.Raster(focal_ras2) - dia)
                                    / dia)
                                out_ras.save(barrier_ras)
                            calc_ben()

                        if cfg.WRITE_PCT_RASTERS:
                            # Calculate % potential benefit per unit restored
                            barrier_ras_pct = path.join(
                                cbarrierdir, "b" + str(radius) + "_"
                                + str(corex) + "_" + str(corey)
                                + '_pct.tif')

                            @Retry(10)
                            def calc_ben_pct():
                                """Calc benefit percentage."""
                                outras = (100 * (arcpy.sa.Raster(barrier_ras)
                                                 / lc_dist))
                                outras.save(barrier_ras_pct)
                            calc_ben_pct()

                        # Mosaic barrier results across core area pairs
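                        # The first pair's raster is copied; later pairs are
                        # either added cell-by-cell (SUM method) or combined
                        # with MosaicToNewRaster using MAXIMUM. Each pass
                        # writes to its own temp folder, and the previous
                        # folder is deleted once its values are folded in.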
                        mosaic_dir = path.join(cfg.SCRATCHDIR, 'mos'
                                               + str(rad_id) + '_'
                                               + str(x + 1))
                        lu.create_dir(mosaic_dir)

                        mos_fn = 'mos_temp'
                        tmp_mosaic_ras = path.join(mosaic_dir, mos_fn)
                        tmp_mosaic_ras_trim = path.join(mosaic_dir,
                                                        'mos_temp_trm')
                        arcpy.env.workspace = mosaic_dir
                        if link_loop == 1:
                            last_mosaic_ras_trim = None
                            # For first grid copy rather than mosaic
                            arcpy.CopyRaster_management(barrier_ras,
                                                        tmp_mosaic_ras)
                            if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                                arcpy.CopyRaster_management(
                                    trm_ras, tmp_mosaic_ras_trim)
                        else:
                            if cfg.SUM_BARRIERS:
                                out_con = arcpy.sa.Con(
                                    arcpy.sa.Raster(barrier_ras) < 0,
                                    last_mosaic_ras,
                                    arcpy.sa.Raster(barrier_ras)
                                    + arcpy.sa.Raster(last_mosaic_ras))
                                out_con.save(tmp_mosaic_ras)
                                if cfg.WRITE_TRIM_RASTERS:
                                    out_con = arcpy.sa.Con(
                                        arcpy.sa.Raster(trm_ras) < 0,
                                        last_mosaic_ras_trim,
                                        arcpy.sa.Raster(trm_ras)
                                        + arcpy.sa.Raster(last_mosaic_ras_trim)
                                        )
                                    out_con.save(tmp_mosaic_ras_trim)

                            else:
                                in_rasters = (";".join([barrier_ras,
                                                        last_mosaic_ras]))

                                @Retry(10)
                                def mosaic_to_new():
                                    """Mosaic to new raster."""
                                    arcpy.MosaicToNewRaster_management(
                                        input_rasters=in_rasters,
                                        output_location=mosaic_dir,
                                        raster_dataset_name_with_extension=(
                                            mos_fn),
                                        pixel_type="32_BIT_FLOAT",
                                        cellsize=arcpy.env.cellSize,
                                        number_of_bands="1",
                                        mosaic_method="MAXIMUM")
                                mosaic_to_new()

                        if link_loop > 1:  # Clean up from previous loop
                            lu.delete_data(last_mosaic_ras)
                            last_mosaic_dir = path.dirname(last_mosaic_ras)
                            lu.clean_out_workspace(last_mosaic_dir)
                            lu.delete_dir(last_mosaic_dir)

                        last_mosaic_ras = tmp_mosaic_ras
                        if cfg.WRITE_TRIM_RASTERS:
                            last_mosaic_ras_trim = tmp_mosaic_ras_trim
                        if cfg.WRITE_PCT_RASTERS:
                            mos_pct_fn = 'mos_temp_pct'
                            mosaic_dir_pct = path.join(cfg.SCRATCHDIR, 'mosP'
                                                       + str(rad_id) + '_'
                                                       + str(x+1))
                            lu.create_dir(mosaic_dir_pct)
                            tmp_mosaic_ras_pct = path.join(mosaic_dir_pct,
                                                           mos_pct_fn)
                            if link_loop == 1:
                                # If this is the first grid then copy
                                # rather than mosaic
                                if cfg.SUM_BARRIERS:
                                    out_con = arcpy.sa.Con(
                                        arcpy.sa.Raster(barrier_ras_pct)
                                        < 0, 0,
                                        arcpy.sa.Con(
                                            arcpy.sa.IsNull(barrier_ras_pct),
                                            0, barrier_ras_pct))
                                    out_con.save(tmp_mosaic_ras_pct)
                                else:
                                    arcpy.CopyRaster_management(
                                        barrier_ras_pct, tmp_mosaic_ras_pct)

                            else:
                                if cfg.SUM_BARRIERS:

                                    @Retry(10)
                                    def sum_barriers():
                                        """Sum barriers."""
                                        out_con = arcpy.sa.Con(
                                            arcpy.sa.Raster(barrier_ras_pct)
                                            < 0,
                                            last_mosaic_ras_pct,
                                            arcpy.sa.Raster(barrier_ras_pct)
                                            + arcpy.sa.Raster(
                                                last_mosaic_ras_pct))
                                        out_con.save(tmp_mosaic_ras_pct)
                                    sum_barriers()
                                else:
                                    in_rasters = (";".join([barrier_ras_pct,
                                                  last_mosaic_ras_pct]))

                                    @Retry(10)
                                    def max_barriers():
                                        """Get max barriers."""
                                        arcpy.MosaicToNewRaster_management(
                                            input_rasters=in_rasters,
                                            output_location=mosaic_dir_pct,
                                            raster_dataset_name_with_extension=(
                                                mos_pct_fn),
                                            pixel_type="32_BIT_FLOAT",
                                            cellsize=arcpy.env.cellSize,
                                            number_of_bands="1",
                                            mosaic_method="MAXIMUM")
                                    max_barriers()

                            if link_loop > 1:  # Clean up from previous loop
                                lu.delete_data(last_mosaic_ras_pct)
                                last_mosaic_dir_pct = path.dirname(
                                    last_mosaic_ras_pct)
                                lu.clean_out_workspace(last_mosaic_dir_pct)
                                lu.delete_dir(last_mosaic_dir_pct)

                            last_mosaic_ras_pct = tmp_mosaic_ras_pct

                        if not cfg.SAVEBARRIERRASTERS:
                            lu.delete_data(barrier_ras)
                            if cfg.WRITE_PCT_RASTERS:
                                lu.delete_data(barrier_ras_pct)
                            if cfg.WRITE_TRIM_RASTERS:
                                lu.delete_data(trm_ras)

                        # Temporarily disable links in linktable -
                        # don't want to mosaic them twice
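                        # (done by adding 1000 to LTB_LINKTYPE for any later
                        # row joining the same core pair; the offset is
                        # removed after the link loop finishes)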
                        for y in range(x + 1, num_links):
                            corex1 = int(core_list[y, 0])
                            corey1 = int(core_list[y, 1])
                            if corex1 == corex and corey1 == corey:
                                link_table[y, cfg.LTB_LINKTYPE] = (
                                    link_table[y, cfg.LTB_LINKTYPE] + 1000)
                            elif corex1 == corey and corey1 == corex:
                                link_table[y, cfg.LTB_LINKTYPE] = (
                                    link_table[y, cfg.LTB_LINKTYPE] + 1000)

                if num_corridor_links > 1 and pct_done < 100:
                    gprint('100 percent done')
                gprint('Summarizing barrier data for search radius.')
                # Re-enable links that were temporarily disabled above
                rows = npy.where(link_table[:, cfg.LTB_LINKTYPE] > 1000)
                link_table[rows, cfg.LTB_LINKTYPE] = (
                    link_table[rows, cfg.LTB_LINKTYPE] - 1000)
                # -----------------------------------------------------------------
                # Set negative values to null or zero and write geodatabase.
                mosaic_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad" +
                             str(radius))
                mosaic_ras = path.join(cfg.BARRIERGDB, mosaic_fn)
                arcpy.env.extent = cfg.RESRAST

                out_set_null = arcpy.sa.SetNull(tmp_mosaic_ras,
                                                tmp_mosaic_ras,
                                                "VALUE < 0")  # xxx orig
                out_set_null.save(mosaic_ras)

                lu.delete_data(tmp_mosaic_ras)

                if cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    mosaic_fn = (prefix + "_BarrierCircles_RBMin" + sum_suffix
                                 + "_Rad" + str(radius))
                    mosaic_ras_trim = path.join(cfg.BARRIERGDB, mosaic_fn)
                    arcpy.CopyRaster_management(tmp_mosaic_ras_trim,
                                                mosaic_ras_trim)
                    lu.delete_data(tmp_mosaic_ras_trim)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    mosaic_pct_fn = (prefix + "_BarrierCenters_Pct"
                                     + sum_suffix + "_Rad" + str(radius))
                    arcpy.env.extent = cfg.RESRAST
                    out_set_null = arcpy.sa.SetNull(tmp_mosaic_ras_pct,
                                                    tmp_mosaic_ras_pct,
                                                    "VALUE < 0")
                    mosaic_ras_pct = path.join(cfg.BARRIERGDB, mosaic_pct_fn)
                    out_set_null.save(mosaic_ras_pct)
                    lu.delete_data(tmp_mosaic_ras_pct)

                # 'Grow out' maximum restoration gain to
                # neighborhood size for display
                in_neighborhood = "CIRCLE " + str(outer_radius) + " MAP"
                # Execute FocalStatistics
                fill_ras_fn = "barriers_fill" + str(outer_radius) + TIF
                fill_ras = path.join(cfg.BARRIERBASEDIR, fill_ras_fn)
                out_focal_stats = arcpy.sa.FocalStatistics(
                    mosaic_ras, in_neighborhood, "MAXIMUM", "DATA")
                out_focal_stats.save(fill_ras)

                if cfg.WRITE_PCT_RASTERS:
                    # Do same for percent raster
                    fill_ras_pct_fn = (
                        "barriers_fill_pct" + str(outer_radius) + TIF)
                    fill_ras_pct = path.join(cfg.BARRIERBASEDIR,
                                             fill_ras_pct_fn)
                    out_focal_stats = arcpy.sa.FocalStatistics(
                        mosaic_ras_pct, in_neighborhood, "MAXIMUM", "DATA")
                    out_focal_stats.save(fill_ras_pct)

                # Place copies of filled rasters in output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                fill_ras_fn = (prefix + "_BarrierCircles" + sum_suffix
                               + "_Rad" + str(outer_radius))
                arcpy.CopyRaster_management(fill_ras, fill_ras_fn)
                if cfg.WRITE_PCT_RASTERS:
                    fill_ras_pct_fn = (prefix + "_BarrierCircles_Pct"
                                       + sum_suffix + "_Rad"
                                       + str(outer_radius))
                    arcpy.CopyRaster_management(fill_ras_pct,
                                                fill_ras_pct_fn)

                if not cfg.SUM_BARRIERS and cfg.WRITE_TRIM_RASTERS:
                    # Create pared-down version of filled raster - remove
                    # pixels that don't need restoring by allowing a pixel to
                    # contribute at most its own resistance to restoration gain
                    out_ras_fn = "barriers_trm" + str(outer_radius) + TIF
                    out_ras = path.join(cfg.BARRIERBASEDIR, out_ras_fn)
                    ras_list = [fill_ras, resist_fill_ras]
                    out_cell_statistics = arcpy.sa.CellStatistics(ras_list,
                                                                  "MINIMUM")
                    out_cell_statistics.save(out_ras)

                    # SECOND ROUND TO CLIP BY DATA VALUES IN BARRIER RASTER
                    out_ras_2fn = ("barriers_trm" + sum_suffix
                                   + str(outer_radius) + "_2" + TIF)
                    out_ras2 = path.join(cfg.BARRIERBASEDIR, out_ras_2fn)
                    output = arcpy.sa.Con(arcpy.sa.IsNull(fill_ras),
                                          fill_ras, out_ras)
                    output.save(out_ras2)
                    out_ras_fn = (prefix + "_BarrierCircles_RBMin"
                                  + sum_suffix + "_Rad"
                                  + str(outer_radius))
                    arcpy.CopyRaster_management(out_ras2, out_ras_fn)
                start_time = lu.elapsed_time(start_time)

            # Call the above function
            do_radius_loop()

        # Combine rasters across radii
        gprint('\nCreating summary rasters...')
        if start_radius != end_radius:
            radii_suffix = ('_Rad' + str(int(start_radius)) + 'To'
                            + str(int(end_radius)) + 'Step'
                            + str(int(radius_step)))
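            # Per-radius center rasters are mosaicked with MAXIMUM, so each
            # cell in the combined raster keeps its best score across radii.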
            mosaic_fn = "bar_radii"
            mosaic_pct_fn = "bar_radii_pct"
            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range(start_radius, end_radius + 1, radius_step):
                # Fixme: run speed test with gdb mosaicking above and here
                radius_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad"
                             + str(radius))
                radius_ras = path.join(cfg.BARRIERGDB, radius_fn)

                if radius == start_radius:
                    # If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radius_ras, mosaic_fn)
                else:
                    mosaic_ras = path.join(cfg.BARRIERBASEDIR, mosaic_fn)
                    arcpy.Mosaic_management(radius_ras, mosaic_ras,
                                            "MAXIMUM", "MATCH")

                if cfg.WRITE_PCT_RASTERS:
                    radius_pct_fn = (prefix + "_BarrierCenters_Pct"
                                     + sum_suffix + "_Rad" + str(radius))
                    radius_ras_pct = path.join(cfg.BARRIERGDB, radius_pct_fn)

                    if radius == start_radius:
                        # If this is the first grid then copy rather than
                        # mosaic
                        arcpy.CopyRaster_management(radius_ras_pct,
                                                    mosaic_pct_fn)
                    else:
                        mosaic_ras_pct = path.join(cfg.BARRIERBASEDIR,
                                                   mosaic_pct_fn)
                        arcpy.Mosaic_management(radius_ras_pct,
                                                mosaic_ras_pct,
                                                "MAXIMUM", "MATCH")

            # Copy results to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            mosaic_fn = prefix + "_BarrierCenters" + sum_suffix + radii_suffix
            arcpy.CopyRaster_management(mosaic_ras, mosaic_fn)

            if cfg.WRITE_PCT_RASTERS:
                mosaic_pct_fn = (prefix + "_BarrierCenters_Pct" + sum_suffix +
                                 radii_suffix)
                arcpy.CopyRaster_management(mosaic_ras_pct, mosaic_pct_fn)

            # GROWN OUT rasters
            fill_mosaic_fn = "barriers_radii_fill" + TIF
            fill_mosaic_pct_fn = "barriers_radii_fill_pct" + TIF
            fill_mosaic_ras = path.join(cfg.BARRIERBASEDIR, fill_mosaic_fn)
            trim_mosaic_ras_pct = path.join(cfg.BARRIERBASEDIR,
                                            fill_mosaic_pct_fn)

            arcpy.env.workspace = cfg.BARRIERBASEDIR
            for radius in range(start_radius, end_radius + 1, radius_step):
                radius_fn = "barriers_fill" + str(radius) + TIF
                # fixme- do this when only a single radius too
                radius_ras = path.join(cfg.BARRIERBASEDIR, radius_fn)
                if radius == start_radius:
                    # If this is the first grid then copy rather than mosaic
                    arcpy.CopyRaster_management(radius_ras, fill_mosaic_fn)
                else:
                    arcpy.Mosaic_management(radius_ras, fill_mosaic_ras,
                                            "MAXIMUM", "MATCH")

                if cfg.WRITE_PCT_RASTERS:
                    radius_pct_fn = "barriers_fill_pct" + str(radius) + TIF
                    # fixme- do this when only a single radius too
                    radius_ras_pct = path.join(cfg.BARRIERBASEDIR,
                                               radius_pct_fn)
                    if radius == start_radius:
                        # For first grid copy rather than mosaic
                        arcpy.CopyRaster_management(radius_ras_pct,
                                                    fill_mosaic_pct_fn)
                    else:
                        arcpy.Mosaic_management(radius_ras_pct,
                                                trim_mosaic_ras_pct,
                                                "MAXIMUM", "MATCH")

            # Copy result to output geodatabase
            arcpy.env.workspace = cfg.BARRIERGDB
            fill_mosaic_fn = (prefix + "_BarrierCircles" + sum_suffix
                              + radii_suffix)
            arcpy.CopyRaster_management(fill_mosaic_ras, fill_mosaic_fn)
            if cfg.WRITE_PCT_RASTERS:
                fill_mosaic_pct_fn = (prefix + "_BarrierCircles_Pct"
                                      + sum_suffix + radii_suffix)
                arcpy.CopyRaster_management(trim_mosaic_ras_pct,
                                            fill_mosaic_pct_fn)

            # GROWN OUT AND TRIMMED rasters (Can't do percent)
            if cfg.WRITE_TRIM_RASTERS:
                trim_mosaic_fn = "bar_radii_trm"
                arcpy.env.workspace = cfg.BARRIERBASEDIR
                trim_mosaic_ras = path.join(cfg.BARRIERBASEDIR, trim_mosaic_fn)
                for radius in range(start_radius, end_radius + 1, radius_step):
                    radius_fn = (prefix + "_BarrierCircles_RBMin" + sum_suffix
                                 + "_Rad" + str(radius))
                    # fixme- do this when only a single radius too
                    radius_ras = path.join(cfg.BARRIERGDB, radius_fn)

                    if radius == start_radius:
                        # For first grid copy rather than mosaic
                        arcpy.CopyRaster_management(radius_ras, trim_mosaic_fn)
                    else:
                        arcpy.Mosaic_management(radius_ras, trim_mosaic_ras,
                                                "MAXIMUM", "MATCH")
                # Copy result to output geodatabase
                arcpy.env.workspace = cfg.BARRIERGDB
                trim_mosaic_fn = (prefix + "_BarrierCircles_RBMin" + sum_suffix
                                  + radii_suffix)
                arcpy.CopyRaster_management(trim_mosaic_ras, trim_mosaic_fn)

        if not cfg.SAVE_RADIUS_RASTERS:
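            # Remove per-radius rasters (names containing "Rad" but not
            # "Step") from the output geodatabase.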
            arcpy.env.workspace = cfg.BARRIERGDB
            rasters = arcpy.ListRasters()
            for raster in rasters:
                if 'rad' in raster.lower() and 'step' not in raster.lower():
                    lu.delete_data(raster)

        arcpy.env.workspace = cfg.BARRIERGDB
        rasters = arcpy.ListRasters()
        for raster in rasters:
            gprint('\nBuilding output statistics and pyramids\n'
                   'for raster ' + raster)
            lu.build_stats(raster)

        # Clean up temporary files and directories
        if not cfg.SAVEBARRIERRASTERS:
            lu.delete_dir(cbarrierdir)
            lu.delete_dir(cfg.BARRIERBASEDIR)

        if not cfg.SAVEFOCALRASTERS:
            for radius in range(start_radius, end_radius + 1, radius_step):
                core1path = lu.get_focal_path(1, radius)
                path1 = path.split(core1path)[0]
                path2 = path.split(path1)[0]
                lu.delete_dir(path2)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 6. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return