Example #1
0
def cc_copy_inputs():
    """Clip Climate Linkage Mapper inputs to smallest extent"""
    lm_util.gprint("\nCOPYING LAYERS AND, IF NECESSARY, REDUCING EXTENT")
    ext_poly = "ext_poly"  # Name of the extent polygon feature class
    climate_extent = arcpy.Raster(cc_env.climate_rast).extent

    if cc_env.resist_rast is not None:
        # Intersect the climate and resistance extents
        resist_extent = arcpy.Raster(cc_env.resist_rast).extent
        xmin = max(climate_extent.XMin, resist_extent.XMin)
        ymin = max(climate_extent.YMin, resist_extent.YMin)
        xmax = min(climate_extent.XMax, resist_extent.XMax)
        ymax = min(climate_extent.YMax, resist_extent.YMax)

        # Set to minimum extent if resistance raster was given
        arcpy.env.extent = arcpy.Extent(xmin, ymin, xmax, ymax)

        # Want climate and resistance rasters in same spatial ref
        # with same nodata cells
        proj_resist_rast = sa.Con(sa.IsNull(cc_env.climate_rast),
                                  sa.Int(cc_env.climate_rast),
                                  cc_env.resist_rast)
        proj_resist_rast.save(cc_env.prj_resist_rast)
    else:
        # No resistance raster given: use the climate extent unchanged
        xmin, ymin = climate_extent.XMin, climate_extent.YMin
        xmax, ymax = climate_extent.XMax, climate_extent.YMax

        # Substitute a constant resistance of 1 wherever climate has data
        ones_resist_rast = sa.Con(sa.IsNull(cc_env.climate_rast),
                                  sa.Int(cc_env.climate_rast), 1)
        ones_resist_rast.save(cc_env.prj_resist_rast)

    arcpy.CopyRaster_management(cc_env.climate_rast, cc_env.prj_climate_rast)

    # Create core raster clipped to the chosen extent
    arcpy.env.extent = arcpy.Extent(xmin, ymin, xmax, ymax)
    lm_util.delete_data(cc_env.prj_core_rast)
    arcpy.FeatureToRaster_conversion(
        cc_env.core_fc, cc_env.core_fld, cc_env.prj_core_rast,
        arcpy.Describe(cc_env.climate_rast).MeanCellHeight)
    arcpy.env.extent = None

    # Build the four corner points of the extent rectangle
    array = arcpy.Array()
    for corner_x, corner_y in ((xmin, ymin), (xmax, ymin),
                               (xmax, ymax), (xmin, ymax)):
        array.add(arcpy.Point(corner_x, corner_y))
    # Repeat the first point to close the polygon boundary
    array.add(array.getObject(0))
    # Create a polygon geometry object using the array object
    ext_feat = arcpy.Polygon(array)
    arcpy.CopyFeatures_management(ext_feat, ext_poly)
    # Clip core feature class to the extent polygon
    arcpy.Clip_analysis(cc_env.core_fc, ext_poly, cc_env.prj_core_fc)
Example #2
0
def arcpy_con():
    """Clamp every FVC raster in the workspace into the 0-100 range."""
    arcpy.CheckOutExtension('Spatial')
    arcpy.env.workspace = r'D:\FVC\FVC_1km_new\fusion_int1\\'
    out_dir = r'D:\FVC\FVC_1km_new\fusion_int_0-100_1\\'
    mkdir(out_dir)
    for raster_name in arcpy.ListRasters('*.tif*'):
        print(raster_name)
        clamped = sa.Raster(raster_name)
        # Raise negative values to 0, then lower values above 100 to 100
        clamped = sa.Con(clamped, 0, clamped, "VALUE < 0")
        clamped = sa.Con(clamped, 100, clamped, "VALUE > 100")
        clamped.save(out_dir + raster_name)
def main():
    """Convert clipped AVHRR rasters to fractional vegetation cover (FVC).

    Reads every ``*.tif`` raster under ``this_root + '\\AVHRR_clipped\\'``,
    scales values by 1/8000, clamps the result to [0, 1], and saves each
    output under ``this_root + 'AVHRR_fvc\\'`` with the same file name,
    reporting progress via ``logger.process_bar``.
    """
    arcpy.CheckOutExtension('Spatial')
    arcpy.env.workspace = this_root+'\\AVHRR_clipped\\'
    rasters = arcpy.ListRasters('*.tif*')
    time_init = time.time()
    # enumerate() replaces the original manual `flag` counter; the unused
    # `max = ras.maximum` local (which shadowed the builtin) is removed.
    for flag, raster in enumerate(rasters):
        start = time.time()
        ras = sa.Raster(raster)
        # Recompute raster statistics before map algebra (kept from original)
        arcpy.CalculateStatistics_management(ras)
        fvc = ras / 8000.
        # Clamp to the valid FVC range [0, 1]
        fvc = sa.Con(fvc, 0, fvc, "VALUE < 0")
        fvc = sa.Con(fvc, 1, fvc, "VALUE > 1")
        fvc.save(this_root+'AVHRR_fvc\\'+raster)
        end = time.time()
        logger.process_bar(flag, len(rasters), time_init, start, end, raster)
    def create_masked_raster(self, in_raster, mask_raster, out_raster):
        """
        Create a masked raster

        Cells where ``mask_raster`` equals 0 take their value from
        ``in_raster``; all other cells keep the mask raster's value.

        Parameters:
        -----------
        in_raster : str
            input raster to apply mask
        mask_raster: str
            raster to use as a mask
        out_raster: str
            name of output masked raster

        Returns:
        --------
        None
        """
        print('Creating masked raster for ' + in_raster)
        try:
            scratch = sa.Con(mask_raster, in_raster, mask_raster, "VALUE=0")
            scratch.save(out_raster)
        except Exception as err:
            # Narrowed from a bare `except:` (which also caught SystemExit /
            # KeyboardInterrupt); chain the original error so its traceback
            # is preserved alongside the geoprocessing messages.
            raise Exception(arcpy.GetMessages()) from err
Example #5
0
    dict = {}
    dict[1] = 3  #D
    dict[2] = 2  #C
    dict[3] = 1  #V
    dict[4] = 2  #C/D
    dict[5] = 0  #A
    dict[6] = 1  #B/D
    dict[7] = 0  #A/D
    return dict


# Lookup tables: curve numbers and hydrologic soil group codes
cnDict = createCNDict()
hgrpDict = createHgrpLookup()

# Plug NoData values in hgrpRasters with a class of D
hgrpFix = sa.Con(sa.IsNull(hgrpRaster), 1, hgrpRaster)

# combine the rasters
msg("Combining NLCD and Hydrologic group rasters")
comboRaster = sa.Combine([nlcdRaster, hgrpFix])
# The last two fields of the combined raster table hold the input values,
# in the order the rasters were passed to Combine (NLCD, then hgrp)
nlcdFld = arcpy.ListFields(comboRaster)[-2].name
hgrpFld = arcpy.ListFields(comboRaster)[-1].name

# add a new field to the comboRaster
msg("Adding curve number field to output raster")
arcpy.AddField_management(comboRaster, "CN", "LONG", 4)

# update values based on the dictionary
msg("Adding curve number values to output raster")
rows = arcpy.UpdateCursor(comboRaster)
row = rows.next()
    print txt
    if type == "message":
        arcpy.AddMessage(txt)
    elif type == "warning":
        arcpy.AddWarning(txt)
    elif type == "error":
        arcpy.AddError(txt)


# ---Processes---
## Create a raster of stream cells, with values set to elevation
msg("Creating a streams raster (inverting NHD FlowDirNull Raster)")
strmRaster = sa.IsNull(fdrnullRaster)

msg("Assigning elevation values to stream cells")
strmElevRaster1 = sa.Con(strmRaster, elevRaster)
# Raise elevation values below 1 to 1 so every stream cell carries a
# usable (non-zero) value
strmElevRaster = sa.Con(strmElevRaster1, strmElevRaster1, 1, "VALUE >= 1")

## Create a watershed, using the stream elevation raster as the pour points.
'''Cell values in this output are the elevation at the point where the given
   cell's location flows into the stream. This, in turn, can be compared to
   the actual elevation at the cell's location to compute the vertical drop
   between the cell and where it drains into the stream'''
msg("Calculating watersheds labeled with elevation")
try:
    elevSheds = sa.Watershed(fldrRaster, strmElevRaster, "VALUE")
except:
    # NOTE(review): bare except — reports geoprocessing messages, then exits
    msg(arcpy.GetMessages(), "error")
    sys.exit(1)
Example #7
0
arcpy.CheckOutExtension("Spatial")

#Allow to overwrite files
arcpy.env.overwriteOutput = True

#Get relative paths
scriptWS = os.path.basename(sys.argv[0])
rootWS = os.path.dirname(sys.path[0])
dataWS = os.path.join(rootWS, "Data")
tempWS = os.path.join(rootWS, "Scratch")

#Set Environmental Variables
arcpy.env.workspace = tempWS + "\\rasters_elev"

# Creates a Raster Object from the forest cover for map algebra calculations
habitat = arcpy.Raster(dataWS + "\\forest_cover.img")

#Get rasters refined by elevation
rasters = arcpy.ListRasters()
print rasters

#Refine ranges by habitat
for in_raster in rasters:
    mask_raster = arcpy.Raster(in_raster)
    arcpy.env.mask = mask_raster  # use the listed raster as analysis mask
    out_raster = sa.Con(habitat == 1, 1, 0)  #Refine by habitat
    out_raster.save((tempWS + "\\rasters_hab\\" +
                     in_raster))  #Save the refined range to scratch folder

print "Done!"
Example #8
0
# Per-band weights (percent), supplied as script arguments
wtDict['BETWEENNESS'] = float(sys.argv[7])
wtDict['CLOSENESS'] = float(sys.argv[8])
wtDict['CONNECTEDAREA'] = float(sys.argv[9])
wtDict['IDWAREA'] = float(sys.argv[10])

#Output variables
wtdSumRaster = sys.argv[11]

##-PROCESSING-
# Get a list of rasters in the stack
origWS = arcpy.env.workspace
arcpy.env.workspace = arcpy.Describe(patchStack).CatalogPath
bandList = arcpy.ListRasters()
#arcpy.env.workspace = origWS

# Loop through the bands
first = True
for band in wtDict.keys():
    weight = wtDict[band]
    if weight == 0: continue  # skip bands given no weight
    arcpy.AddMessage("Band %s will comprise %s percent" % (band, weight))
    # Convert the band to a weighted input: slice into 100 equal-interval
    # classes, then scale by the band's fractional weight
    wtRaster = sa.Slice(band, 100, "EQUAL_INTERVAL") * (weight / 100.0)
    if first:  #If it's the first, then copy it
        wtdSum = sa.Con(wtRaster, wtRaster)
        first = False
    else:
        wtdSum = sa.Plus(wtdSum, wtRaster)

# NOTE(review): if every weight is 0, wtdSum is never assigned and the save
# below raises NameError — confirm a nonzero weight is guaranteed upstream
wtdSum.save(wtdSumRaster)
Example #9
0
##-FUNCTIONS-
def msg(txt):
    """Print txt and echo it to the ArcGIS geoprocessing messages."""
    print txt
    arcpy.AddMessage(txt)

##-PROCESSING-
# Create the output workspace, if needed
scratchWS = arcpy.env.scratchWorkspace
if not os.path.exists(os.path.join(scratchWS,'tmpBands')):
    msg("Creating tmp workspce to hold band rasters")
    arcpy.CreateFolder_management(scratchWS,'tmpBands')
arcpy.env.scratchWorkspace = scratchWS + "\\tmpBands"

# Create a copy of the patch raster
msg("Creating base raster")
patchGrid = sa.Con(patchRaster,patchRaster)

# Join the attributes from each CSV table (keyed on PatchID) to the patchGrid
msg("...joining attributes")
for tbl in CSVs.split(";"):
    result = arcpy.JoinField_management(patchGrid,"VALUE",tbl,"PatchID",["Rank"])

# Loop through the fields and create individual rasters
msg("...creating bands")
attribFlds = arcpy.ListFields(patchGrid)
keepBands = []
keepNames = []
for fld in attribFlds:
    # Keep only the joined attribute fields, not the standard raster fields
    if not fld.name in ['Rowid','VALUE','GRID_CODE','grid_code','PATCHID','COUNT']:
        msg("   ...adding band %s" %fld.name)
        keepNames.append(fld.name)
Example #10
0
 # Clip slope service layers
 arcpy.AddMessage("Clipping slope...")
 slopeClip = os.path.join(scratch,"slopeClip")
 outSlope = sa.ExtractByMask(inputSlope,inputAOI)
 outSlope.save(slopeClip)
 deleteme.append(slopeClip)  # register intermediate for later cleanup
 
 # Set all Slope values greater than the vehicle's off road max to that value
 arcpy.AddMessage("Reclassifying Slope ...")
 reclassSlope = os.path.join(os.path.dirname(scratch),"reclassSlope.tif")
 if debug == True: arcpy.AddMessage("reclassSlope: " + str(reclassSlope))
 if debug == True: arcpy.AddMessage(str(time.strftime("Con: %m/%d/%Y  %H:%M:%S", time.localtime())))
 # Cap slope at the vehicle's off-road maximum (vehicleParams[5])
 outCon = sa.Con(sa.Raster(slopeClip) > float(vehicleParams[5]),float(vehicleParams[5]),sa.Raster(slopeClip))    
 # FAILS HERE:
 outCon.save(reclassSlope)
 # ERROR 010240: Could not save raster dataset to C:\Workspace\MAoT for A4W\A4W\test.gdb\reclassSlope with output format FGDBR.
 #
 # 010240 : Could not save raster dataset to <value> with output format <value>.
 #
 # Description
 # The output raster dataset could not be created in the specified format. There may already exist an output raster with the
 # same name and format. Certain raster formats have limitations on the range of values that are supported. For example, the GIF
 # format only supports a value range of 0 to 255, which would be a problem if the output raster would have a range of -10 to 365.
 #
 # Solution
 # Check that a raster with the same name and format does not already exist in the output location. Also, check the technical
 # specifications for raster dataset formats to make sure that the expected range of values in the output is compatible with
 # the specified format.
Example #11
0
# Distance threshold (map units) for development influence
distanceThreshold = float(1200)
#Output variable
threatCSV_Filename = sys.argv[3]


##--FUNCTIONS--
def msg(txt):
    """Print txt and echo it to the ArcGIS geoprocessing messages."""
    print txt
    arcpy.AddMessage(txt)
    return


##--PROCESSES--
# Extract development from NLCD
msg("Extracting developed area from NLCD")
# 1 for NLCD developed classes (22, 23, 24), 0 elsewhere
devBinary = sa.Con(nlcdRaster, 1, 0, "VALUE IN (22,23,24)")

# Compute focal mean of development
msg("Calculating focal stats")
nbrHood = sa.NbrCircle(distanceThreshold, "MAP")
focalMean = sa.FocalStatistics(devBinary, nbrHood, "MEAN")

# Compute distance decay
msg("Computing distance decayed development")
# Same developed classes, but NoData elsewhere so EucDistance measures
# distance away from developed cells only
devNodata = sa.Con(nlcdRaster, 1, '', "VALUE IN (22,23,24)")
eucDist = sa.EucDistance(devNodata)
# k chosen so exp(k * d) decays to 0.01 at d = distanceThreshold
k = math.log(0.01) / distanceThreshold
devDecay = sa.Exp(eucDist * k)

# FOCAL MEAN: Compute zonal stats
msg("Computing patch threat values")
Example #12
0
    def glacierMaskProcessing(self):
        """
        Program description:
        The thresholds for the ratio images should be tuned individually, but
        usually lie around 2.0. TM4/TM5 shows fewer errors in shadowed areas,
        but captures less debris than TM3/TM5. Paul and Kääb (2005) therefore
        recommend an additional threshold on the first band. Whichever method
        is used in the end, a visual check and manual correction is necessary.

        The slope thresholds concern the possible break-off of ice masses and
        are not meant as thresholds for glacier occurrence. Still, 45° is
        worth testing, since with SRTM the true slopes tend to be higher due
        to smoothing/resolution; 60° seems too high for SRTM.

        INPUT_PARAMETERS:
        inputValue_      - 

        COMMENTS:
        """

        # Ask the user for the multispectral input scene
        inFile = tkFileDialog.askopenfilename(
            defaultextension='TIFF',
            filetypes=[('ERDAS IMAGINE', '*.img'), ('TIFF', '*.tif')],
            initialdir=self.workspace,
            initialfile=INITFILE_RASTER,
            multiple=False,
            parent=tkRoot,
            title=
            'Chose input multispectral raster file for glacier mask processing (ArcPy)'
        )

        #-------------------------------------------------------------------------------
        #Image ration

        #ratioMethod = 'ndsi' #best with threshold 0.6
        ratioMethod = 'tm3tm5'  #best with threshold 2.6 #--> Best method!
        #ratioMethod = 'tm4tm5' #best with threshold 2.0

        print "Start creating ratio image with method '" + str(
            ratioMethod) + "' ..."

        if ratioMethod == 'ndsi':
            pRatio = self.pArcPyTools.NDSI(
                inFile
            )  #Using Normalized-Difference Snow Index (NDSI) --> See Function
            thresholdRatio = 0.6  #Thresholding NDSI > threshold = snow/ice ...  0.5 - 0.6, or 0.7 (rocaviteanuetal2008); #best value: 0.6

        elif ratioMethod == 'tm3tm5':  #Red/SWIR
            pRatio = sa.Divide(sa.Float(sa.Raster(inFile + "\Band_3")),
                               sa.Float(sa.Raster(inFile + "\Band_5")))
            thresholdRatio = 2.6  #tm3tm5: th=2 (rocaviteanuetal2008); CCI: about 1.8; Paul and Andreassen (2009): 2.6; #best value: 2.6

        elif ratioMethod == 'tm4tm5':  #VNIR/SWIR
            pRatio = sa.Divide(sa.Float(sa.Raster(inFile + "\Band_4")),
                               sa.Float(sa.Raster(inFile + "\Band_5")))
            thresholdRatio = 2.0  #Tim Golletz threshold = 3 (tm4/tm5) cf. Paul 2001; # best value: 2.0

        else:
            raise Exception("Error: No ratio approach chosen")

        outFileName = str(ratioMethod) + "_arcpy_" + sa.Raster(inFile).name
        pRatio.save(outFileName)
        print "Ratio image '" + str(
            outFileName) + "' with ratio method '" + str(
                ratioMethod) + "' created."

        #-------------------------------------------------------------------------------
        #Threshold on slope: Criteria:
        #--> Slope > 60° --> no glacier (ICIMOD);
        #--> Slope <= 24° --> glacier (Bolch et al. 2005, 2006, 2008)
        #--> Alean (1985), cited by Bolch et al. 2011: threshold of 45° for the slope of the detachment zone for cold glaciers and 25° for warm glaciers (cold glacier threashold of Alean (1985): 45; ICIMOD: 60)

        thresholdSlope = float(90)  #ignoring value: 90
        print "Start creating slope file with threshold '" + str(
            thresholdSlope) + "' ..."

        #Resample input DEM and derived slope to resolution of ratio image: Data Management Tools --> Raster --> Raster Processing: Resample
        inFileNameDem = tkFileDialog.askopenfilename(
            defaultextension='TIFF',
            filetypes=[('ERDAS IMAGINE', '*.img'), ('TIFF', '*.tif')],
            initialdir=self.workspace,
            initialfile=INITFILE_DEM,
            multiple=False,
            parent=tkRoot,
            title=
            'Chose input elevation raster file for slope determination (ArcPy)'
        )
        pInFileDem = sa.Raster(inFileNameDem)
        outFileNameDem = "res" + str(pRatio.meanCellWidth).replace(
            ".", "pt") + "_" + pInFileDem.name
        arcpy.Resample_management(
            pInFileDem, outFileNameDem, pRatio.meanCellWidth, "CUBIC"
        )  #BILINEAR #CUBIC #resample to Input scene size = Landsat 30m

        #Spatial Analyst Tools --> Surface: Slope (Aspect)
        pOutSlope = sa.Slope(outFileNameDem, "DEGREE", 1)
        pOutSlope.save("slope_" + outFileNameDem)
        print "Slope file '" + str(
            "slope_" +
            outFileNameDem) + "' created out of DEM input file '" + str(
                inFileNameDem) + "' and resampled to a resolution of '" + str(
                    pRatio.meanCellWidth) + "'."

        #-------------------------------------------------------------------------------
        #Additional threshold on TM1
        pB1 = sa.Raster(
            inFile + "\Band_1"
        )  #Use of additional threshold in TM1 to improve classification  in cast shadow (CCI, Paul2005 --> rocaviteanuetal2008)
        thresholdB1 = float(
            0)  #Paul and Andreassen (2009): TM1 (DNs >59) #ignoring value: 0

        #-------------------------------------------------------------------------------
        #Thresholding glacier yes/no

        print "Start thresholding ratio image..."

        #Spatial Analyst Tools --> Conditional: Con
        # Cell is glacier (1) only if all three criteria hold, else 0
        pBinRatio = sa.Con(
            ((pRatio > thresholdRatio) & (pOutSlope < thresholdSlope) &
             (pB1 > thresholdB1)), 1, 0)  #Threshold on ratio

        # Encode all three thresholds into the output file name
        outFileName = "bin" + str(thresholdRatio).replace(
            ".", "pt") + "_slope" + str(thresholdSlope).replace(
                ".", "pt") + "_1tm" + str(thresholdB1).replace(
                    ".", "pt") + "_" + str(outFileName)
        pBinRatio.save(outFileName)

        print "Binary ratio image '" + str(
            outFileName) + "' with ratio method '" + str(
                ratioMethod) + "', ratio threshold '" + str(
                    thresholdRatio) + "', slope threshold '" + str(
                        thresholdSlope) + "' and TM1 threshold '" + str(
                            thresholdB1) + "' created."

        #-------------------------------------------------------------------------------
        #Raster to Vector

        self.pArcPyTools.rasterToVector(
            outFileName, "median", 10000
        )  #Eliminate areas smaller 0.01 km^2 = 10000 Square Meters (rocaviteanuetal2008); 0.02 km^2 (ICIMOD)

        #Detect spatial autocorrelation: Spatial Statistic Tools --> Analyzing Patterns: Spatial Autocorrelation (Morans I)
        #--> Not applicable here

        return
Example #13
0
    return remap


## --PROCESSES--
# Solar Radiation
msg("Calculating insolation")
#solRaster = sa.Hillshade(elevRaster, 225, 35, "SHADOWS", zValue)
# Insolation index from aspect: cos(aspect - 45°) rescaled into 0-200
solRaster = (sa.Cos(
    (sa.Aspect(elevRaster) - 45) * math.pi / 180.0) * -100) + 100

# Topographic convergence index
msg("Calculating topographic convergence")
slopeRad = sa.Slope(elevRaster, "DEGREE", zValue) * math.pi / 180.0
# TCI = ln((flow accumulation + 30) / tan(slope))
tciRaster1 = sa.Ln(sa.Plus(flacRaster, 30.0) / sa.Tan(slopeRad))
# Where slope is zero, set TCI to its max value
tciRaster = sa.Con(slopeRad, tciRaster1.maximum, tciRaster1, "Value = 0")

# Create class breaks according to the method selected
if parseMethod in ("EQUAL_INTERVAL", "EQUAL_AREA"):
    msg("Using equal interval breaks")
    #Slice values
    msg("...decomposing values into equal quantiles")
    elevSlice = sa.Slice(elevRaster, 5, parseMethod, 1) * 100
    solSlice = sa.Slice(solRaster, 5, parseMethod, 1)
    tciSlice = sa.Slice(tciRaster, 5, parseMethod, 1)
    # Remap the extremes of the solar and tci rasters
    msg("...recoding group values")
    remap = sa.RemapValue([[1, 1], [2, 2], [3, 2], [4, 2], [5, 3]])
    solRemap = sa.Reclassify(solSlice, "VALUE", remap) * 10
    tciRemap = sa.Reclassify(tciSlice, "VALUE", remap)
    # Combine into zip codes
Example #14
0
# Digital elevation model used to refine each species range by elevation
dem = arcpy.Raster("dem.img")

# Specify the feature class with the ranges of birds 
birds = "birds.shp"

# Create a cursor
rows = arcpy.SearchCursor(birds)

for ranges in rows:
    spp = ranges.getValue("SCINAME")         #Get the scientific name found in the attribute table
    min_elev = ranges.getValue("MIN_ELE")    #Get the minimum elevation found in the attribute table
    max_elev = ranges.getValue("MAX_ELE")    #Get the maximum elevation found in the attribute table
    print spp, min_elev, max_elev
    spp_raster = arcpy.Raster(os.path.join(tempWS + "\\rasters", spp + ".img"))     #Get the spp raster
    arcpy.env.mask = spp_raster                                                     #Use it as a mask
    out_raster = sa.Con(((dem > min_elev) & (dem < max_elev)),1)                  #Refine by elevation
    out_raster.save((tempWS + "//rasters_elev//{}.img").format(spp))           #Save the refined range to scratch folder

print "Done!"

    
    
   
    

	




Example #15
0
##Geoprocessing
# Standard stream-delineation chain: fill sinks, flow direction,
# flow accumulation, then threshold accumulation to define streams
arcpy.AddMessage("Filling Dem")
fill = sa.Fill(inputdem)

arcpy.AddMessage("Processing Flowdirection")
flowdir = sa.FlowDirection(fill)

arcpy.AddMessage("Processing Flowaccumulation")
flowacc = sa.FlowAccumulation(flowdir)

if threshold == "":
    arcpy.AddMessage("Default Threshold Value")
    threshold = "2000"  # default accumulation threshold for stream cells
arcpy.AddMessage("Building Stream Network")
con = sa.Con(flowacc, "1", "", "Value > " +threshold,)

arcpy.AddMessage("Creating StreamLink")
streamlin = sa.StreamLink(con,flowdir)

arcpy.AddMessage("Creating StreamOrder")
streamord = sa.StreamOrder(con,flowdir,"STRAHLER")

arcpy.AddMessage("Converting Raster Streams to Vector Streams")
streamtf = sa.StreamToFeature(streamlin,flowdir, "streamtofeature", "NO_SIMPLIFY")

arcpy.AddMessage("Generating Pour Point")
pourp = arcpy.FeatureVerticesToPoints_management(streamtf, "pourpoint", "END")

if snapdis == "":
    arcpy.AddMessage("Default Snap Distance")
Example #16
0
def preprocess(layerName):
    """Project, clip, normalize and de-null a single resistance layer.

    Source dir: resistances
    Output dir: preprocessed

    Returns the path of the preprocessed raster. If the output already
    exists and redoExistingOutput is False, it is left untouched.
    """
    paths.setEnv(env)
    env.overwriteOutput = True

    source = paths.join(paths.resistances, layerName)
    output = paths.join(paths.preprocessed, layerName)
    print "Preprocessing {}: {} ==> {}".format(layerName, source, output)

    if arcpy.Exists(output) and not redoExistingOutput:
        print "{} already exists, leaving it as is.".format(output)
        return output

    # Remove temporary rasters left over from a previous (failed) run
    for tempras in ["projected", "clipped"]:
        try:
            arcpy.Raster(tempras)
            print "Deleting temporary: {}".format(tempras)
            arcpy.Delete_management(tempras)
        except RuntimeError:
            pass  # tempfile doesn't exist, we're good

    print "Projecting and resampling..."
    arcpy.ProjectRaster_management(source,
                                   "projected",
                                   paths.alaskaAlbers,
                                   cell_size=500)

    source = arcpy.Raster(source)
    print "Initial extent:      {} {} {} {}".format(source.extent.XMin,
                                                    source.extent.YMin,
                                                    source.extent.XMax,
                                                    source.extent.YMax)

    newExt = arcpy.Describe(paths.studyArea).extent
    print "New intended extent: {} {} {} {}".format(newExt.XMin, newExt.YMin,
                                                    newExt.XMax, newExt.YMax)
    print "Clipping..."
    # TODO: despite specifying an extent, it refuses to clip to exactly that (except by filling in borders with NoData)
    arcpy.Clip_management("projected", "", "clipped", paths.studyArea, "",
                          "ClippingGeometry", "NO_MAINTAIN_EXTENT")
    clipped = arcpy.Raster("clipped")

    print "New actual extent:   {} {} {} {}".format(clipped.extent.XMin,
                                                    clipped.extent.YMin,
                                                    clipped.extent.XMax,
                                                    clipped.extent.YMax)

    print "Normalizing..."
    outRas = arcpy.Raster("clipped")
    # Min-max rescale into the [0, 1] range
    outRas = (outRas - outRas.minimum) / (outRas.maximum - outRas.minimum)
    # de-null: replace NoData cells with 0
    outRas = sa.Con(sa.IsNull(outRas), 0, outRas)

    env.overwriteOutput = True

    outRas.save(output)
    # Clean up intermediates
    arcpy.Delete_management("projected")
    arcpy.Delete_management("clipped")
    return output
Example #17
0
def get_centerline (feature, dem, workspace, power = 5, eu_cell_size = 10):
    """Returns a center line feature of the given polygon feature based on
    cost over an euclidean distance raster and cost path. points are seeded
    using minimum and maximum elevation.

    Parameters:
        feature - polygon feature (cursor row) to derive a centerline for
        dem - elevation raster covering the feature
        workspace - folder where 'centerline.shp' is written
        power - exponent applied to the inverted distance raster to
            exaggerate the least-cost route (default 5)
        eu_cell_size - cell size for resampling / euclidean distance
            (default 10)

    Returns a (centerline, length, slope, error_flag) tuple; on any
    exception the length and slope are empty strings and error_flag True.
    """
    centerline = workspace + '\\centerline.shp'
    center_length = 0
    center_slope = 0
    smoothing = 4
    trim_distance = "100 Meters"

    try: 
        # Setup extents / environments for the current feature,
        # buffered outward by 200 map units on each side
        ARCPY.env.extent = feature.shape.extent
        desc = ARCPY.Describe(feature)
        XMin_new = desc.extent.XMin - 200
        YMin_new = desc.extent.YMin - 200
        XMax_new = desc.extent.XMax + 200
        YMax_new = desc.extent.YMax + 200
        ARCPY.env.extent = ARCPY.Extent(XMin_new, YMin_new, XMax_new, YMax_new)
    
        ARCPY.env.overwriteOutput = True
        ARCPY.env.cellSize = eu_cell_size
        ARCPY.env.snapRaster = dem
        
        
        # Get minimum and maximum points
        resample = ARCPY.Resample_management (dem, 'in_memory\\sample', eu_cell_size)
        masked_dem = spatial.ExtractByMask (resample, feature.shape)
    
    
        # Find the maximum elevation value in the feature, convert them to
        # points and then remove all but one.
        maximum = get_properties (masked_dem, 'MAXIMUM') 
        maximum_raster = spatial.SetNull(masked_dem, masked_dem, 'VALUE <> ' + maximum)
        maximum_point = ARCPY.RasterToPoint_conversion(maximum_raster, 'in_memory\\max_point')
        rows = ARCPY.UpdateCursor (maximum_point)
        for row in rows:
            if row.pointid <> 1:
                rows.deleteRow(row)
        del row, rows
        
        # Find the minimum elevation value in the feature, convert them to
        # points and then remove all but one.
        minimum = get_properties (masked_dem, 'MINIMUM')
        minimum_raster = spatial.SetNull(masked_dem, masked_dem, 'VALUE <> ' + minimum)
        minimum_point = ARCPY.RasterToPoint_conversion(minimum_raster, 'in_memory\\min_point')
        rows = ARCPY.UpdateCursor (minimum_point)
        for row in rows:
            if row.pointid <> 1:
                rows.deleteRow(row)
        del row, rows
        
        # Calculate euclidean Distance to boundary line for input DEM cells.
        polyline = ARCPY.PolygonToLine_management(feature.shape, 'in_memory\\polyline')
        eucdist =spatial.EucDistance(polyline, "", eu_cell_size, '')
         
        masked_eucdist = spatial.ExtractByMask (eucdist, feature.shape)
        
        # Calculate the cost raster by inverting the euclidean distance results,
        # and raising it to the power of x to exaggerate the least expensive route.
        cost_raster = (-1 * masked_eucdist + float(maximum))**power
            
        # Run the cost distance and cost path function to find the path of least
        # resistance between the minimum and maximum values. The results are set
        # so all values equal 1 (different path segments have different values)
        # and convert the raster line to a poly-line.
        backlink = 'in_memory\\backlink'
        cost_distance = spatial.CostDistance(minimum_point, cost_raster, '', backlink) 
        cost_path = spatial.CostPath(maximum_point, cost_distance, backlink, 'EACH_CELL', '')
        cost_path_ones = spatial.Con(cost_path, 1, '', 'VALUE > ' + str(-1)) # Set all resulting pixels to 1
        r_to_p = ARCPY.RasterToPolyline_conversion (cost_path_ones, 'in_memory\\raster_to_polygon')
        
        
        del ARCPY.env.extent # Delete current extents (need here but do not know why)
        
        # Removes small line segments from the centerline shape. These segments are
        # a byproduct of cost analysis.
        lines = str(ARCPY.GetCount_management(r_to_p)) #check whether we have more than one line segment
        if float(lines) > 1: # If there is more then one line
            rows = ARCPY.UpdateCursor(r_to_p)
            for row in rows:
                if row.shape.length == eu_cell_size: # delete all the short 10 m lines
                    rows.deleteRow(row)
            del row, rows
            lines = str(ARCPY.GetCount_management(r_to_p))
            if float(lines) > 1:
                ARCPY.Snap_edit(r_to_p, [[r_to_p, "END", "50 Meters"]]) # make sure that the ends of the lines are connected
                r_to_p = ARCPY.Dissolve_management(r_to_p, 'in_memory\\raster_to_polygon_dissolve')
    
    
        # Smooth the resulting line. Currently smoothing is determined by minimum
        # and maximum distance. The greater change the greater the smoothing.
        smooth_tolerance = (float(maximum) - float(minimum)) / smoothing
        ARCPY.SmoothLine_cartography(r_to_p, centerline, 'PAEK', smooth_tolerance, 'FIXED_CLOSED_ENDPOINT', 'NO_CHECK')
    
        field_names = [] # List of field names in the file that will be deleted.
        fields_list = ARCPY.ListFields(centerline)
        for field in fields_list: # Loop through the field names
            if not field.required: # If they are not required append them to the list of field names.
                field_names.append(field.name)
        # Add new fields to the center line feature
        ARCPY.AddField_management(centerline, 'GLIMSID', 'TEXT', '', '', '25')
        ARCPY.AddField_management(centerline, 'LENGTH', 'FLOAT')
        ARCPY.AddField_management(centerline, 'SLOPE', 'FLOAT')
        ARCPY.DeleteField_management(centerline, field_names) # Remove the old fields.
        
        
        # Calculate the length of the line segment and populate segment data.
        ARCPY.CalculateField_management(centerline, 'LENGTH', 'float(!shape.length@meters!)', 'PYTHON')
        rows = ARCPY.UpdateCursor (centerline)
        for row in rows:
            row.GLIMSID = feature.GLIMSID # Get GLIMS ID and add it to segment
            center_length = row.LENGTH # Get the length of the center line
            # Calculate slope of the line based on change in elevation over length of line
            center_slope = round(math.degrees(math.atan((float(maximum) - float(minimum)) / row.LENGTH)), 2)
            row.SLOPE = center_slope # Write slope to Segment
            rows.updateRow(row) # Update the new entry
        del row, rows #Delete cursors and remove locks    
        
        
        # Flip Line if needed - Turn min point and end point into a line segment if
        # the length of this line is greater then the threshold set, flip the line.
        end_point = ARCPY.FeatureVerticesToPoints_management(centerline, 'in_memory\\end_point', 'END')
        merged_points = ARCPY.Merge_management ([end_point, minimum_point], 'in_memory\\merged_points')
        merged_line = ARCPY.PointsToLine_management (merged_points, 'in_memory\\merged_line')
        
        merged_line_length = 0 # Get the line Length
        rows = ARCPY.SearchCursor (merged_line)
        for row in rows:
            merged_line_length += row.shape.length
        del row, rows
            
        # if the line length is greater then a quarter the entire feature length, flip
        if merged_line_length > (center_length/4):
            ARCPY.FlipLine_edit(centerline)
    
    
        # This function attempts to extend the line and clip it back to the 
        # feature extents in order to create a line that runs from edge to edge
        #trimmed_line = ARCPY.Merge_management([polyline, centerline], 'in_memory\\line_merge')
        trimmed_line = ARCPY.Append_management (polyline, centerline, 'NO_TEST')
        ARCPY.TrimLine_edit (trimmed_line, trim_distance, "DELETE_SHORT")
        ARCPY.ExtendLine_edit(trimmed_line, trim_distance, "EXTENSION")
        
        rows = ARCPY.UpdateCursor (trimmed_line)
        for row in rows:
            if row.LENGTH == 0.0:
                rows.deleteRow(row)
        del row, rows
        # Recalculate length. Must be after 0.0 lengths are deleted or they will
        # not be removed above.
        ARCPY.CalculateField_management(centerline, 'LENGTH', 'float(!shape.length@meters!)', 'PYTHON')
    
    
        ARCPY.env.overwriteOutput = False
        return centerline, center_length, center_slope, False
    except:
        # NOTE(review): bare except — any failure returns the error flag
        # instead of propagating; the partial centerline path is still returned
        ARCPY.env.overwriteOutput = False
        return centerline, '', '', True
Example #18
0
#Output variables (script arguments 6-10; the trailing comments show the
#scratch-workspace paths the tool was originally run with)
dist2edgeRaster = sys.argv[
    6]  #r'C:\WorkSpace\GBAT2012\GHAT_V011\scratch\dist2edge'
corePatches = sys.argv[
    7]  #r'C:\WorkSpace\GBAT2012\GHAT_V011\scratch\corePatches'
dist2habRaster = sys.argv[
    8]  #r'C:\WorkSpace\GBAT2012\GHAT_V011\scratch\dist2hab'
subnetRaster = sys.argv[9]  #r'C:\WorkSpace\GBAT2012\GHAT_V011\scratch\subnet'
dist2protRaster = sys.argv[
    10]  #r'C:\WorkSpace\GBAT2012\GHAT_V011\scratch\dist2prot'

##--Process 1: Calculate distance to patch edge--
# NOTE(review): patchRaster, edgeWidth and costRaster are read from earlier
# sys.argv entries defined above this section -- confirm against the full script.
arcpy.AddMessage("Calculating distance to patch edge")
arcpy.AddMessage("...inverting habitat raster")
# Con with no false-expression: cells that are NoData in the habitat raster
# become 1, habitat cells stay NoData -- an inverted binary mask.
nonHabitatBinary = sa.Con(sa.IsNull(patchRaster), 1)
arcpy.AddMessage("...calculating distances from edge into patch")
# Distance from every cell to the nearest non-habitat cell, i.e. distance
# measured from the patch edge inward.
eucDist = sa.EucDistance(nonHabitatBinary)
eucDist.save(dist2edgeRaster)

##--Process 2: Extract core areas (using distance to edge)
arcpy.AddMessage("Extracting core areas")
# Null out patch cells within edgeWidth of the edge; what remains is core.
core = sa.SetNull(eucDist, patchRaster, "VALUE <= %s" % edgeWidth)
core.save(corePatches)

##--Process 3: Calculate cost distance away from patch
arcpy.AddMessage("Calculating cost distance from patches")
# Accumulated least-cost distance outward from the patches over costRaster.
costDist2H = sa.CostDistance(patchRaster, costRaster)
costDist2H.save(dist2habRaster)

##-Process 4: Extract patch subnetwork areas (from distance to habitat)
Example #19
0
    patchIDs.append(row.VALUE)
    row = rows.next()
del row, rows  # release the search cursor and its lock

# Loop through patches and calculate least cost paths
streamFC = "in_memory/LCPlines"  # scratch FC for the commented-out vector export below
first = True
# enumerate supplies a true ordinal for the progress message; patch IDs are
# arbitrary values and cannot serve as an "x of y" counter.
for patchNum, patchID in enumerate(patchIDs, 1):
    msg("Working on patch %s (%s of %s)" % (patchID, patchNum, len(patchIDs)))
    # Identify the cost and back link rasters
    cdRaster = os.path.join(CostDistWS, "CD_%s.img" % patchID)
    blRaster = os.path.join(CostDistWS, "BL_%s.img" % patchID)
    # Calculate least cost paths from all patches to the current patch
    lcpRaster = sa.CostPath(patchFix, cdRaster, blRaster, "EACH_ZONE")
    if first:
        # First patch: binary raster -- 1 on path cells, 0 elsewhere.
        lcpOutput = sa.Con(sa.IsNull(lcpRaster), 0, 1)
        first = False
    else:
        # Add this patch's paths onto the running total, then flatten the
        # sum back to a 0/1 raster.
        lcpTemp = sa.Con(sa.IsNull(lcpRaster), 0, 1) + lcpOutput
        lcpOutput = sa.Con(lcpTemp, 1, 0, "VALUE > 0")
    '''
    # Convert the backlink to a flow direction raster
    #fdRaster = sa.Int(sa.Exp2(blRaster) / 2)
    # Convert the LCP raster to a vector
    if first:   # If the first patch, save the streamsFC to the output FC file
        sa.StreamToFeature(lcpRaster,fdRaster,lcpFC,"NO_SIMPLIFY")
        first = False
    else:       # Otherwise, create it and append it to the original
        sa.StreamToFeature(lcpRaster,fdRaster,streamFC,"NO_SIMPLIFY")
        arcpy.Append_management(streamFC,lcpFC)
    '''
Example #20
0
# Message function: echo to stdout and to the geoprocessing message stream.
def msg(txt):
    """Print *txt* and forward it to arcpy.AddMessage."""
    # print(txt) -- the function-call form is valid in Python 2 (a
    # parenthesized single argument) as well as Python 3, unlike the
    # original Python 2-only `print txt` statement.
    print(txt)
    arcpy.AddMessage(txt)

##-PROCESSES-
# 1. Convert curve number to a weight: Int((100 - cn) / 10). Higher CN values
#  reflect increased runoff across the cell. This equation inverts the
#  CN value and scales it to values from 0 to 10. The resulting value reflects
#  a proxy for infiltration: higher values suggest more runoff stays in the cell
#  meaning less pollutants will leave the cell to downstream neighbors. 
msg("Calculating flow length weight from %s" %cnRaster)
# The +1 shifts the scaled value so every cell's weight is non-zero before
# it is used as a FlowLength weight below.
weightRaster = sa.Int(sa.Divide(sa.Minus(100.0,cnRaster), 10.0) + 1)

# 2. Create a flow direction where streams are NoData. This enables calculation
#  of flow length to the stream vs to the stream outlet.
msg("Creating modified flow direction to calculate distance to streams")
# Con with no false-expression: non-stream cells (null in streamsRaster) keep
# their flow direction; stream cells become NoData, so downstream flow-length
# accumulation stops at the stream.
fdRaster = sa.Con(sa.IsNull(streamsRaster),flowdirRaster)

# 3. Calculate cost-weighted flowlength. Cell values represent the infiltrated-
#  weighted distance along the flow path to a stream. Two paths may be the same
#  length, but if one goes through cells with high curve numbers (low weights)
#  it's path will be effectively shorter whereas a path going through cells with
#  low curve numbers (high weights) will be effectively longer - in terms of
#  suspended/dissolved pollutants reaching the stream. 
msg("Calculating weighted flow length")
# +30: presumably one cell length (30 m cells) so stream-adjacent cells get a
# non-zero distance -- TODO confirm against the input raster cell size.
wtdflRaster1 = sa.FlowLength(fdRaster,"DOWNSTREAM",weightRaster) + 30
#Set stream pixels to 0
wtdflRaster = sa.Con(sa.IsNull(streamsRaster), wtdflRaster1, 0)

# 4. Apply a decay coefficient to weighted flow lengths to create distance decay raster
#    k = math.log(0.01) / d, where d is obtained from the unweighted flow length(?)
msg("Calculating distance decayed rasters")