Example #1
def topographic_radiation(raw_aspect, radiation_output):
    """
    Description: calculates 32-bit float topographic radiation
    Inputs: 'raw_aspect' -- an input raw aspect raster
            'radiation_output' -- an output topographic radiation raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input raw aspect raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import Cos
    from arcpy.sa import Raster

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Calculate topographic radiation aspect index
    print('\t\tCalculating topographic radiation aspect index...')
    numerator = 1 - Cos((3.142 / 180) * (Raster(raw_aspect) - 30))
    radiation_index = numerator / 2

    # Convert negative aspect values
    print('\t\tConverting negative aspect values...')
    out_raster = Con(Raster(raw_aspect) < 0, 0.5, radiation_index)
    out_raster.save(radiation_output)
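
A minimal usage sketch for the function above, assuming an ArcGIS install with the Spatial Analyst extension; the paths are hypothetical:

import arcpy

arcpy.CheckOutExtension('Spatial')
# Hypothetical paths: an aspect raster derived from a DEM, and the output.
topographic_radiation('C:/data/raw_aspect.tif', 'C:/data/radiation.tif')
arcpy.CheckInExtension('Spatial')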
Example #2
    def calcSubtypeDisturbance(AnthroFeatures, subtype, AnthroDisturbanceType):
        """calculate disturbance associated with each subtype"""
        distance = distanceDict[subtype]
        weight = weightDict[subtype]

        AnthroFeatures = Raster(AnthroFeatures)

        if distance > 0:
            arcpy.AddMessage("  Calculating direct and indirect effects of "
                             + str(subtype))
            outEucDist = EucDistance(AnthroFeatures, distance, cellSize)
            tmp1 = 100 - (1/(1 + Exp(((outEucDist / (distance/2))-1)*5))) * weight  # sigmoidal
            # tmp1 = (100 - (weight * Power((1 - outEucDist/distance), 2)))  # exponential
            # tmp1 = 100 - (weight - (outEucDist / distance) * weight)  # linear
            tmp2 = Con(IsNull(tmp1), 100, tmp1)
            subtypeRaster = tmp2
            subtypeRaster.save(AnthroDisturbanceType + "_" + subtype
                               + "_Subtype_Disturbance")
        elif weight > 0:
            arcpy.AddMessage("  Calculating direct effects of "
                             + str(subtype))
            tmp3 = Con(IsNull(AnthroFeatures), 0, AnthroFeatures)
            subtypeRaster = 100 - (tmp3 * weight)
            subtypeRaster.save(AnthroDisturbanceType + "_" + subtype
                               + "_Subtype_Disturbance")
        else:
            subtypeRaster = None

        return subtypeRaster
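
The method above relies on module-level lookups mapping each subtype to an effect distance and weight, plus a cellSize defined elsewhere; a hypothetical sketch of those structures:

# Hypothetical values for illustration only; the real dictionaries are
# populated from the tool's parameter tables.
distanceDict = {"Roads": 500, "PowerLines": 250}  # indirect-effect distance (m)
weightDict = {"Roads": 80, "PowerLines": 50}      # direct-effect weight (0-100)
cellSize = 30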
Example #3
def run_con(lower_bounds, upper_bounds, in_grid, true_val, true_alt=None):
    "Conditionally evaluate raster range within bounds." ""

    if config.debug:
        utils.msg("run_con: lb: {} ub: {} grid: {}  val: {}, alt: {}".format(
            lower_bounds, upper_bounds, in_grid, true_val, true_alt))

    out_grid = None

    # if our initial desired output value isn't set, use the backup
    if true_val is None:
        true_val = true_alt
    # calculate our output grid
    if lower_bounds is not None:
        if upper_bounds is not None:
            out_grid_a = Con(in_grid, true_val, 0,
                             "VALUE < {}".format(upper_bounds))
            out_grid = Con(in_grid, out_grid_a, 0,
                           "VALUE > {}".format(lower_bounds))
        else:
            out_grid = Con(in_grid, true_val, 0,
                           "VALUE >= {}".format(lower_bounds))
    elif upper_bounds is not None:
        out_grid = Con(in_grid, true_val, 0,
                       "VALUE <= {}".format(upper_bounds))

    if out_grid is None and isinstance(true_val, arcpy.sa.Raster):
        out_grid = true_val

    return out_grid
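
A hedged usage sketch of run_con with an illustrative bathymetry raster and depth bounds:

from arcpy.sa import Raster

bathy = Raster('C:/data/bathy.tif')  # hypothetical input raster
# 1 where -50 < VALUE < -10, 0 elsewhere
depth_zone = run_con(-50, -10, bathy, 1)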
Example #4
def combineProposedWithCurrentDebit(anthroPath, uniqueProposedSubtypes):
    for subtype in uniqueProposedSubtypes:
        # Merge proposed and current feature rasters
        currentAnthroFeature = Raster(os.path.join(anthroPath, subtype))
        proposedAnthroFeature = Raster("Proposed_" + subtype)
        postAnthroFeature = Con(IsNull(proposedAnthroFeature),
                                currentAnthroFeature, proposedAnthroFeature)
        postAnthroFeature.save("Post_" + subtype)
Example #5
def do_score(fire, year, day, p, shp):
    """!
    Calculate score 
    @param fire Fire to calculate score for
    @param year Year fire is from
    @param day Day score is for
    @param p Probability raster to compare to
    @param shp Shapefile for actual perimeter
    @return None
    """
    orig_raster = os.path.join(run_output, fire + ".tif")
    orig_raster = Raster(orig_raster) if os.path.exists(orig_raster) else None
    prob_raster = Raster(p)
    raster = os.path.join(run_output,
                          os.path.splitext(os.path.basename(shp))[0] + '.tif')
    perim, raster = rasterize_perim(run_output, shp, year, fire, raster)
    if perim:
        target = Raster(raster)
        # remove the original raster used to start the simulation
        r = Con(IsNull(orig_raster), prob_raster,
                0.0) if orig_raster is not None else prob_raster
        r = SetNull(r == 0.0, r)
        m = Con(IsNull(orig_raster), target,
                0.0) if orig_raster is not None else target
        m = SetNull(m == 0.0, m)
        hits = Con(IsNull(r), 0.0, r) * Con(IsNull(m), 0.0, 1.0)
        misses = Con(IsNull(r), 1.0, 0.0) * Con(IsNull(m), 0.0, 1.0)
        false_positives = Con(IsNull(r), 0.0, r) * Con(IsNull(m), 1.0, 0.0)
        tp = arcpy.RasterToNumPyArray(hits, nodata_to_value=0).sum()
        fn = arcpy.RasterToNumPyArray(misses, nodata_to_value=0).sum()
        fp = arcpy.RasterToNumPyArray(false_positives, nodata_to_value=0).sum()
        total_score = tp / (tp + fn + fp)
        #~ logging.info("Scores are {} + {} + {} = {}".format(tp, fn, fp, total_score))
        scores.append([fire, year, day, p, shp, tp, fn, fp, total_score])
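
The score computed above is the Jaccard index (also called the critical success index): hits divided by the union of predicted and actual burned cells. A quick numeric check:

tp, fn, fp = 60.0, 25.0, 15.0
total_score = tp / (tp + fn + fp)  # 0.6 -- perfect overlap would give 1.0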
Example #6
def create_mask(path_to_temp):
    arcpy.CheckOutExtension('Spatial')
    inputrasters = arcpy.GetParameterAsText(1).split(";")
    rasterlist = []
    for elements in inputrasters:
        rasterobject = Raster(elements)
        rasterlist.append(rasterobject)
    rasterlistSum = sum(rasterlist)                     
    outcon = Con(IsNull(rasterlistSum) == 0, 1)          # 1 where all inputs have data, NoData elsewhere
    outcon.save(join(path_to_temp,"mask"))              # filename is 'mask'
    #arcpy.CheckInExtension('Spatial')                  
    path_to_mask = join(path_to_temp,"mask")
    if arcpy.Exists(path_to_mask):
        arcpy.AddMessage("Creating mask.")
    return path_to_mask
Example #7
def surface_relief(elevation_input, relief_output):
    """
    Description: calculates 32-bit float surface relief ratio
    Inputs: 'elevation_input' -- an input raster digital elevation model
            'relief_output' -- an output surface relief ratio raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input elevation raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import Float
    from arcpy.sa import FocalStatistics
    from arcpy.sa import NbrRectangle

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Define a neighborhood variable
    neighborhood = NbrRectangle(5, 5, "CELL")

    # Calculate local minimum
    print('\t\tCalculating local minimum...')
    local_minimum = FocalStatistics(elevation_input, neighborhood, 'MINIMUM',
                                    'DATA')

    # Calculate local maximum
    print('\t\tCalculating local maximum...')
    local_maximum = FocalStatistics(elevation_input, neighborhood, 'MAXIMUM',
                                    'DATA')

    # Calculate local mean
    print('\t\tCalculating local mean...')
    local_mean = FocalStatistics(elevation_input, neighborhood, 'MEAN', 'DATA')

    # Calculate maximum drop
    print('\t\tCalculating maximum drop...')
    maximum_drop = Float(local_maximum - local_minimum)

    # Calculate standardized drop
    print('\t\tCalculating standardized drop...')
    standardized_drop = Float(local_mean - local_minimum) / maximum_drop

    # Calculate surface relief ratio
    print('\t\tCalculating surface relief ratio...')
    out_raster = Con(maximum_drop == 0, 0, standardized_drop)
    out_raster.save(relief_output)
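
As with the other terrain functions in this collection, a hypothetical call looks like (Spatial Analyst assumed):

# Hypothetical paths for illustration.
surface_relief('C:/data/dem.tif', 'C:/data/relief_ratio.tif')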
Example #8
def calcConiferPost(coniferTreatmentArea, Conifer_Cover):
    arcpy.AddMessage("Calculating post-project conifer modifier")
    # Add field Conifer to use when converting to raster
    inTable = coniferTreatmentArea
    fieldName = "Conifer"
    fieldType = "SHORT"
    expression = 0
    arcpy.AddField_management(inTable, fieldName, fieldType)
    arcpy.CalculateField_management(inTable, fieldName, expression,
                                    "PYTHON_9.3", "")

    # Convert to raster
    in_features = coniferTreatmentArea
    value_field = "Conifer"
    out_rasterdataset = "Proposed_Conifer_Cover"
    cell_assignment = "MAXIMUM_AREA"
    priority_field = "Conifer"
    cellSize = 30

    coniferRaster = arcpy.PolygonToRaster_conversion(in_features,
                                                     value_field,
                                                     out_rasterdataset,
                                                     cell_assignment,
                                                     priority_field,
                                                     cellSize)

    # Mask existing conifer cover
    coniferPost = Con(IsNull(coniferRaster), Conifer_Cover, coniferRaster)
    coniferPost.save("Post_Conifer_Cover")

    # Calculate neighborhood statistics
    in_raster = coniferPost
    radius = 400
    neighborhood = NbrCircle(radius, "MAP")
    statistics_type = "MEAN"

    coniferCover400 = FocalStatistics(in_raster, neighborhood, statistics_type)

    # Reclassify to get Post_Conifer_Modifier
    in_raster = coniferCover400
    reclass_field = "VALUE"
    remapTable = [[0, 1, 100], [1, 2, 28], [2, 3, 14], [3, 4, 9], [4, 5, 6],
                  [5, 7, 3], [7, 8, 2], [8, 9, 1], [9, 100, 0]]
    coniferModifierPost100 = Reclassify(in_raster, reclass_field,
                                        RemapRange(remapTable))
    coniferModifierPost = Float(coniferModifierPost100) / 100

    return coniferModifierPost
Example #9
def getNullSubstituteGrid(lccObj, inLandCoverGrid, inSubstituteGrid,
                          nullValuesList, cleanupList, timer):
    # Set areas in the inSubstituteGrid to NODATA using the nullValuesList. For areas not in the nullValuesList, substitute
    # the grid values with those from the inLandCoverGrid
    LCGrid = Raster(inLandCoverGrid)
    subGrid = Raster(inSubstituteGrid)

    # find the highest value found in LCC XML file or land cover grid
    lccValuesDict = lccObj.values
    maxValue = LCGrid.maximum
    xmlValues = lccObj.getUniqueValueIdsWithExcludes()
    for v in xmlValues:
        if v > maxValue:
            maxValue = v

    # Add 1 to the highest value and then add it to the list of values to exclude during metric calculations
    valueToExclude = int(maxValue + 1)
    excludedValuesFrozen = lccValuesDict.getExcludedValueIds()
    excludedValues = [item for item in excludedValuesFrozen]
    excludedValues.append(valueToExclude)

    # build whereClause string (e.g. "VALUE" <> 11 or "VALUE" <> 12) to identify areas to substitute the valueToExclude
    delimitedVALUE = arcpy.AddFieldDelimiters(subGrid, "VALUE")
    stringStart = delimitedVALUE + " <> "
    stringSep = " or " + delimitedVALUE + " <> "
    whereClause = stringStart + stringSep.join(
        [str(item) for item in nullValuesList])
    AddMsg(timer.split() + " Generating land cover in floodplain grid...")
    nullSubstituteGrid = Con(subGrid, LCGrid, valueToExclude, whereClause)

    return nullSubstituteGrid, excludedValues
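
Note: because the `<>` tests are joined with `or`, the clause is true for every cell whenever nullValuesList holds more than one value; if the intent is "VALUE matches none of the null values", the tests presumably need `and`. A sketch of that assumption (not the original code):

# Assumed intent: substitute only where VALUE matches none of the null values.
stringSep = " and " + delimitedVALUE + " <> "
whereClause = stringStart + stringSep.join(str(item) for item in nullValuesList)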
Example #10
def raster_average_mk2(rasterobject_list, outras):
    # this function improves on the previous version by taking NoData values into account
    from arcpy.sa import Con, SetNull, CellStatistics

    n = len(rasterobject_list)

    # get mask
    rastermask_list = list()
    for each in rasterobject_list:
        eachmask = Con(each > 32760, 1, 0)
        rastermask_list.append(eachmask)

    sum_mask = CellStatistics(rastermask_list, "SUM")

    # flip values and set null for mask
    # only do this for pixels having more than 6 NoData
    ##    sum_mask = Con(sum_mask>0, None, 1)
    sum_mask = SetNull(sum_mask > 6, 1)

    # an in-memory mask raster isn't honored, so save it to disk first
    outras_mask = r"C:\mask_temp.tif"
    sum_mask.save(outras_mask)

    # average, only operate on those valid values
    arcpy.env.mask = outras_mask

    # average
    avg_raster = CellStatistics(rasterobject_list, "MEAN", "DATA")
    avg_raster.save(outras)

    # clear mask
    arcpy.env.mask = None
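
A hedged usage sketch; inputs are arcpy Raster objects, and values above 32760 are treated by the function as the sensor's NoData fill:

import arcpy
from arcpy.sa import Raster

arcpy.CheckOutExtension('Spatial')
# Hypothetical daily rasters to average.
rasters = [Raster(r'C:\data\day1.tif'), Raster(r'C:\data\day2.tif')]
raster_average_mk2(rasters, r'C:\data\average.tif')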
Example #11
def CalcZonalStats(in_zone_data, zone_field, in_value_raster, out_table):
    """
    Resamples inValueRaster to 5m pixel size and calculates the average value
    within each map unit. Higher resolution required for map units <5 acres.
    :param in_zone_data: the Map Units Dissolve feature class
    :param zone_field: the field to use as zone field, must be integer and
    cannot be OBJECTID
    :param in_value_raster: raster dataset or basename as a string
    :param out_table: a name to save the output table as a string
    :return: None
    """
    # Convert null values to 0 so that they are not ignored when summarizing
    in_value_raster = Con(IsNull(in_value_raster),0,in_value_raster)

    # Resample to avoid small map units returning null values
    resample = True
    if resample:
        # Resample raster
        tmp_raster = "sub_raster"
        arcpy.Resample_management(in_value_raster, tmp_raster, "5", "NEAREST")
    else:
        tmp_raster = in_value_raster
    # Calculate zonal statistics
    arcpy.gp.ZonalStatisticsAsTable_sa(in_zone_data, zone_field, tmp_raster,
                                    out_table, "DATA", "MEAN")
    if resample:
        arcpy.Delete_management(tmp_raster)
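
A hypothetical call matching the docstring (the zone field must be an integer field other than OBJECTID):

# Illustrative inputs: a dissolved map-unit feature class and a value raster.
CalcZonalStats('Map_Units_Dissolve', 'Map_Unit_ID', 'habitat_raster',
               'zonal_mean_table')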
Example #12
def applyLekUpliftModifierPre(preSeasonalHabitat, LekPresenceRaster):
    """make the habitat quality of the pre seasonal habtiat raster equal to 1
    wherever the Lek Presence Raster is also 1, ie a lek is present"""
    inRaster = LekPresenceRaster
    inTrueRaster = preSeasonalHabitat
    inFalseConstant = LekPresenceRaster
    whereClause = "VALUE = 0"

    LSDMpre = Con(inRaster, inTrueRaster, inFalseConstant, whereClause)
    return LSDMpre
Example #13
def getIntersectOfGrids(lccObj, inLandCoverGrid, inSlopeGrid,
                        inSlopeThresholdValue, timer):

    # Generate the slope X land cover grid where areas below the threshold slope are
    # set to the value 'Maximum Land Cover Class Value + 1'.
    LCGrid = Raster(inLandCoverGrid)
    SLPGrid = Raster(inSlopeGrid)

    # find the highest value found in LCC XML file or land cover grid
    lccValuesDict = lccObj.values
    maxValue = LCGrid.maximum
    xmlValues = lccObj.getUniqueValueIdsWithExcludes()
    for v in xmlValues:
        if v > maxValue:
            maxValue = v

    AddMsg(timer.split() +
           " Generating land cover above slope threshold grid...")
    AreaBelowThresholdValue = int(maxValue + 1)
    delimitedVALUE = arcpy.AddFieldDelimiters(SLPGrid, "VALUE")
    whereClause = delimitedVALUE + " >= " + inSlopeThresholdValue
    SLPxLCGrid = Con(SLPGrid, LCGrid, AreaBelowThresholdValue, whereClause)

    # determine if a grid code is to be included in the effective reporting unit area calculation
    # get the frozenset of excluded values (i.e., values not to use when calculating the reporting unit effective area)
    excludedValues = lccValuesDict.getExcludedValueIds()

    # if certain land cover codes are tagged as 'excluded = TRUE', generate grid where land cover codes are
    # preserved for areas coincident with steep slopes, areas below the slope threshold are coded with the
    # AreaBelowThresholdValue except for where the land cover code is included in the excluded values list.
    # In that case, the excluded land cover values are maintained in the low slope areas.
    if excludedValues:
        # build a whereClause string (e.g. "VALUE" = 11 or "VALUE" = 12) to identify where on the land cover grid excluded values occur
        AddMsg(
            timer.split() +
            " Inserting EXCLUDED values into areas below slope threshold...")
        stringStart = delimitedVALUE + " = "
        stringSep = " or " + delimitedVALUE + " = "
        whereExcludedClause = stringStart + stringSep.join(
            [str(item) for item in excludedValues])
        SLPxLCGrid = Con(LCGrid, LCGrid, SLPxLCGrid, whereExcludedClause)

    return SLPxLCGrid
Example #14
def linear_aspect(raw_aspect, aspect_output):
    """
    Description: calculates 32-bit float linear aspect
    Inputs: 'raw_aspect' -- an input raw aspect raster
            'aspect_output' -- an output linear aspect raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires an input DEM
    """

    # Import packages
    import arcpy
    from arcpy.sa import ATan2
    from arcpy.sa import Con
    from arcpy.sa import Cos
    from arcpy.sa import FocalStatistics
    from arcpy.sa import Mod
    from arcpy.sa import NbrRectangle
    from arcpy.sa import Raster
    from arcpy.sa import SetNull
    from arcpy.sa import Sin

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Define a neighborhood variable
    neighborhood = NbrRectangle(3, 3, "CELL")

    # Calculate aspect transformations
    print('\t\tTransforming raw aspect to linear aspect...')
    setNull_aspect = SetNull(
        Raster(raw_aspect) < 0, (450.0 - Raster(raw_aspect)) / 57.296)
    sin_aspect = Sin(setNull_aspect)
    cos_aspect = Cos(setNull_aspect)
    sum_sin = FocalStatistics(sin_aspect, neighborhood, "SUM", "DATA")
    sum_cos = FocalStatistics(cos_aspect, neighborhood, "SUM", "DATA")
    mod_aspect = Mod(
        ((450 - (ATan2(sum_sin, sum_cos) * 57.296)) * 100), 36000
    ) / 100  # The *100 and 36000(360*100) / 100 allow for two decimal points since Fmod appears to be gone
    out_raster = Con((sum_sin == 0) & (sum_cos == 0), -1, mod_aspect)

    # Save output raster file
    out_raster.save(aspect_output)
Example #15
def function(DEM, streamNetwork, smoothDropBuffer, smoothDrop, streamDrop, outputReconDEM):

    try:
        # Set environment variables
        arcpy.env.extent = DEM
        arcpy.env.mask = DEM
        arcpy.env.cellSize = DEM

        # Set temporary variables
        prefix = "recon_"
        streamRaster = prefix + "streamRaster"

        # Determine DEM cell size and OID column name
        size = arcpy.GetRasterProperties_management(DEM, "CELLSIZEX")
        OIDField = arcpy.Describe(streamNetwork).OIDFieldName

        # Convert stream network to raster
        arcpy.PolylineToRaster_conversion(streamNetwork, OIDField, streamRaster, "", "", size)

        # Work out distance of cells from stream
        distanceFromStream = EucDistance(streamRaster, "", size)

        # Elements within a buffer distance of the stream are smoothly dropped
        intSmoothDrop = Con(distanceFromStream > float(smoothDropBuffer), 0,
                            (float(smoothDrop) / float(smoothDropBuffer)) * (float(smoothDropBuffer) - distanceFromStream))
        del distanceFromStream

        # Burn this smooth drop into DEM. Cells in stream are sharply dropped by the value of "streamDrop"
        binaryStream = Con(IsNull(Raster(streamRaster)), 0, 1)
        reconDEMTemp = Raster(DEM) - intSmoothDrop - (float(streamDrop) * binaryStream)
        del intSmoothDrop
        del binaryStream
        
        reconDEMTemp.save(outputReconDEM)
        del reconDEMTemp

        log.info("Reconditioned DEM generated")

    except Exception:
        log.error("DEM reconditioning function failed")
        raise
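
A hypothetical call of the reconditioning function; it assumes arcpy and the Spatial Analyst operators it uses (EucDistance, Con, IsNull, Raster) are imported at module level and that a log object is configured:

# Illustrative parameters: buffer and drops are in map units.
function('C:/data/dem.tif', 'C:/data/streams.shp', smoothDropBuffer=50,
         smoothDrop=2, streamDrop=10, outputReconDEM='C:/data/recon_dem.tif')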
Example #16
def greennessMask(composite, ndi_thres, outpath, cutobject):
    '''
    composite: composite of RGB and TIR imagery produced during TIR 
               mosaicking and georeferencing
    ndi_thres: threshold for the greenness index. Pixels with values above will 
               be maintained
    outpath:   path to folder to save outputs
    cutobject: (optional) image which is clipped to the ndi threshold extent
    '''
                
    # Get the red and green raster bands from the composite
    red = Raster(os.path.join(composite, 'Band_1'))
    green = Raster(os.path.join(composite, 'Band_2'))
    # Get the mask band
    mask = Raster(os.path.join(composite, 'Band_6'))
                
    # Calculate greenness index NGRDI
    red = Float(red)
    green = Float(green)
    ndi = (green - red)/(green + red)
    
    del red
    del green
    
    ndi_clip = Con((mask == 65535), ndi, 0)
    del mask
    del ndi
    
    ndi_masked = Con(ndi_clip >= ndi_thres, ndi_clip, 0)
    del ndi_clip
                
    composite_name = os.path.split(composite)[1]
    ndi_file_masked = composite_name.replace('Composite', 'NDI')
    ndi_file_masked = ndi_file_masked.replace('_rect', '')
    # ndi_masked.save(os.path.join(outpath, ndi_file_masked))
            
    if cutobject:
        cutobj = Raster(cutobject)           
        masked_obj = Con(ndi_masked > ndi_thres, cutobj, -99)
        masked_obj_file = ndi_file_masked.replace('NDI', 'Obj')
        masked_obj.save(os.path.join(outpath, masked_obj_file))
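
A hypothetical invocation, assuming the Band_1/Band_2/Band_6 layout the function expects; pass None for cutobject to skip the optional clip:

# Illustrative paths and threshold.
greennessMask(r'C:\data\Composite_rect.tif', 0.05, r'C:\output', None)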
Example #17
def applyLekUpliftModifierPost(postSeasonalHabitat, LekPresenceRaster,
                               LekDisturbanceModifier):
    """make the habitat quality of the post seasonal habitat raster equal
    to the lek disturbance/uplift modifier wherever the Lek Presence Raster
    is 1, ie a lek is present"""
    inRaster = LekPresenceRaster
    inTrueRaster = postSeasonalHabitat
    inFalseConstant = LekDisturbanceModifier
    whereClause = "VALUE = 0"

    LSDMpost = Con(inRaster, inTrueRaster, inFalseConstant, whereClause)
    return LSDMpost
Example #18
def compound_topographic(elevation_input, flow_accumulation, raw_slope,
                         cti_output):
    """
    Description: calculates 32-bit float compound topographic index
    Inputs: 'elevation_input' -- an input raster digital elevation model
            'flow_accumulation' -- an input flow accumulation raster with the same spatial reference as the elevation raster
            'raw_slope' -- an input raw slope raster in degrees with the same spatial reference as the elevation raster
            'cti_output' -- an output compound topographic index raster
    Returned Value: Returns a raster dataset on disk
    Preconditions: requires input elevation, flow accumulation, and raw slope raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import Divide
    from arcpy.sa import Ln
    from arcpy.sa import Plus
    from arcpy.sa import Raster
    from arcpy.sa import Times
    from arcpy.sa import Tan

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Get spatial properties for the input elevation raster
    description = arcpy.Describe(elevation_input)
    cell_size = description.meanCellHeight

    # Convert degree slope to radian slope
    print('\t\tConverting degree slope to radians...')
    slope_radian = Divide(Times(Raster(raw_slope), 1.570796), 90)

    # Calculate slope tangent
    print('\t\tCalculating slope tangent...')
    slope_tangent = Con(slope_radian > 0, Tan(slope_radian), 0.001)

    # Correct flow accumulation
    print('\t\tModifying flow accumulation...')
    accumulation_corrected = Times(Plus(Raster(flow_accumulation), 1),
                                   cell_size)

    # Calculate compound topographic index as natural log of corrected flow accumulation divided by slope tangent
    print('\t\tCalculating compound topographic index...')
    out_raster = Ln(Divide(accumulation_corrected, slope_tangent))
    out_raster.save(cti_output)
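
The value computed here is the standard compound topographic index, ln(a / tan(slope)), with a the cell-size-corrected flow accumulation. A quick numeric check with illustrative values:

import math

accumulation_corrected = 3000.0  # illustrative corrected flow accumulation
slope_tangent = 0.1              # illustrative tan(slope)
cti = math.log(accumulation_corrected / slope_tangent)  # ~10.31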
Example #19
def getProximityWithBurnInGrid(classValuesList, excludedValuesList,
                               inLandCoverGrid, landCoverValues,
                               neighborhoodSize_str, burnIn, burnInGrid, timer,
                               rngRemap):

    # create class (value = 1) / other (value = 0) / excluded grid (value = 0) raster
    # define the reclass values
    classValue = 1
    excludedValue = 0
    otherValue = 0
    newValuesList = [classValue, excludedValue, otherValue]

    # generate a reclass list where each item in the list is a two item list: the original grid value, and the reclass value
    reclassPairs = getInOutOtherReclassPairs(landCoverValues, classValuesList,
                                             excludedValuesList, newValuesList)

    AddMsg((
        "{0} Reclassifying selected land cover class to 1. All other values = 0..."
    ).format(timer.split()))
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE",
                             RemapValue(reclassPairs))

    AddMsg((
        "{0} Performing focal SUM on reclassified raster using {1} x {1} cell neighborhood..."
    ).format(timer.split(), neighborhoodSize_str))
    neighborhood = arcpy.sa.NbrRectangle(int(neighborhoodSize_str),
                                         int(neighborhoodSize_str), "CELL")
    focalGrid = arcpy.sa.FocalStatistics(reclassGrid == classValue,
                                         neighborhood, "SUM")

    AddMsg(("{0} Reclassifying focal SUM results into 20% breaks...").format(
        timer.split()))
    proximityGrid = Reclassify(focalGrid, "VALUE", rngRemap)

    if burnIn == "true":
        AddMsg(("{0} Burning excluded areas into proximity grid...").format(
            timer.split()))
        delimitedVALUE = arcpy.AddFieldDelimiters(burnInGrid, "VALUE")
        whereClause = delimitedVALUE + " = 0"
        proximityGrid = Con(burnInGrid, proximityGrid, burnInGrid, whereClause)

    return proximityGrid
Example #20
def raster_average(rasterobject_list, outras):
    from arcpy.sa import Con, SetNull

    n = len(rasterobject_list)

    # get mask
    rastermask_list = list()
    for each in rasterobject_list:
        eachmask = Con(each > 32760, 1, 0)
        rastermask_list.append(eachmask)

    sum_mask = rastermask_list[0]
    for each in rastermask_list[1:]:
        sum_mask += each

    # flip values and set null for mask
##    sum_mask = Con(sum_mask>0, None, 1)
    sum_mask = SetNull(sum_mask > 0, 1)

    # an in-memory mask raster isn't honored, so save it to disk first
    outras_mask = r"C:\mask_temp.tif"
    sum_mask.save(outras_mask)

    # average, only operate on those valid values
    arcpy.env.mask = outras_mask

    ##    arcpy.env.mask = sum_mask.catalogPath

    sum_raster = rasterobject_list[0]
    for each in rasterobject_list[1:]:
        sum_raster += each

    avg_raster = sum_raster / n

    avg_raster.save(outras)

    # clear mask
    arcpy.env.mask = None
Example #21
def getViewGrid(classValuesList, excludedValuesList, inLandCoverGrid,
                landCoverValues, viewRadius, conValues, timer):
    # create class (value = 1) / other (value = 0) / excluded grid (value = 0) raster
    # define the reclass values
    classValue = 1
    excludedValue = 0
    otherValue = 0
    newValuesList = [classValue, excludedValue, otherValue]

    # generate a reclass list where each item in the list is a two item list: the original grid value, and the reclass value
    reclassPairs = getInOutOtherReclassPairs(landCoverValues, classValuesList,
                                             excludedValuesList, newValuesList)

    AddMsg((
        "{0} Reclassifying selected land cover class to 1. All other values = 0..."
    ).format(timer.split()))
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE",
                             RemapValue(reclassPairs))

    AddMsg((
        "{0} Performing focal SUM on reclassified raster using {1} cell radius neighborhood..."
    ).format(timer.split(), viewRadius))
    neighborhood = arcpy.sa.NbrCircle(int(viewRadius), "CELL")
    focalGrid = arcpy.sa.FocalStatistics(reclassGrid == classValue,
                                         neighborhood, "SUM")

    AddMsg((
        "{0} Reclassifying focal SUM results into view = 0 and no-view = 1 binary raster..."
    ).format(timer.split()))
    #    delimitedVALUE = arcpy.AddFieldDelimiters(focalGrid,"VALUE")
    #    whereClause = delimitedVALUE+" = 0"
    #    viewGrid = Con(focalGrid, 1, 0, whereClause)
    whereValue = conValues[0]
    trueValue = conValues[1]
    viewGrid = Con(Raster(focalGrid) == whereValue, trueValue)
    return viewGrid
Example #22
def main(classification_file, bpi_broad_std, bpi_fine_std,
         slope, bathy, out_raster=None):
    """
    Perform raster classification, based on classification mappings
    and provided raster derivatives (fine- and broad- scale BPI,
    slope, and the original raster). Outputs a classified raster.
    """
    try:
        # set up scratch workspace
        # FIXME: see issue #18
        # CON is very very picky. it generates GRID outputs by default, and the
        # resulting names must not exist. for now, push our temp results
        # to the output folder.
        out_workspace = os.path.dirname(out_raster)
        # make sure workspace exists
        utils.workspace_exists(out_workspace)
        arcpy.env.scratchWorkspace = out_workspace
        arcpy.env.workspace = out_workspace

        arcpy.env.overwriteOutput = True
        # Create the broad-scale Bathymetric Position Index (BPI) raster
        msg_text = ("Generating the classified grid, based on the provided"
                    " classes in '{}'.".format(classification_file))
        utils.msg(msg_text)

        # Read in the BTM Document; the class handles parsing a variety of inputs.
        btm_doc = utils.BtmDocument(classification_file)
        classes = btm_doc.classification()
        utils.msg("Parsing {} document... found {} classes.".format(
            btm_doc.doctype, len(classes)))

        grids = []
        key = {'0': 'None'}
        for item in classes:
            cur_class = str(item["Class"])
            cur_name = str(item["Zone"])
            utils.msg("Calculating grid for {}...".format(cur_name))
            key[cur_class] = cur_name
            out_con = None
            # here come the CONs:
            out_con = run_con(item["Depth_LowerBounds"],
                              item["Depth_UpperBounds"],
                              bathy, cur_class)
            out_con2 = run_con(item["Slope_LowerBounds"],
                               item["Slope_UpperBounds"],
                               slope, out_con, cur_class)
            out_con3 = run_con(item["LSB_LowerBounds"],
                               item["LSB_UpperBounds"],
                               bpi_fine_std, out_con2, cur_class)
            out_con4 = run_con(item["SSB_LowerBounds"],
                               item["SSB_UpperBounds"],
                               bpi_broad_std, out_con3, cur_class)

            if isinstance(out_con4, arcpy.sa.Raster):
                rast = utils.save_raster(out_con4, "con_{}.tif".format(cur_name))
                grids.append(rast)
            else:
                # fall-through: no valid values detected for this class.
                warn_msg = ("WARNING, no valid locations found for class"
                            " {}:\n".format(cur_name))
                classifications = {
                    'depth': (item["Depth_LowerBounds"], item["Depth_UpperBounds"]),
                    'slope': (item["Slope_LowerBounds"], item["Slope_UpperBounds"]),
                    'broad': (item["SSB_LowerBounds"], item["SSB_UpperBounds"]),
                    'fine': (item["LSB_LowerBounds"], item["LSB_UpperBounds"])
                }
                for (name, vrange) in classifications.items():
                    (vmin, vmax) = vrange
                    if vmin is not None or vmax is not None:
                        if vmin is None:
                            vmin = ""
                        if vmax is None:
                            vmax = ""
                        warn_msg += "  {}: {{{}:{}}}\n".format(name, vmin, vmax)

                utils.msg(textwrap.dedent(warn_msg))

        if len(grids) == 0:
            raise NoValidClasses

        utils.msg("Creating Benthic Terrain Classification Dataset...")
        merge_grid = grids[0]
        for i in range(1, len(grids)):
            utils.msg("{} of {}".format(i, len(grids)-1))
            merge_grid = Con(merge_grid, grids[i], merge_grid, "VALUE = 0")
        arcpy.AddField_management(merge_grid, 'Zone', 'TEXT')
        rows = arcpy.UpdateCursor(merge_grid)
        for row in rows:
            val = str(row.getValue('VALUE'))
            if val in key:
                row.setValue('Zone', key[val])
                rows.updateRow(row)
            else:
                row.setValue('Zone', 'No Matching Zone')
                rows.updateRow(row)
        # writing Python like it's C
        del(rows)
        del(row)

        arcpy.env.rasterStatistics = "STATISTICS"
        # validate the output raster path
        out_raster = utils.validate_path(out_raster)
        utils.msg("Saving Output to {}".format(out_raster))
        merge_grid.save(out_raster)

        utils.msg("Complete.")

    except NoValidClasses as e:
        utils.msg(e, mtype='error')
    except Exception as e:
        if type(e) is ValueError:
            raise e
        utils.msg(e, mtype='error')

    try:
        utils.msg("Deleting intermediate data...")
        # Delete all intermediate raster data sets
        for grid in grids:
            arcpy.Delete_management(grid.catalogPath)
    except Exception as e:
        # hack -- swallowing this exception, because sometimes
        # refs are left around for these files.
        utils.msg("Failed to delete all intermediate data.", mtype='warning')
Example #23
def main(classification_file,
         bpi_broad_std,
         bpi_fine_std,
         slope,
         bathy,
         out_raster=None):
    """
    Perform raster classification, based on classification mappings
    and provided raster derivatives (fine- and broad- scale BPI,
    slope, and the original raster). Outputs a classified raster.
    """
    try:
        # set up scratch workspace
        # FIXME: see issue #18
        # CON is very very picky. it generates GRID outputs by default, and the
        # resulting names must not exist. for now, push our temp results
        # to the output folder.
        out_workspace = os.path.dirname(out_raster)
        # make sure workspace exists
        utils.workspace_exists(out_workspace)
        arcpy.env.scratchWorkspace = out_workspace
        arcpy.env.workspace = out_workspace

        arcpy.env.overwriteOutput = True
        # Create the broad-scale Bathymetric Position Index (BPI) raster
        msg_text = ("Generating the classified grid, based on the provided"
                    " classes in '{}'.".format(classification_file))
        utils.msg(msg_text)

        # Read in the BTM Document; the class handles parsing a variety of inputs.
        btm_doc = utils.BtmDocument(classification_file)
        classes = btm_doc.classification()
        utils.msg("Parsing {} document... found {} classes.".format(
            btm_doc.doctype, len(classes)))

        grids = []
        for item in classes:
            cur_class = str(item["Class"])
            cur_name = str(item["Zone"])
            utils.msg("Calculating grid for {}...".format(cur_name))
            out_con = None
            # here come the CONs:
            out_con = run_con(item["Depth_LowerBounds"],
                              item["Depth_UpperBounds"], bathy, cur_class)
            out_con2 = run_con(item["Slope_LowerBounds"],
                               item["Slope_UpperBounds"], slope, out_con,
                               cur_class)
            out_con3 = run_con(item["LSB_LowerBounds"],
                               item["LSB_UpperBounds"], bpi_fine_std, out_con2,
                               cur_class)
            out_con4 = run_con(item["SSB_LowerBounds"],
                               item["SSB_UpperBounds"], bpi_broad_std,
                               out_con3, cur_class)

            if isinstance(out_con4, arcpy.sa.Raster):
                rast = utils.save_raster(out_con4,
                                         "con_{}.tif".format(cur_name))
                grids.append(rast)
            else:
                # fall-through: no valid values detected for this class.
                warn_msg = ("WARNING, no valid locations found for class"
                            " {}:\n".format(cur_name))
                classifications = {
                    'depth':
                    (item["Depth_LowerBounds"], item["Depth_UpperBounds"]),
                    'slope':
                    (item["Slope_LowerBounds"], item["Slope_UpperBounds"]),
                    'broad':
                    (item["SSB_LowerBounds"], item["SSB_UpperBounds"]),
                    'fine': (item["LSB_LowerBounds"], item["LSB_UpperBounds"])
                }
                for (name, vrange) in classifications.items():
                    (vmin, vmax) = vrange
                    if vmin is not None or vmax is not None:
                        if vmin is None:
                            vmin = ""
                        if vmax is None:
                            vmax = ""
                        warn_msg += "  {}: {{{}:{}}}\n".format(
                            name, vmin, vmax)

                utils.msg(textwrap.dedent(warn_msg))

        if len(grids) == 0:
            raise NoValidClasses

        utils.msg("Creating Benthic Terrain Classification Dataset...")
        merge_grid = grids[0]
        for i in range(1, len(grids)):
            utils.msg("{} of {}".format(i, len(grids) - 1))
            merge_grid = Con(merge_grid, grids[i], merge_grid, "VALUE = 0")

        arcpy.env.rasterStatistics = "STATISTICS"
        # validate the output raster path
        out_raster = utils.validate_path(out_raster)
        utils.msg("Saving Output to {}".format(out_raster))
        merge_grid.save(out_raster)
        utils.msg("Complete.")

    except NoValidClasses as e:
        utils.msg(e, mtype='error')
    except Exception as e:
        if type(e) is ValueError:
            raise e
        utils.msg(e, mtype='error')

    try:
        utils.msg("Deleting intermediate data...")
        # Delete all intermediate raster data sets
        for grid in grids:
            arcpy.Delete_management(grid.catalogPath)
    except Exception as e:
        # hack -- swallowing this exception, because sometimes
        # refs are left around for these files.
        utils.msg("Failed to delete all intermediate data.", mtype='warning')
Example #24
def main(in_raster=None, neighborhood_size=None, out_raster=None):
    """
    Compute terrain ruggedness, using the vector ruggedness measure (VRM),
    as described in:

        Sappington et al., 2007. Quantifying Landscape Ruggedness for
        Animal Habitat Analysis: A Case Study Using Bighorn Sheep in the
        Mojave Desert. Journal of Wildlife Management. 71(5): 1419-1426.
    """
    hood_size = int(neighborhood_size)

    # FIXME: expose this as an option per #18
    w = utils.Workspace()
    if w.exists:
        out_workspace = w.path
    else:
        out_workspace = os.path.dirname(out_raster)
    utils.workspace_exists(out_workspace)
    # force temporary stats to be computed in our output workspace
    arcpy.env.scratchWorkspace = out_workspace
    arcpy.env.workspace = out_workspace

    # TODO expose as config
    pyramid_orig = arcpy.env.pyramid
    arcpy.env.pyramid = "NONE"
    # TODO: currently set to automatically overwrite, expose this as option
    arcpy.env.overwriteOutput = True
    arcpy.env.compression = 'LZW'

    try:
        # Create Slope and Aspect rasters
        utils.msg("Calculating aspect...")
        out_aspect = Aspect(in_raster)
        utils.msg("Calculating slope...")
        out_slope = Slope(in_raster, "DEGREE")

        # Convert Slope and Aspect rasters to radians
        utils.msg("Converting slope and aspect to radians...")
        slope_rad = out_slope * (math.pi / 180)
        aspect_rad = out_aspect * (math.pi / 180)

        # Calculate x, y, and z rasters
        utils.msg("Calculating x, y, and z rasters...")
        xy_raster_calc = Sin(slope_rad)
        z_raster_calc = Cos(slope_rad)
        x_raster_calc = Con(out_aspect == -1, 0,
                            Sin(aspect_rad)) * xy_raster_calc
        y_raster_calc = Con(out_aspect == -1, 0,
                            Cos(aspect_rad)) * xy_raster_calc

        # Calculate sums of x, y, and z rasters for selected neighborhood size
        utils.msg("Calculating sums of x, y, and z rasters in neighborhood...")
        hood = NbrRectangle(hood_size, hood_size, "CELL")
        x_sum_calc = FocalStatistics(x_raster_calc, hood, "SUM", "NODATA")
        y_sum_calc = FocalStatistics(y_raster_calc, hood, "SUM", "NODATA")
        z_sum_calc = FocalStatistics(z_raster_calc, hood, "SUM", "NODATA")

        # Calculate the resultant vector
        utils.msg("Calculating the resultant vector...")
        result_vect = (x_sum_calc**2 + y_sum_calc**2 + z_sum_calc**2)**0.5

        arcpy.env.rasterStatistics = "STATISTICS"
        arcpy.env.pyramid = pyramid_orig
        # Calculate the Ruggedness raster
        utils.msg("Calculating the final ruggedness raster...")
        ruggedness = 1 - (result_vect / hood_size**2)

        out_raster = utils.validate_path(out_raster)
        utils.msg("Saving ruggedness raster to to {}.".format(out_raster))
        arcpy.CopyRaster_management(ruggedness, out_raster)

    except Exception as e:
        utils.msg(e, mtype='error')
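
As a sanity check on the ruggedness formula above (VRM = 1 - |R|/n, with n = hood_size^2): over perfectly flat terrain every cell's unit vector points straight up, so |R| = n and VRM = 0. Illustrative numbers:

import math

n = 3 ** 2                                       # 3x3 neighborhood
resultant = math.sqrt(0 ** 2 + 0 ** 2 + n ** 2)  # all-vertical unit vectors
vrm = 1 - resultant / n                          # 0.0 -- completely smooth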
Example #25
# now that we have all our data we can run the decision tree

arcpy.AddMessage("Classifying cover")
progress += 1

# Con(param, where_clause="Value<=XXX", in_true_raster_or_constant=YYY, in_false_raster_or_constant=ZZZ)

if class_type == 'ternary':
    other_val = 0
    veg_val = 1
    tree_val = 2
    classified = Con(dhm,
                     where_clause="Value<=3.524",
                     in_true_raster_or_constant=Con(
                         dsm_sl,
                         where_clause="Value<=3.205",
                         in_true_raster_or_constant=other_val,
                         in_false_raster_or_constant=veg_val),
                     in_false_raster_or_constant=tree_val)
elif class_type == 'binary':
    other_val = 0
    tree_val = 1
    classified = Con(dsm_sl,
                     where_clause="Value<=19.241",
                     in_true_raster_or_constant=other_val,
                     in_false_raster_or_constant=Con(
                         dhm,
                         where_clause="Value<=0",
                         in_true_raster_or_constant=other_val,
                         in_false_raster_or_constant=tree_val))
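
This snippet assumes dhm (a canopy height model), dsm_sl (slope of the digital surface model), and class_type were prepared earlier in the script; a hypothetical setup might look like:

from arcpy.sa import Con, Raster

dhm = Raster(r'C:\data\dhm.tif')           # hypothetical canopy height model
dsm_sl = Raster(r'C:\data\dsm_slope.tif')  # hypothetical surface-model slope
class_type = 'ternary'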
Example #26
def arc_catch_del(WD, boundary_shp, sites_shp, site_num_col='site', point_dis=1000, stream_depth=10, grid_size=8, pour_dis=20, streams='S:/Surface Water/shared\\GIS_base\\vector\\MFE_REC_rivers_no_1st.shp', dem='S:/Surface Water/shared\\GIS_base\\raster\\DEM_8m_2012\\linz_8m_dem', export_dir='results', overwrite_rasters=False):
    """
    Arcpy function to delineate catchments based on specific points, a polygon, and the REC rivers layer.
    Arcpy must be installed.
    Be careful that the folder path isn't too long!!! Do not have spaces in the path name!!! Arc sucks!!!

    Parameters:
    WD: str
        Working directory.
    boundary_shp: str
        The path to the shapefile polygon boundary extent.
    sites_shp: str
        The path to the sites shapefile.
    site_num_col: str
        The column in the sites_shp that contains the site IDs.
    point_dis: int
        The max distance to snap the sites to the nearest stream line.
    stream_depth: int
        The depth that the streams shapefile should be burned into the dem.
    grid_size: int
        The resolution of the dem.
    streams: str
        The path to the streams shapefile.
    dem: str
        The path to the dem.
    export_dir: str
        The subfolder where the results should be saved.
    overwrite_rasters: bool
        Should the flow direction and flow accumulation rasters be overwritten?

    Returns
    -------
    None
    """
    # append the ArcGIS install paths so that arcpy can be imported
    sys.path.append('C:\\Python27\\ArcGIS10.4\\Lib\\site-packages')
    sys.path.append(r'C:\Program Files (x86)\ArcGIS\Desktop10.4\arcpy')
    sys.path.append(r'C:\Program Files (x86)\ArcGIS\Desktop10.4\ArcToolbox\Scripts')
    sys.path.append(r'C:\Program Files (x86)\ArcGIS\Desktop10.4\bin')
    sys.path.append('C:\\Python27\\ArcGIS10.4\\lib')

    # Import packages
    import arcpy
    from arcpy import env
    from arcpy.sa import Raster, Con, IsNull, FlowDirection, FlowAccumulation, Fill, SnapPourPoint, Watershed
    #import ArcHydroTools as ah

    # Check out spatial analyst license
    arcpy.CheckOutExtension('Spatial')
    # Define functions

#    def snap_points(points, lines, distance):
#
#        import arcgisscripting, sys
#
#        gp = arcgisscripting.create()
#
#        # Load the Analysis toolbox so that the Near tool is available
#        gp.toolbox = "analysis"
#
#        # Perform the Near operation looking for the nearest line
#        # (from the lines Feature Class) to each point (from the
#        # points Feature Class). The third argument is the search
#        # radius - blank means to search as far as is needed. The
#        # fourth argument instructs the command to output the
#        # X and Y co-ordinates of the nearest point found to the
#        # NEAR_X and NEAR_Y fields of the points Feature Class
#        gp.near(points, lines, str(distance), "LOCATION")
#
#        # Create an update cursor for the points Feature Class
#        # making sure that the NEAR_X and NEAR_Y fields are included
#        # in the return data
#        rows = gp.UpdateCursor(points, "", "", "NEAR_X, NEAR_Y")
#
#        row = rows.Next()
#
#        # For each row
#        while row:
#            # Get the location of the nearest point on one of the lines
#            # (added to the file as fields by the Near operation above
#            new_x = row.GetValue("NEAR_X")
#            new_y = row.GetValue("NEAR_Y")
#
#            # Create a new point object with the new x and y values
#            point = gp.CreateObject("Point")
#            point.x = new_x
#            point.y = new_y
#
#            # Assign it to the shape field
#            row.shape = point
#
#            # Update the row data and move to the next row
#            rows.UpdateRow(row)
#            row = rows.Next()

    def snap_points(points, lines, distance):
        """
        Ogi's updated snap_points function.
        """

        points = arcpy.Near_analysis(points, lines, str(distance), "LOCATION")

        # Create an update cursor for the points Feature Class
        # making sure that the NEAR_X and NEAR_Y fields are included
        # in the return data
        with arcpy.da.UpdateCursor(points, ["NEAR_X", "NEAR_Y", "SHAPE@XY"]) as cursor:
            for row in cursor:
                x, y, shape_xy = row
                shape_xy = (x, y)
                cursor.updateRow([x, y, shape_xy])
        return points

    ### Parameters:
    ## input

    # Necessary to change
    env.workspace = WD
    boundary = boundary_shp
    sites_in = sites_shp

#    site_num_col = 'site'

    # May not be necessary to change
    final_export_dir = export_dir
#    streams = 'S:/Surface Water/shared\\GIS_base\\vector\\MFE_REC_rivers_no_1st.shp'
#    dem = 'S:/Surface Water/shared\\GIS_base\\raster\\DEM_8m_2012\\linz_8m_dem'

    env.extent = boundary
    arcpy.env.overwriteOutput = True

    ## output
    bound = 'bound_diss.shp'
    sites = 'sites_bound.shp'
    streams_loc = 'MFE_streams_loc.shp'
    dem_loc = 'dem_loc.tif'
    stream_diss = 'MFE_rivers_diss.shp'
    stream_rast = 'stream_rast.tif'
    dem_diff_tif = 'dem_diff.tif'
    dem_fill_tif = 'dem_fill.tif'
    fd_tif = 'fd1.tif'
    accu_tif = 'accu1.tif'
    catch_poly = 'catch_del.shp'

    if not os.path.exists(os.path.join(env.workspace, final_export_dir)):
        os.makedirs(os.path.join(env.workspace, final_export_dir))

    ##########################
    #### Processing

    ### Process sites and streams vectors

    # Dissolve boundary for faster processing
    arcpy.Dissolve_management(boundary, bound)

    # Clip sites and streams to boundary
    arcpy.Clip_analysis(streams, bound, streams_loc)
    arcpy.Clip_analysis(sites_in, bound, sites)

    # Snap sites to streams layer
    snap_points(sites, streams_loc, point_dis)

    # Dissolve stream network
    arcpy.Dissolve_management(streams_loc, stream_diss, "", "", "MULTI_PART", "DISSOLVE_LINES")

    # Add raster parameters to streams layer
    arcpy.AddField_management(stream_diss, "rast", "SHORT")
    arcpy.CalculateField_management(stream_diss, "rast", stream_depth, "PYTHON_9.3")

    ############################################
    ### Delineate catchments

    # Convert stream vector to raster
    arcpy.FeatureToRaster_conversion(stream_diss, 'rast', stream_rast, grid_size)

    ## Create the necessary flow direction and accumulation rasters if they do not already exist
    if os.path.exists(os.path.join(env.workspace, accu_tif)) and not overwrite_rasters:
        accu1 = Raster(accu_tif)
        fd1 = Raster(fd_tif)
    else:
        # Clip the DEM to the study area
        print('clipping DEM to catchment area...')
        arcpy.Clip_management(dem, "1323813.1799 5004764.9257 1688157.0305 5360238.95", dem_loc, bound, "", "ClippingGeometry", "NO_MAINTAIN_EXTENT")

        # Fill holes in DEM
        print('Filling DEM...')
#        dem_fill = Fill(dem_loc)

        # Subtract the stream raster from the DEM where streams are present
        s_rast = Raster(stream_rast)
        dem_diff = Con(IsNull(s_rast), dem_loc, dem_loc - s_rast)
        dem_diff.save(dem_diff_tif)

        # Fill holes in DEM
        dem2 = Fill(dem_diff_tif)
        dem2.save(dem_fill_tif)

        # flow direction
        print('Flow direction...')
        fd1 = FlowDirection(dem2)
        fd1.save(fd_tif)

        # flow accu
        print('Flow accumulation...')
        accu1 = FlowAccumulation(fd1)
        accu1.save(accu_tif)

    # create pour points
    pp1 = SnapPourPoint(sites, accu1, pour_dis, site_num_col)

    # Determine the catchments for all points
    catch1 = Watershed(fd1, pp1)

    # Convert raster to polygon
    arcpy.RasterToPolygon_conversion(catch1, catch_poly, 'SIMPLIFY', 'Value')

    # Add in a field for the area of each catchment
    arcpy.AddField_management(catch_poly, "area_m2", "LONG")
    arcpy.CalculateField_management(catch_poly, "area_m2", 'round(!shape.area!)', "PYTHON_9.3")

    #### Check back in the spatial analyst license once done
    arcpy.CheckInExtension('Spatial')
Example #27
def build_cn_raster(landcover_raster,
                    lookup_csv,
                    soils_polygon,
                    soils_hydrogroup_field="SOIL_HYDRO",
                    reference_raster=None,
                    out_cn_raster=None):
    """Build a curve number raster from landcover raster, soils polygon, and a crosswalk between 
    landcover classes, soil hydro groups, and curve numbers.

    :param lookup_csv: [description]
    :type lookup_csv: [type]
    :param landcover_raster: [description]
    :type landcover_raster: [type]
    :param soils_polygon: polygon containing soils with a hydro classification. 
    :type soils_polygon: [type]
    :param soils_hydrogroup_field: [description], defaults to "SOIL_HYDRO" (from the NCRS soils dataset)
    :type soils_hydrogroup_field: str, optional
    :param out_cn_raster: [description]
    :type out_cn_raster: [type]    
    """

    # GP Environment ----------------------------
    msg("Setting up GP Environment...")
    # if reference_raster is provided, we use it to set the GP environment for
    # subsequent raster operations
    if reference_raster:
        if not isinstance(reference_raster, Raster):
            # read in the reference raster as a Raster object.
            reference_raster = Raster(reference_raster)
    else:
        reference_raster = Raster(landcover_raster)

    # set the snap raster, cell size, and extent, and coordinate system for subsequent operations
    env.snapRaster = reference_raster
    env.cellSize = reference_raster.meanCellWidth
    env.extent = reference_raster
    env.outputCoordinateSystem = reference_raster

    cs = env.outputCoordinateSystem.exportToString()

    # SOILS -------------------------------------

    msg("Processing Soils...")
    # convert the soils polygon to a raster based on the hydro group field
    soils_raster_path = so("soils_raster")
    PolygonToRaster_conversion(soils_polygon, soils_hydrogroup_field,
                               soils_raster_path, "CELL_CENTER")
    soils_raster = Raster(soils_raster_path)

    # use the raster attribute table to build a lookup of raster values to soil hydro codes
    # from the polygon (that were stored in the raster attribute table after conversion)
    if not soils_raster.hasRAT:
        msg("Soils raster does not have an attribute table. Building...",
            "warning")
        BuildRasterAttributeTable_management(soils_raster, "Overwrite")
    # build a 2D array from the RAT
    fields = ["Value", soils_hydrogroup_field]
    rows = [fields]
    # soils_raster_table = MakeTableView_management(soils_raster_path)
    with SearchCursor(soils_raster_path, fields) as sc:
        for row in sc:
            rows.append([row[0], row[1]])
    # turn that into a dictionary, where the key==soil hydro text and value==the raster cell value
    lookup_from_soils = {v: k for k, v in etl.records(rows)}
    # also capture a list of just the values, used to iterate conditionals later
    soil_values = [v['Value'] for v in etl.records(rows)]

    # LANDCOVER ---------------------------------
    msg("Processing Landcover...")
    if not isinstance(landcover_raster, Raster):
        # read in the landcover raster as a Raster object.
        landcover_raster_obj = Raster(landcover_raster)
    else:
        landcover_raster_obj = landcover_raster
    landcover_values = []
    with SearchCursor(landcover_raster, ["Value"]) as sc:
        for row in sc:
            landcover_values.append(row[0])

    # LOOKUP TABLE ------------------------------
    msg("Processing Lookup Table...")
    # read the lookup csv, clean it up, and use the lookups from above to limit it to just
    # those values in the rasters
    t = etl\
        .fromcsv(lookup_csv)\
        .convert('utc', int)\
        .convert('cn', int)\
        .select('soil', lambda v: v in lookup_from_soils.keys())\
        .convert('soil', lookup_from_soils)\
        .select('utc', lambda v: v in landcover_values)

    # This gets us a table with the landcover class (as a number), the
    # corresponding value in the converted soil raster, and the matching curve number.

    # DETERMINE CURVE NUMBERS -------------------
    msg("Assigning Curve Numbers...")
    # Use that to reassign cell values using conditional map algebra operations
    cn_rasters = []
    for rec in etl.records(t):
        cn_raster_component = Con(
            (landcover_raster_obj == rec.utc) & (soils_raster == rec.soil),
            rec.cn, 0)
        cn_rasters.append(cn_raster_component)

    cn_raster = CellStatistics(cn_rasters, "MAXIMUM")

    # REPROJECT THE RESULTS -------------------
    msg("Reprojecting and saving the results....")
    if not out_cn_raster:
        out_cn_raster = so("cn_raster", "random", "in_memory")

    ProjectRaster_management(in_raster=cn_raster,
                             out_raster=out_cn_raster,
                             out_coor_system=cs,
                             resampling_type="NEAREST",
                             cell_size=env.cellSize)

    # cn_raster.save(out_cn_raster)
    return out_cn_raster
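
A hypothetical call; the lookup CSV needs 'utc', 'soil', and 'cn' columns, as consumed by the petl pipeline above:

# Illustrative inputs; reference_raster defaults to the landcover raster.
cn = build_cn_raster(landcover_raster='landcover.tif',
                     lookup_csv='cn_lookup.csv',
                     soils_polygon='soils.shp',
                     soils_hydrogroup_field='SOIL_HYDRO',
                     out_cn_raster='cn_raster.tif')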
Example #28
def main():
    # GET PARAMETER VALUES
    Map_Units_Provided = arcpy.GetParameterAsText(0)  # optional
    Proposed_Modified_Features_Provided = arcpy.GetParameterAsText(
        1)  # optional
    Project_Name = arcpy.GetParameterAsText(2)

    # DEFINE DIRECTORIES
    # Get the pathname to this script
    scriptPath = sys.path[0]
    arcpy.AddMessage("Script folder: " + scriptPath)
    arcpy.AddMessage("Python version: " + sys.version)
    # Construct pathname to workspace
    if Map_Units_Provided:
        projectGDB = arcpy.Describe(Map_Units_Provided).path
    elif Proposed_Modified_Features_Provided:
        projectGDB = arcpy.Describe(Proposed_Modified_Features_Provided).path
    else:
        arcpy.AddMessage("Please provide either a Map_Units or " +
                         "Proposed_Modified_Features layer.")
        sys.exit(0)
    arcpy.AddMessage("Project geodatabase: " + projectGDB)
    Project_Folder = arcpy.Describe(projectGDB).path
    arcpy.AddMessage("Project folder:" + Project_Folder)

    # Instantiate a cheStandard object
    cheStandard = cohqt.cheStandard(projectGDB, scriptPath)

    # ENVIRONMENT SETTINGS
    # Set workspaces
    arcpy.env.workspace = projectGDB
    scratch_folder = os.path.join(arcpy.Describe(projectGDB).path, 'scratch')
    if not arcpy.Exists(scratch_folder):
        arcpy.CreateFolder_management(
            arcpy.Describe(projectGDB).path, 'scratch')
    arcpy.env.scratchWorkspace = scratch_folder
    # Overwrite outputs
    arcpy.env.overwriteOutput = True

    # DEFINE GLOBAL VARIABLES
    Parameter_Values = cheStandard.ParameterValues
    ConiferModifier = cheStandard.ConiferModifier
    GrSG_LDI = cheStandard.GrSG_LDI
    LekPresenceRaster = cheStandard.LekPresenceRaster
    Lek_Distance_Modifier = cheStandard.LekDistanceModifier
    SageModifier = cheStandard.SageModifier
    GrSG_Habitat = cheStandard.GrSGHabitatRaster
    MigrationModifier = cheStandard.MuleDeerMigrationMod
    WinterModifier = cheStandard.MuleDeerWinterMod
    SummerModifier = cheStandard.MuleDeerSummerMod
    MuleDeer_LDI = cheStandard.MuleDeerLDI
    emptyRaster = cheStandard.EmptyRaster
    BWMD_Open = cheStandard.BWMD_Open
    GrSG_Range = cheStandard.GrSGHabitat
    Mule_Range = cheStandard.MuleDeerHabitat
    cellSize = arcpy.GetRasterProperties_management(emptyRaster,
                                                    "CELLSIZEX").getOutput(0)

    # Filenames for feature classes or rasters used by this script
    MAP_UNITS = "Map_Units"
    PROPOSED_MODIFIED_FEATURES = "Proposed_Modified_Features"
    CREDIT_PROJECT_AREA = "Credit_Project_Area"
    CONIFER_TREATMENT_AREA = "Conifer_Treatment_Area"

    # Filenames for feature class and rasters created by this script
    INDIRECT_IMPACT_AREA = "Indirect_Impact_Area"
    ANALYSIS_AREA = "Analysis_Area"
    MAP_UNITS_DISSOLVE = "Map_Units_Dissolve"
    # GrSG Filenames
    CURRENT_ANTHRO_DISTURBANCE = "GRSG_Pre_Anthro_Disturbance"
    PROJECTED_ANTHRO_DISTURBANCE = "GRSG_Post_Anthro_Disturbance"
    GRSG_PRE_BREEDING = "GRSG_Pre_Breeding"
    GRSG_PRE_SUMMER = "GRSG_Pre_Summer"
    GRSG_PRE_WINTER = "GRSG_Pre_Winter"
    GRSG_POST_BREEDING = "GRSG_Post_Breeding"
    GRSG_POST_SUMMER = "GRSG_Post_Summer"
    GRSG_POST_WINTER = "GRSG_Post_Winter"
    POST_CONIFER_MODIFIER = "Post_Conifer_Modifier"
    # Mule Deer Filenames
    CURRENT_ANTHRO_DISTURBANCE_MD = "MuleDeer_Pre_Anthro_Disturbance"
    PROJECTED_ANTHRO_DISTURBANCE_MD = "MuleDeer_Post_Anthro_Disturbance"
    MULE_PRE_SUMMER = "MuleDeer_Pre_Summer"
    MULE_PRE_MIGRATION = "MuleDeer_Pre_Migration"
    MULE_PRE_WINTER = "MuleDeer_Pre_Winter"
    MULE_POST_SUMMER = "MuleDeer_Post_Summer"
    MULE_POST_MIGRATION = "MuleDeer_Post_Migration"
    MULE_POST_WINTER = "MuleDeer_Post_Winter"

    # ------------------------------------------------------------------------

    # FUNCTION CALLS
    # Check out Spatial Analyst extension
    hqtlib.CheckOutSpatialAnalyst()

    # Check provided layers
    if not Map_Units_Provided and not Proposed_Modified_Features_Provided:
        arcpy.AddError("ERROR:: Please provide a 'Map_Units' and/or "
                       "'Proposed_Modified_Features' feature.")
        sys.exit(0)

    if not Proposed_Modified_Features_Provided:
        # Ensure Proposed_Modified_Features does not exist
        if arcpy.Exists("Proposed_Modified_Features"):
            arcpy.AddError("ERROR:: A 'Proposed_Modified_Features' layer "
                           "was detected in the project's geodatabase. "
                           "Provide the 'Proposed_Modified_Features' layer "
                           "and re-run Credit Tool 2.")
            sys.exit(0)

    if Map_Units_Provided:
        # Clear selection, if present
        util.ClearSelectedFeatures(Map_Units_Provided)

        # Check provided layer
        feature = Map_Units_Provided
        required_fields = ["Map_Unit_ID", "Map_Unit_Name"]
        no_null_fields = ["Map_Unit_ID"]
        expected_fcs = [CREDIT_PROJECT_AREA]
        hqtlib.CheckPolygonInput(feature, required_fields, expected_fcs,
                                 no_null_fields)

        # Update Map Units layer with provided layer
        provided_input = Map_Units_Provided
        parameter_name = MAP_UNITS
        preserve_existing = False
        Map_Units = util.AdoptParameter(provided_input, parameter_name,
                                        preserve_existing)

        # Add Map Units layer to map
        layerFile = cheStandard.getLayerFile("MapUnits.lyr")
        util.AddToMap(Map_Units, layerFile)

        # Provide location of Credit Project Area
        Credit_Project_Area = CREDIT_PROJECT_AREA

    if Proposed_Modified_Features_Provided:
        # Clear selection, if present
        util.ClearSelectedFeatures(Proposed_Modified_Features_Provided)

        # Check provided layer
        required_fields = ["Type", "Subtype"]
        no_null_fields = required_fields
        expected_fcs = None
        hqtlib.CheckPolygonInput(Proposed_Modified_Features_Provided,
                                 required_fields, expected_fcs, no_null_fields)

        # Update Proposed_Modified_Features with provided layer
        provided_input = Proposed_Modified_Features_Provided
        parameterName = PROPOSED_MODIFIED_FEATURES
        preserve_existing = False
        Proposed_Modified_Features = util.AdoptParameter(
            provided_input, parameterName, preserve_existing)

        # Add Proposed Modified Features layer to map
        layerFile = cheStandard.getLayerFile("DebitProjectArea.lyr")
        util.AddToMap(Proposed_Modified_Features, layerFile)

        # Update message
        arcpy.AddMessage("Creating the area of indirect benefit")

        # Create Credit_Project_Area for projects that propose to modify
        # anthropogenic features
        # Create the Indirect_Impact_Area
        in_data = Proposed_Modified_Features
        out_name = INDIRECT_IMPACT_AREA
        Indirect_Impact_Area = hqtlib.CreateIndirectImpactArea(
            in_data, Parameter_Values, out_name)

        # Add field "Indirect"
        input_feature = Indirect_Impact_Area
        fieldsToAdd = ["Indirect"]
        fieldTypes = ["TEXT"]
        util.AddFields(input_feature, fieldsToAdd, fieldTypes)

        # Update field 'Indirect' to equal 'True'
        with arcpy.da.UpdateCursor(Indirect_Impact_Area,
                                   fieldsToAdd) as cursor:
            for row in cursor:
                row[0] = "True"
                cursor.updateRow(row)

        if Map_Units_Provided:
            # Merge with Credit_Project_Boundary
            fileList = [Map_Units_Provided, Indirect_Impact_Area]
            out_name = "in_memory/Credit_Project_Boundary"
            Project_Area = arcpy.Union_analysis(fileList, out_name)
        else:
            Project_Area = Indirect_Impact_Area

        # Eliminate areas of non-habitat to create Credit_Project_Area
        out_name = CREDIT_PROJECT_AREA
        habitat_bounds = cheStandard.HabitatMgmtArea
        Credit_Project_Area = hqtlib.EliminateNonHabitat(
            Project_Area, out_name, habitat_bounds)

    # Detect habitat types impacted directly or indirectly
    is_grsg = cohqt.DetectHabitat(Credit_Project_Area, GrSG_Range)
    is_mule = cohqt.DetectHabitat(Credit_Project_Area, Mule_Range)

    # Update message
    arcpy.AddMessage("Dissolving all multi-part map units to create "
                     "Map_Units_Dissolve")

    # Dissolve Map Units
    in_features = MAP_UNITS
    allowable_fields = ["Map_Unit_ID", "Map_Unit_Name", "Indirect"]
    out_name = MAP_UNITS_DISSOLVE
    anthro_features = None
    Map_Units_Dissolve = hqtlib.DissolveMapUnits(in_features, allowable_fields,
                                                 out_name, anthro_features)

    # Update message
    arcpy.AddMessage("Adding Map_Units_Dissolve to map")

    # Add layer to map document
    feature = Map_Units_Dissolve
    layerFile = cheStandard.getLayerFile("MapUnits.lyr")
    util.AddToMap(feature, layerFile, zoom_to=True)

    # Update message
    arcpy.AddMessage("Calculating area in acres for each map unit")

    # Calculate Area
    hqtlib.CalcAcres(Map_Units_Dissolve)

    # Update message
    arcpy.AddMessage("Adding transect field to Map Units Dissolve")

    # Add transects field to map units table
    fields = ["Transects"]
    fieldTypes = ["SHORT"]
    util.AddFields(Map_Units_Dissolve, fields, fieldTypes)

    # Update message
    arcpy.AddMessage("Creating Analysis Area")

    # Create Analysis Area
    out_name = ANALYSIS_AREA
    Analysis_Area = hqtlib.CreateAnalysisArea(Credit_Project_Area,
                                              Parameter_Values, out_name)

    # Add Analysis_Area to map
    layerFile = cheStandard.getLayerFile("AnalysisArea.lyr")
    util.AddToMap(Analysis_Area, layerFile, zoom_to=True)

    # Set processing extent to Analysis_Area
    arcpy.env.extent = ANALYSIS_AREA

    ### GREATER SAGE-GROUSE ANTHRO DIST & MODIFIERS ###
    if is_grsg:

        # Update message
        arcpy.AddMessage("Calculating proportion of each map unit within 1 km "
                         "of a lek")

        # Calculate proportion of map unit within 1 km of a lek
        inZoneData = Map_Units_Dissolve
        inValueRaster = cheStandard.LekPresenceRaster
        zoneField = "Map_Unit_ID"
        outTable = "Proportion_Lek"
        hqtlib.CalcZonalStats(inZoneData, zoneField, inValueRaster, outTable)

        # Join the zonal statistic to the Map Units Dissolve table
        field_name = "PropLek"
        hqtlib.JoinMeanToTable(inZoneData, outTable, zoneField, field_name)

        # Update message
        arcpy.AddMessage(
            "Calculating proportion of each map unit in the mesic "
            "precip zone")

        # Calculate Proportion of each map unit in the mesic precip zone
        inZoneData = Map_Units_Dissolve
        inValueRaster = cheStandard.Precip
        zoneField = "Map_Unit_ID"
        outTable = "Proportion_Mesic"
        hqtlib.CalcZonalStats(inZoneData, zoneField, inValueRaster, outTable)

        # Join the zonal statistic to the Map Units Dissolve table
        field_name = "PropMesic"
        hqtlib.JoinMeanToTable(inZoneData, outTable, zoneField, field_name)

        # Update message
        arcpy.AddMessage("Calculating pre-project anthropogenic "
                         "disturbance modifier for greater sage-grouse")

        # Calculate Current_Anthro_Disturbance
        dist_field = "GrSG_Dist"
        weight_field = "GrSG_Weight"
        term = cheStandard.CreditTerms[0]
        unique_proposed_subtypes = []
        anthro_disturbance_type = "Pre"

        Current_Anthro_Disturbance = cohqt.CalcAnthroDisturbance(
            Parameter_Values, term, unique_proposed_subtypes,
            anthro_disturbance_type, cheStandard, dist_field, weight_field,
            cellSize, emptyRaster)
        Current_Anthro_Disturbance.save(CURRENT_ANTHRO_DISTURBANCE)

        # Update message
        arcpy.AddMessage("Current_Anthro_Disturbance Calculated")
        arcpy.AddMessage("Calculating Pre-Project Habitat Modifiers for"
                         "Greater Sage-Grouse")

        # Calculate pre-project cumulative habitat modifiers
        winterHabitatPre = cohqt.calcWinterHabitatGRSG(
            Current_Anthro_Disturbance, ConiferModifier, GrSG_LDI,
            GrSG_Habitat)
        LSDMWinterPre = cohqt.applyLekUpliftModifierPre(
            winterHabitatPre, LekPresenceRaster)
        breedingHabitatPre = cohqt.calcBreedingHabitatGRSG(
            Current_Anthro_Disturbance, ConiferModifier, GrSG_LDI,
            Lek_Distance_Modifier, GrSG_Habitat)
        LSDMBreedingPre = cohqt.applyLekUpliftModifierPre(
            breedingHabitatPre, LekPresenceRaster)
        summerHabitatPre = cohqt.calcSummerHabitatGRSG(
            Current_Anthro_Disturbance, ConiferModifier, GrSG_LDI,
            SageModifier, GrSG_Habitat)
        LSDMSummerPre = cohqt.applyLekUpliftModifierPre(
            summerHabitatPre, LekPresenceRaster)
        seasonalHabitatRasters = [
            LSDMWinterPre, LSDMBreedingPre, LSDMSummerPre
        ]

        # Save outputs
        # winterHabitatPre.save(GRSG_PRE_WINTER)
        LSDMWinterPre.save(GRSG_PRE_WINTER)
        # breedingHabitatPre.save(GRSG_PRE_BREEDING)
        LSDMBreedingPre.save(GRSG_PRE_BREEDING)
        # summerHabitatPre.save(GRSG_PRE_SUMMER)
        LSDMSummerPre.save(GRSG_PRE_SUMMER)

        # Initialize list of uplift rasters to combine for LekUpliftModifier
        upliftRasters = []
        if arcpy.Exists(CONIFER_TREATMENT_AREA):
            # Calculate post-project conifer modifier
            Conifer_Cover = cheStandard.ConiferCover
            coniferModifierPost = cohqt.calcConiferPost(
                CONIFER_TREATMENT_AREA, Conifer_Cover)
            coniferModifierPost.save(POST_CONIFER_MODIFIER)

            # Calculate uplift from conifer removal
            coniferUplift = cohqt.calcUplift(ConiferModifier,
                                             coniferModifierPost)
            upliftRasters.append(coniferUplift)

        else:
            coniferModifierPost = ConiferModifier

        if arcpy.Exists(PROPOSED_MODIFIED_FEATURES):
            # Prepare proposed anthropogenic features
            unique_proposed_subtypes = cohqt.convertProposedToRasterCredit(
                PROPOSED_MODIFIED_FEATURES, cellSize)

            anthroPath = cheStandard.AnthroFeaturePath
            cohqt.combineProposedWithCurrentCredit(anthroPath,
                                                   unique_proposed_subtypes)

            # Update message
            arcpy.AddMessage("Calculating post-project anthropogenic "
                             "disturbance modifier for greater sage-grouse")

            # Calculate post-project anthropogenic disturbance
            term = cheStandard.CreditTerms[1]
            anthro_disturbance_type = "Post"

            Projected_Anthro_Disturbance = cohqt.CalcAnthroDisturbance(
                Parameter_Values, term, unique_proposed_subtypes,
                anthro_disturbance_type, cheStandard, dist_field, weight_field,
                cellSize, emptyRaster)

            Projected_Anthro_Disturbance.save(PROJECTED_ANTHRO_DISTURBANCE)

            # Update message
            arcpy.AddMessage("Projected_Anthro_Disturbance Calculated")

            # Calculate uplift from anthro feature removal
            anthroUplift = cohqt.calcUplift(Current_Anthro_Disturbance,
                                            Projected_Anthro_Disturbance)
            upliftRasters.append(anthroUplift)

            # Update message
            arcpy.AddMessage(
                "Merging indirect benefits area and map units layer")

            # Combine the Map Units layer and Indirect Impact Layer
            indirect_benefit_area = CREDIT_PROJECT_AREA
            mgmt_map_units = Map_Units_Dissolve
            Map_Units_Dissolve = hqtlib.AddIndirectBenefitArea(
                indirect_benefit_area, mgmt_map_units)

        else:
            Projected_Anthro_Disturbance = Current_Anthro_Disturbance

            # Add Indirect field to Map Units layer and populate with False
            # Add field "Indirect"
            feature = Map_Units_Dissolve
            fieldsToAdd = ["Indirect"]
            fieldTypes = ["TEXT"]
            util.AddFields(feature, fieldsToAdd, fieldTypes)

            # Update field to equal "False"
            with arcpy.da.UpdateCursor(feature, fieldsToAdd) as cursor:
                for row in cursor:
                    row[0] = "False"
                    cursor.updateRow(row)

        # Calc zonal stats for pre-project modifiers (three seasons)
        term = cheStandard.CreditTerms[0]
        for season, raster in zip(cheStandard.GrSGSeasons,
                                  seasonalHabitatRasters):
            # Update message
            arcpy.AddMessage("Summarizing GrSG " + term + " " + season)

            # Calculate zonal statistics for each map unit
            inZoneData = Map_Units_Dissolve
            inValueRaster = raster
            zoneField = "Map_Unit_ID"
            outTable = "GrSG_Stats_" + term + "_" + season
            hqtlib.CalcZonalStats(inZoneData, zoneField, inValueRaster,
                                  outTable)

            # Join the zonal statistic to the Map Units Dissolve table
            field_name = "GrSG_" + term + "_" + season
            hqtlib.JoinMeanToTable(inZoneData, outTable, zoneField, field_name)

        if arcpy.Exists("Conifer_Treatment_Area") or \
                arcpy.Exists("Anthro_Features_Removed"):

            # Update message
            arcpy.AddMessage("Calculating Lek Uplift Modifier")

            # Calculate Lek Uplift Modifier
            lekUpliftModifier = cohqt.calcLekUpliftModifier(
                LekPresenceRaster, upliftRasters)
            lekUpliftModifier.save("Lek_Uplift_Modifier")

            # Update message
            arcpy.AddMessage("Calculating Post-Project Habitat Modifiers")

            # Calculate post-project cumulative habitat modifiers
            winterHabitatPost = cohqt.calcWinterHabitatGRSG(
                Projected_Anthro_Disturbance, ConiferModifier, GrSG_LDI,
                GrSG_Habitat)
            LSDMWinterPost = cohqt.applyLekUpliftModifierPost(
                winterHabitatPost, LekPresenceRaster, lekUpliftModifier)
            breedingHabitatPost = cohqt.calcBreedingHabitatGRSG(
                Projected_Anthro_Disturbance, ConiferModifier, GrSG_LDI,
                Lek_Distance_Modifier, GrSG_Habitat)
            LSDMBreedingPost = cohqt.applyLekUpliftModifierPost(
                breedingHabitatPost, LekPresenceRaster, lekUpliftModifier)
            summerHabitatPost = cohqt.calcSummerHabitatGRSG(
                Projected_Anthro_Disturbance, ConiferModifier, GrSG_LDI,
                SageModifier, GrSG_Habitat)
            LSDMSummerPost = cohqt.applyLekUpliftModifierPost(
                summerHabitatPost, LekPresenceRaster, lekUpliftModifier)

            seasonalHabitatRasters = [
                LSDMWinterPost, LSDMBreedingPost, LSDMSummerPost
            ]

            # Save outputs
            # winterHabitatPost.save("Post_Seasonal_Winter")
            LSDMWinterPost.save(GRSG_POST_WINTER)
            # breedingHabitatPost.save("Post_Seasonal_Breeding")
            LSDMBreedingPost.save(GRSG_POST_BREEDING)
            # summerHabitatPost.save("Post_Seasonal_Summer")
            LSDMSummerPost.save(GRSG_POST_SUMMER)

            # Calc zonal stats for post-project modifiers
            term = cheStandard.CreditTerms[1]
            for season, raster in zip(cheStandard.GrSGSeasons,
                                      seasonalHabitatRasters):
                # Update message
                arcpy.AddMessage("Summarizing GrSG " + term + " " + season)

                # Calculate zonal statistics for each map unit
                inZoneData = Map_Units_Dissolve
                inValueRaster = raster
                zoneField = "Map_Unit_ID"
                outTable = "GrSG_Stats_" + term + "_" + season
                hqtlib.CalcZonalStats(inZoneData, zoneField, inValueRaster,
                                      outTable)

                # Join the zonal statistic to the Map Units Dissolve table
                field_name = "GrSG_" + term + "_" + season
                hqtlib.JoinMeanToTable(inZoneData, outTable, zoneField,
                                       field_name)

        # Calculate Credit Intensity

    ### END GREATER SAGE-GROUSE ###

    ### MULE DEER ANTHRO DIST & MODIFIERS ###
    if is_mule:
        # Update message
        arcpy.AddMessage("Calculating pre-project anthropogenic disturbance "
                         "modifier for mule deer - process may repeat for "
                         "habitats in mixed PJ and open habitat")

        # # Calculate pre-project anthropogenic disturbance
        # dist_field = "MDO_Dist"
        # weight_field = "MDO_Weight"
        # term = cheStandard.CreditTerms[0]
        # unique_proposed_subtypes = []
        # anthro_disturbance_type = "Pre"
        #
        # Current_Anthro_Disturbance = hqtlib.cheCalcAnthroDisturbance(
        #     Parameter_Values, term, unique_proposed_subtypes,
        #     anthro_disturbance_type, cheStandard, dist_field, weight_field,
        #     cellSize, emptyRaster
        # )
        # Current_Anthro_Disturbance.save(CURRENT_ANTHRO_DISTURBANCE_MD)

        # Calculate pre-project anthropogenic disturbance
        # Calculate pre-project in PJ
        dist_field = "MDP_Dist"
        weight_field = "MDP_Weight"
        term = cheStandard.CreditTerms[0]
        unique_proposed_subtypes = []
        anthro_disturbance_type = "Pre"

        anthro_pj = cohqt.CalcAnthroDisturbance(Parameter_Values, term,
                                                unique_proposed_subtypes,
                                                anthro_disturbance_type,
                                                cheStandard, dist_field,
                                                weight_field, cellSize,
                                                emptyRaster)
        anthro_pj.save(CURRENT_ANTHRO_DISTURBANCE_MD + "_P")

        # Calculate pre-project in Open
        dist_field = "MDO_Dist"
        weight_field = "MDO_Weight"
        term = cheStandard.CreditTerms[0]
        unique_proposed_subtypes = []
        anthro_disturbance_type = "Pre"

        anthro_open = cohqt.CalcAnthroDisturbance(Parameter_Values,
                                                  term,
                                                  unique_proposed_subtypes,
                                                  anthro_disturbance_type,
                                                  cheStandard,
                                                  dist_field,
                                                  weight_field,
                                                  cellSize,
                                                  emptyRaster,
                                                  mask=BWMD_Open)
        anthro_open.save(CURRENT_ANTHRO_DISTURBANCE_MD + "_O")

        # Combine PJ and Open
        # If outside open, make 1
        anthro_open_only = Con(BWMD_Open == 1, anthro_open, 1)
        anthro_open_only.save(CURRENT_ANTHRO_DISTURBANCE_MD + "_OO")

        # Select minimum of pj and open rasters
        Current_Anthro_Disturbance = Con(anthro_open_only < anthro_pj,
                                         anthro_open_only, anthro_pj)
        Current_Anthro_Disturbance.save(CURRENT_ANTHRO_DISTURBANCE_MD)

        # Clean up
        arcpy.Delete_management("temp_masked_raster")
        # arcpy.Delete_management(CURRENT_ANTHRO_DISTURBANCE_MD + "_P")
        # arcpy.Delete_management(CURRENT_ANTHRO_DISTURBANCE_MD + "_O")
        # arcpy.Delete_management(CURRENT_ANTHRO_DISTURBANCE_MD + "_OO")

        # Update message
        arcpy.AddMessage("Calculating Pre-Project Habitat Modifiers")

        # Calculate pre-project cumulative habitat modifiers
        summerHabitatPre = cohqt.calcSummerHabitatMD(
            Current_Anthro_Disturbance,
            MuleDeer_LDI,
            SummerModifier,
            SuitableHabitat=None)
        # LSDMWinterPre = cohqt.applyLekUpliftModifierPre(summerHabitatPre,
        #                                                  LekPresenceRaster)
        migratoryHabitatPre = cohqt.calcMigratoryHabitatMD(
            Current_Anthro_Disturbance,
            MuleDeer_LDI,
            MigrationModifier,
            SuitableHabitat=None)
        # LSDMBreedingPre = cohqt.applyLekUpliftModifierPre(migratoryHabitatPre,
        #                                                    LekPresenceRaster)
        winterHabitatPre = cohqt.calcWinterHabitatMD(
            Current_Anthro_Disturbance,
            MuleDeer_LDI,
            WinterModifier,
            SuitableHabitat=None)
        # LSDMSummerPre = cohqt.applyLekUpliftModifierPre(winterHabitatPre,
        #                                                  LekPresenceRaster)
        seasonalHabitatRasters = [
            summerHabitatPre, migratoryHabitatPre, winterHabitatPre
        ]

        # Save outputs
        summerHabitatPre.save(MULE_PRE_SUMMER)
        # LSDMWinterPre.save("Pre_LSDM_Winter")
        migratoryHabitatPre.save(MULE_PRE_MIGRATION)
        # LSDMBreedingPre.save("Pre_LSDM_Breeding")
        winterHabitatPre.save(MULE_PRE_WINTER)
        # LSDMSummerPre.save("Pre_LSDM_Summer")

        # Update message
        arcpy.AddMessage("Current_Anthro_Disturbance Calculated")

        # Calc zonal stats for pre-project modifiers (three seasons)
        term = cheStandard.DebitTerms[0]
        for season, raster in zip(cheStandard.MuleDeerSeasons,
                                  seasonalHabitatRasters):
            # Update message
            arcpy.AddMessage("Summarizing Mule Deer " + term + " " + season)

            # Calculate zonal statistics for each map unit
            inZoneData = Map_Units_Dissolve
            inValueRaster = raster
            zoneField = "Map_Unit_ID"
            outTable = "Mule_Stats_" + term + "_" + season
            hqtlib.CalcZonalStats(inZoneData, zoneField, inValueRaster,
                                  outTable)

            # Join the zonal statistic to the Map Units Dissolve table
            field_name = "Mule_" + term + "_" + season
            hqtlib.JoinMeanToTable(inZoneData, outTable, zoneField, field_name)

        # # Calculate average of three seasonal habitat rasters pre-project
        # finalPreCumulative = hqtlib.calcAverageHabitatQuality(
        #     seasonalHabitatRasters
        # )
        # finalPreCumulative.save(CUMULATIVE_MODIFIER_PRE)

        if arcpy.Exists(PROPOSED_MODIFIED_FEATURES):
            # Update message
            arcpy.AddMessage("Calculating post-project anthropogenic "
                             "disturbance modifier")

            # Calculate post-project anthropogenic disturbance
            term = cheStandard.CreditTerms[1]
            anthro_disturbance_type = "Post"

            Projected_Anthro_Disturbance = cohqt.CalcAnthroDisturbance(
                Parameter_Values, term, unique_proposed_subtypes,
                anthro_disturbance_type, cheStandard, dist_field, weight_field,
                cellSize, emptyRaster)

            Projected_Anthro_Disturbance.save(PROJECTED_ANTHRO_DISTURBANCE_MD)

            # Update message
            arcpy.AddMessage("Projected_Anthro_Disturbance Calculated")

            # Calculate post-project cumulative habitat modifiers
            summerHabitatPost = cohqt.calcSummerHabitatMD(
                Projected_Anthro_Disturbance,
                MuleDeer_LDI,
                SummerModifier,
                SuitableHabitat=None)
            # LSDMWinterPost = cohqt.applyLekUpliftModifierPost(summerHabitatPost,
            #                                                  LekPresenceRaster)
            migratoryHabitatPost = cohqt.calcMigratoryHabitatMD(
                Projected_Anthro_Disturbance,
                MuleDeer_LDI,
                MigrationModifier,
                SuitableHabitat=None)
            # LSDMBreedingPost = cohqt.applyLekUpliftModifierPost(migratoryHabitatPost,
            #                                                    LekPresenceRaster)
            winterHabitatPost = cohqt.calcWinterHabitatMD(
                Projected_Anthro_Disturbance,
                MuleDeer_LDI,
                WinterModifier,
                SuitableHabitat=None)
            # LSDMSummerPost = cohqt.applyLekUpliftModifierPost(winterHabitatPost,
            #                                                  LekPresenceRaster)
            seasonalHabitatRasters = [
                summerHabitatPost, migratoryHabitatPost, winterHabitatPost
            ]

            # Save outputs
            summerHabitatPost.save(MULE_POST_SUMMER)
            # LSDMWinterPre.save("Pre_LSDM_Winter")
            migratoryHabitatPost.save(MULE_POST_MIGRATION)
            # LSDMBreedingPre.save("Pre_LSDM_Breeding")
            winterHabitatPost.save(MULE_POST_WINTER)
            # LSDMSummerPre.save("Pre_LSDM_Summer")

            # Calc zonal stats for post-project modifiers (three seasons)
            term = cheStandard.DebitTerms[1]
            for season, raster in zip(cheStandard.MuleDeerSeasons,
                                      seasonalHabitatRasters):
                # Update message
                arcpy.AddMessage("Summarizing Mule Deer " + term + season)

                # Calculate zonal statistics for each map unit
                inZoneData = Map_Units_Dissolve
                inValueRaster = raster
                zoneField = "Map_Unit_ID"
                outTable = "Mule_Stats_" + term + season
                hqtlib.CalcZonalStats(inZoneData, zoneField, inValueRaster,
                                      outTable)

                # Join the zonal statistic to the Map Units Dissolve table
                field_name = "Mule_" + term + "_" + season
                hqtlib.JoinMeanToTable(inZoneData, outTable, zoneField,
                                       field_name)

            # # Calculate average of three seasonal habitat rasters post-project
            # finalPostCumulative = hqtlib.calcAverageHabitatQuality(
            #     seasonalHabitatRasters
            # )
            # finalPostCumulative.save(CUMULATIVE_MODIFIER_POST)

            # Calculate permanent cumulative habitat modifiers

            # Update message
            arcpy.AddMessage("Calculating Mule Deer Benefit")

            # Calculate impact
            pre_fields = [
                "Mule_Pre_Summer", "Mule_Pre_Migration", "Mule_Pre_Winter"
            ]
            post_fields = [
                "Mule_Post_Summer", "Mule_Post_Migration", "Mule_Post_Winter"
            ]
            out_fields = [
                "Mule_Summer_Benefit", "Mule_Migration_Benefit",
                "Mule_Winter_Benefit"
            ]
            for i in range(len(pre_fields)):
                pre_field = pre_fields[i]
                post_field = post_fields[i]
                out_field = out_fields[i]
                cohqt.calcDebits(Map_Units_Dissolve, pre_field, post_field,
                                 out_field)

        # Export data to Excel
        input_Tables = [MAP_UNITS_DISSOLVE]
        for table in input_Tables:
            hqtlib.ExportToExcel(table, Project_Folder, Project_Name)

    ### END MULE DEER ###
    if not is_grsg and not is_mule:
        arcpy.AddMessage("Impacts were not detected in any habitat type. "
                         "Please check credit project boundary and try "
                         "again")
    # Clean up
    for raster in arcpy.ListRasters("*_Subtype_Disturbance"):
        arcpy.Delete_management(raster)

    for raster in arcpy.ListRasters("*_Type_Disturbance"):
        arcpy.Delete_management(raster)

    arcpy.Delete_management("in_memory")

    # Save map document
    if arcpy.ListInstallations()[0] == 'arcgispro':
        p = arcpy.mp.ArcGISProject("CURRENT")
        p.save()
    else:
        mxd = arcpy.mapping.MapDocument("CURRENT")
        mxd.save()
Exemplo n.º 29
0
def getEdgeCoreGrid(m, lccObj, lccClassesDict, inLandCoverGrid,
                    PatchEdgeWidth_str, processingCellSize_str, timer,
                    shortName, scratchNameReference):
    # Get the lccObj values dictionary to determine if a grid code is to be included in the effective reporting unit area calculation
    lccValuesDict = lccObj.values
    #landCoverValues = raster.getRasterValues(inLandCoverGrid)
    landCoverValues = getRasterValues(inLandCoverGrid)

    # get the grid codes for this specified metric
    ClassValuesList = lccClassesDict[m].uniqueValueIds.intersection(
        landCoverValues)

    # get the frozenset of excluded values (i.e., values not to use when calculating the reporting unit effective area)
    ExcludedValueList = lccValuesDict.getExcludedValueIds().intersection(
        landCoverValues)

    # create grid where cover type of interest (e.g., forest) is coded 3, excluded values are coded 1, everything else is coded 2
    reclassPairs = []
    for val in landCoverValues:
        if val in ClassValuesList:
            reclassPairs.append([val, 3])
        elif val in ExcludedValueList:
            reclassPairs.append([val, 1])
        else:
            reclassPairs.append([val, 2])

    AddMsg(
        timer.split() +
        " Step 1 of 4: Reclassing land cover grid to Class = 3, Other = 2, and Excluded = 1..."
    )
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE",
                             RemapValue(reclassPairs))

    AddMsg(timer.split() + " Step 2 of 4: Setting Class areas to Null...")
    delimitedVALUE = arcpy.AddFieldDelimiters(reclassGrid, "VALUE")
    otherGrid = SetNull(reclassGrid, 1, delimitedVALUE + " = 3")
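    # SetNull returns NoData where the where_clause is true (VALUE = 3, the
    # Class cells) and the false value (1) everywhere else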

    AddMsg(timer.split() + " Step 3 of 4: Finding distance from Other...")
    distGrid = EucDistance(otherGrid)

    AddMsg(timer.split() +
           " Step 4 of 4: Delimiting Class areas to Edge = 3 and Core = 4...")
    edgeDist = round(float(PatchEdgeWidth_str) * float(processingCellSize_str))

    zonesGrid = Con((distGrid >= edgeDist) & reclassGrid, 4, reclassGrid)
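    # distGrid is 0 on Other/Excluded cells (they are the EucDistance sources),
    # so only Class cells at least edgeDist from a non-Class cell become 4
    # (Core); the remaining Class cells keep value 3 (Edge)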

    # it appears that ArcGIS cannot process the BuildRasterAttributeTable request without first saving the raster.
    # This step wasn't the case earlier. Either ESRI changed things, or I altered something in ATtILA that unwittingly caused this. -DE
    namePrefix = shortName + "_" + "Raster" + m + PatchEdgeWidth_str
    scratchName = arcpy.CreateScratchName(namePrefix, "", "RasterDataset")
    scratchNameReference[0] = scratchName
    zonesGrid.save(scratchName)

    arcpy.BuildRasterAttributeTable_management(zonesGrid, "Overwrite")

    arcpy.AddField_management(zonesGrid, "CATEGORY", "TEXT", "#", "#", "10")
    updateCoreEdgeCategoryLabels(zonesGrid)

    return zonesGrid
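
Stripped of the ATtILA plumbing, the edge/core delineation above reduces to a
SetNull + EucDistance + Con sequence. A standalone sketch; the raster path,
cell size, and edge width are hypothetical:

import arcpy
from arcpy.sa import Con, EucDistance, Raster, SetNull

arcpy.CheckOutExtension('Spatial')
# hypothetical reclassified grid: 3 = Class, 2 = Other, 1 = Excluded
reclass = Raster('C:/data/reclass.tif')

# NoData on Class cells, 1 elsewhere, so distance accumulates inside Class areas
other = SetNull(reclass, 1, 'VALUE = 3')
dist = EucDistance(other)

edge_width = 2 * 30.0  # e.g., an edge two cells deep at 30 m resolution
# interior Class cells become 4 (Core); Edge keeps 3, Other/Excluded unchanged
zones = Con((dist >= edge_width) & reclass, 4, reclass)
zones.save('C:/data/edge_core.tif')
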
Exemplo n.º 30
0
def createPatchRaster(m, lccObj, lccClassesDict, inLandCoverGrid, metricConst,
                      maxSeparation, minPatchSize, processingCellSize_str,
                      timer, scratchNameReference):
    # create a list of all the grid values in the selected landcover grid
    #landCoverValues = raster.getRasterValues(inLandCoverGrid)
    landCoverValues = getRasterValues(inLandCoverGrid)

    # for the selected land cover class, get the class codes found in the input landcover grid
    lccValuesDict = lccObj.values
    classValuesList = lccClassesDict[m].uniqueValueIds.intersection(
        landCoverValues)

    # get the frozenset of excluded values (i.e., values not to use when calculating the reporting unit effective area)
    excludedValuesList = lccValuesDict.getExcludedValueIds().intersection(
        landCoverValues)

    # create class (value = 3) / other (value = 0) / excluded grid (value = -9999) raster
    # define the reclass values
    classValue = metricConst.classValue
    excludedValue = metricConst.excludedValue
    otherValue = metricConst.otherValue
    newValuesList = [classValue, excludedValue, otherValue]

    # generate a reclass list where each item in the list is a two item list: the original grid value, and the reclass value
    reclassPairs = getInOutOtherReclassPairs(landCoverValues, classValuesList,
                                             excludedValuesList, newValuesList)

    AddMsg(timer.split() + " Reclassing land cover to Class:" + m + " = " +
           str(classValue) + ", Other = " + str(otherValue) +
           ", and Excluded = " + str(excludedValue) + "...")
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE",
                             RemapValue(reclassPairs))

    # create patch raster where:
    #    clusters of cells within the input threshold distance are considered a single patch
    #    and patches below the input minimum size have been discarded

    # Ensure all parameter inputs are the appropriate number type
    intMaxSeparation = int(maxSeparation)
    intMinPatchSize = int(minPatchSize)

    # Check if Maximum Separation > 0 if it is then skip to regions group analysis otherwise run Euclidean distance
    if intMaxSeparation == 0:
        AddMsg(
            timer.split() +
            " Assigning unique numbers to each unconnected cluster of Class:" +
            m + "...")
        regionOther = RegionGroup(reclassGrid == classValue, "EIGHT", "CROSS",
                                  "ADD_LINK", "0")
    else:
        AddMsg(timer.split() + " Connecting clusters of Class:" + m +
               " within maximum separation distance...")
        fltProcessingCellSize = float(processingCellSize_str)
        maxSep = intMaxSeparation * fltProcessingCellSize
        delimitedVALUE = arcpy.AddFieldDelimiters(reclassGrid, "VALUE")
        whereClause = delimitedVALUE + " < " + str(classValue)
        classRaster = SetNull(reclassGrid, 1, whereClause)
        eucDistanceRaster = EucDistance(classRaster, maxSep,
                                        fltProcessingCellSize)

        # Run Region Group analysis on UserEuclidPlus, ignores 0/NoData values
        AddMsg(
            timer.split() +
            " Assigning unique numbers to each unconnected cluster of Class:" +
            m + "...")
        UserEuclidRegionGroup = RegionGroup(eucDistanceRaster >= 0, "EIGHT",
                                            "CROSS", "ADD_LINK", "0")

        # Maintain the original boundaries of each patch
        regionOther = Con(reclassGrid == classValue, UserEuclidRegionGroup,
                          reclassGrid)

    if intMinPatchSize > 1:
        AddMsg(timer.split() +
               " Eliminating clusters below minimum patch size...")
        delimitedCOUNT = arcpy.AddFieldDelimiters(regionOther, "COUNT")
        whereClause = delimitedCOUNT + " < " + str(intMinPatchSize)
        regionOtherFinal = Con(regionOther, otherValue, regionOther,
                               whereClause)
    else:
        regionOtherFinal = regionOther

    # add the excluded class areas back to the raster if present
    if excludedValuesList:
        regionOtherExcluded = Con(reclassGrid == excludedValue, reclassGrid,
                                  regionOtherFinal)
    else:
        regionOtherExcluded = regionOtherFinal

    # The Patch Metrics tool appears to have trouble calculating its metrics when the raster area is large and the
    # regionOtherExcluded grid is treated as a raster object in memory and not saved as a raster on disk
    namePrefix = metricConst.shortName + "_" + m + "_PatchRast"
    scratchName = arcpy.CreateScratchName(namePrefix, "", "RasterDataset")
    regionOtherExcluded.save(scratchName)
    desc = arcpy.Describe(regionOtherExcluded)
    scratchNameReference[0] = desc.catalogPath

    return regionOtherExcluded
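
The minimum-patch-size elimination above follows a general RegionGroup + Con
recipe. A condensed sketch; the binary raster path and the 10-cell threshold
are hypothetical:

import arcpy
from arcpy.sa import Con, RegionGroup, Raster

arcpy.CheckOutExtension('Spatial')
binary = Raster('C:/data/class_binary.tif')  # hypothetical 1 = class, 0 = other

# label each connected cluster of class cells with a unique region number
regions = RegionGroup(binary == 1, 'EIGHT', 'CROSS', 'ADD_LINK', '0')

# regions smaller than 10 cells fall back to the 'other' value (0)
patches = Con(regions, 0, regions, 'COUNT < 10')
patches.save('C:/data/patches.tif')
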
Exemplo n.º 31
0
def getLargePatchViewGrid(classValuesList, excludedValuesList, inLandCoverGrid,
                          landCoverValues, viewRadius, conValues,
                          minimumPatchSize, timer, saveIntermediates,
                          metricConst):
    # create class (value = 1) / other (value = 0) / excluded grid (value = 0) raster
    # define the reclass values
    classValue = 1
    excludedValue = 0
    otherValue = 0
    newValuesList = [classValue, excludedValue, otherValue]

    # generate a reclass list where each item in the list is a two item list: the original grid value, and the reclass value
    reclassPairs = getInOutOtherReclassPairs(landCoverValues, classValuesList,
                                             excludedValuesList, newValuesList)

    AddMsg((
        "{0} Reclassifying selected land cover class to 1. All other values = 0..."
    ).format(timer.split()))
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE",
                             RemapValue(reclassPairs))

    ##calculate the big patches for LandCover

    AddMsg(("{0} Calculating size of excluded area patches...").format(
        timer.split()))
    regionGrid = RegionGroup(reclassGrid, "EIGHT", "WITHIN", "ADD_LINK")

    AddMsg(
        ("{0} Assigning {1} to patches >= minimum size threshold...").format(
            timer.split(), "1"))
    delimitedCOUNT = arcpy.AddFieldDelimiters(regionGrid, "COUNT")
    whereClause = delimitedCOUNT + " >= " + minimumPatchSize + " AND LINK = 1"
    burnInGrid = Con(regionGrid, classValue, 0, whereClause)
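    # keep regions that meet the size threshold and belong to the class
    # (LINK = 1); everything else collapses to 0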

    # save the intermediate raster if save intermediates option has been chosen
    if saveIntermediates:
        namePrefix = metricConst.burnInGridName
        scratchName = arcpy.CreateScratchName(namePrefix, "", "RasterDataset")
        burnInGrid.save(scratchName)
        AddMsg(timer.split() + " Save intermediate grid complete: " +
               os.path.basename(scratchName))

    ##end of calculating the big patches for LandCover

    AddMsg((
        "{0} Performing focal SUM on reclassified raster with big patches using {1} cell radius neighborhood..."
    ).format(timer.split(), viewRadius))
    neighborhood = arcpy.sa.NbrCircle(int(viewRadius), "CELL")
    #focalGrid = arcpy.sa.FocalStatistics(reclassGrid == classValue, neighborhood, "SUM")
    focalGrid = arcpy.sa.FocalStatistics(burnInGrid == classValue,
                                         neighborhood, "SUM")

    AddMsg((
        "{0} Reclassifying focal SUM results into view = 1 and no-view = 0 binary raster..."
    ).format(timer.split()))
    #    delimitedVALUE = arcpy.AddFieldDelimiters(focalGrid,"VALUE")
    #    whereClause = delimitedVALUE+" = 0"
    #    viewGrid = Con(focalGrid, 1, 0, whereClause)
    whereValue = conValues[0]
    trueValue = conValues[1]
    viewGrid = Con(Raster(focalGrid) > whereValue, trueValue)
    return viewGrid
Exemplo n.º 32
0
def convert_fire_history(**kwargs):
    """
    Description: converts fire history polygons to rasters and extracts to major grid and study area
    Inputs: 'work_geodatabase' -- path to a file geodatabase that will serve as the workspace
            'input_array' -- an array containing the target feature class to convert (must be first), the study area raster (must be second), and the grid raster (must be third)
            'output_array' -- an array containing the output raster
    Returned Value: Returns a raster dataset on disk and a success message string
    Preconditions: the target feature class must be created using the recent fire history function
    """

    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import ExtractByMask
    from arcpy.sa import IsNull
    from arcpy.sa import Raster
    import datetime
    import time
    import os

    # Parse key word argument inputs
    work_geodatabase = kwargs['work_geodatabase']
    input_feature = kwargs['input_array'][0]
    study_area = kwargs['input_array'][1]
    grid_raster = kwargs['input_array'][2]
    output_raster = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Set workspace
    arcpy.env.workspace = work_geodatabase

    # Set snap raster and extent
    arcpy.env.snapRaster = study_area
    arcpy.env.extent = Raster(grid_raster).extent
    arcpy.env.cellSize = 'MINOF'

    # Define intermediate rasters
    convert_raster = os.path.splitext(output_raster)[0] + '.tif'

    # Convert fire history feature class to raster
    print('\tConverting feature class to raster within grid...')
    iteration_start = time.time()
    arcpy.conversion.PolygonToRaster(input_feature, 'FireYear', convert_raster, 'CELL_CENTER', 'FireYear', 10)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Convert no data values to zero
    print('\tConverting no data to zero...')
    iteration_start = time.time()
    zero_raster = Con(IsNull(Raster(convert_raster)), 0, Raster(convert_raster))
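    # Con(IsNull(x), 0, x) fills NoData cells with 0 while keeping fire year
    # values where polygons were rasterized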
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Extract raster to study area
    print('\tExtracting raster to grid...')
    iteration_start = time.time()
    extract1_raster = ExtractByMask(zero_raster, grid_raster)
    print('\tExtracting raster to study area...')
    extract2_raster = ExtractByMask(extract1_raster, study_area)
    print('\tCopying extracted raster to new raster...')
    arcpy.management.CopyRaster(extract2_raster,
                                output_raster,
                                '',
                                '',
                                '-32768',
                                'NONE',
                                'NONE',
                                '16_BIT_SIGNED',
                                'NONE',
                                'NONE',
                                'TIFF',
                                'NONE',
                                'CURRENT_SLICE',
                                'NO_TRANSPOSE'
                                )
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')
    out_process = 'Successfully extracted recent fire history to study area.'
    return out_process