Example #1
def reprojectGeoDEM(inputDEM, outputDEM):

    from LUCI_SEEA.lib.external import utm

    log.info("DEM has geographic coordinate system. Reprojecting...")

    DEMSpatRef = arcpy.Describe(inputDEM).SpatialReference
    DEMExtent = arcpy.Describe(inputDEM).extent

    # Find midpoint of raster
    midPointX = DEMExtent.XMin + (DEMExtent.XMax - DEMExtent.XMin) / 2
    midPointY = DEMExtent.YMin + (DEMExtent.YMax - DEMExtent.YMin) / 2

    # Find out which UTM zone the DEM should be projected to
    easting, northing, zone, letter = utm.from_latlon(midPointY, midPointX)
    if letter >= "N":
        northSouth = "N"
    else:
        northSouth = "S"

    # Create the new projected coordinate system
    projSpatRef = arcpy.SpatialReference("WGS 1984 UTM Zone " + str(zone) + northSouth)

    # Obtain the transformation string to transform from GCS to PCS
    transformation = arcpy.ListTransformations(DEMSpatRef, projSpatRef, DEMExtent)[0]

    # Reproject DEM to Projected Coord System
    arcpy.ProjectRaster_management(inputDEM, outputDEM, projSpatRef, geographic_transform=transformation)

    # Update coord system
    DEMSpatRef = arcpy.Describe(outputDEM).SpatialReference
    log.info("DEM coordinate system is now " + DEMSpatRef.Name)
Example #2
def function(params):

    # Run to ensure config.xml is copied across to user_settings.xml if needed.
    # This line can be removed after 31/10/18.
    common.runSystemChecks()

    # Get inputs
    p = common.paramsAsText(params)
    scratchPath = p[1]
    developerMode = common.strToBool(p[2])

    if developerMode:
        developerMode = 'Yes'
    else:
        developerMode = 'No'

    # Override the default values from user settings file (if they exist in the file)
    try:
        configValues = [('scratchPath', scratchPath),
                        ('developerMode', developerMode)]

        common.writeXML(configuration.userSettingsFile, configValues)

        log.info('Scratch path updated: ' + scratchPath)
        log.info('Developer mode updated: ' + developerMode)

    except Exception:
        raise
Example #3
def codeSuccessfullyRun(codeBlockName, folder, rerun):

    try:
        success = False
        xmlFile = getProgressFilenames(folder).xmlFile

        if rerun:
            try:
                # Open file for reading
                tree = ET.parse(xmlFile)
                root = tree.getroot()
                codeBlockNodes = root.findall('CodeBlock')

            except Exception:
                removeFile(xmlFile)

            else:
                codeBlockNames = []
                for codeBlockNode in codeBlockNodes:

                    names = codeBlockNode.findall('Name')
                    for name in names:
                        codeBlockNames.append(name.text)

                if codeBlockName in codeBlockNames:
                    success = True

        if success:
            log.info('Skipping: ' + str(codeBlockName))

        return success

    except Exception:
        log.warning('Could not check if code block was previously run')
        log.warning(traceback.format_exc())
        return False
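
A sketch of the checkpoint pattern this helper enables; logProgress is the companion call that records a completed block (the pair appears via the progress module in the RUSLE example below):

# Guard an expensive step so reruns can skip work already done
codeBlock = 'Produce R-factor layer'
if not codeSuccessfullyRun(codeBlock, outputFolder, rerun):
    # ... expensive geoprocessing here ...
    logProgress(codeBlock, outputFolder)  # record completion in the progress XML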
Example #4
def function(params):

    try:
        pText = common.paramsAsText(params)

        # Get inputs
        runSystemChecks = common.strToBool(pText[1])
        outputFolder = pText[5]
        yearAFolder = pText[6]
        yearBFolder = pText[7]
        slopeOption = pText[8]
        slopeAngle = pText[9]
        yearARain = pText[10]
        yearBRain = pText[11]
        yearASupport = pText[12]
        yearBSupport = pText[13]

        # Set option for LS-factor
        if slopeOption == 'Calculate based on slope and length only':
            lsOption = 'SlopeLength'

        elif slopeOption == 'Include upslope contributing area':
            lsOption = 'UpslopeArea'

        else:
            log.error('Invalid LS-factor option')
            sys.exit()

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks()

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Call RUSLE_scen_acc function
        RUSLE_scen_acc.function(outputFolder, yearAFolder, yearBFolder,
                                lsOption, slopeAngle, yearARain, yearBRain,
                                yearASupport, yearBSupport)

        # Set up filenames for display purposes
        soilLossA = os.path.join(outputFolder, "soillossA")
        soilLossB = os.path.join(outputFolder, "soillossB")
        soilLossDiff = os.path.join(outputFolder, "soillossDiff")

        arcpy.SetParameter(2, soilLossA)
        arcpy.SetParameter(3, soilLossB)
        arcpy.SetParameter(4, soilLossDiff)

        log.info("RUSLE accounts operations completed successfully")

    except Exception:
        log.exception("RUSLE accounts tool failed")
        raise
Example #5
def function(params):

    try:
        pText = common.paramsAsText(params)

        runSystemChecks = common.strToBool(pText[1])

        if params[2].name == 'Output_folder':
            outputFolder = pText[2]
        elif params[2].name == 'Land_extent_accounts':
            outputFolder = os.path.join(arcpy.env.scratchFolder, 'LCaccounts')
            LCaccounts = pText[2]

        lcOption = pText[3]
        inputLC = pText[4]
        openingLC = pText[5]
        closingLC = pText[6]
        openingField = pText[7]
        closingField = pText[8]
        lcTable = pText[9]
        lcCodeField = pText[10]
        lcNameField = pText[11]

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks()

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Call aggregation function
        lcOutputs = land_accounts.function(outputFolder, lcOption, inputLC,
                                           openingLC, closingLC, openingField,
                                           closingField, lcTable, lcCodeField,
                                           lcNameField)

        # Set up filenames for display purposes
        lcOpening = lcOutputs[0]
        lcClosing = lcOutputs[1]
        lcOpeningWithAccounts = lcOutputs[2]
        outCSV = lcOutputs[3]

        arcpy.SetParameter(12, lcOpening)
        arcpy.SetParameter(13, lcClosing)
        arcpy.SetParameter(14, outCSV)

        log.info("Land extent accounting operations completed successfully")

        return lcOpeningWithAccounts, lcClosing, outCSV

    except Exception:
        log.exception("Land extent accounting tool failed")
        raise
Example #6
def listEnvironmentSettings():

    environments = arcpy.ListEnvironments()

    # Sort the environment names
    environments.sort()

    for environment in environments:
        # Format and print each environment and its current setting.
        # (The environments are accessed by key from arcpy.env.)
        log.info("{0:<30}: {1}".format(environment, arcpy.env[environment]))
Example #7
def function(params):

    try:
        pText = common.paramsAsText(params)

        runSystemChecks = common.strToBool(pText[1])
        # Get inputs
        if params[2].name == 'Output_folder':
            outputFolder = pText[2]
        elif params[2].name == 'Species_richness':
            outputFolder = os.path.join(arcpy.env.scratchFolder,
                                        'Species_richness')
            speciesRichness = pText[2]

        IUCN_rl_data = pText[4]
        studymask = pText[5]
        speciesdisplayname = pText[6]
        #aggregateMask = pText[7] # will add optional mask if want to calculate over aggregrate spatial units

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks()

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Call aggregation function
        outputStats = PAspeciesRIchness.function(outputFolder,
                                                 dataSetsToAggregate,
                                                 aggregateMask,
                                                 maskFullyWithinSAM,
                                                 dataToAggregate)

        # Set up filenames for display purposes
        RareSpeciesRichness = os.path.join(outputFolder,
                                           "RareSpeciesRichness.shp")
        arcpy.CopyFeatures_management(outputStats[0], RareSpeciesRichness)

        arcpy.SetParameter(3, RareSpeciesRichness)

        log.info("Rare species richness operations completed successfully")

        return outputStats[0], RareSpeciesRichness

    except Exception:
        log.exception("Rare species richness tool failed")
        raise
Example #8
def function(params):

    try:
        pText = common.paramsAsText(params)

        # Get inputs
        runSystemChecks = common.strToBool(pText[1])
        outputFolder = pText[2]
        inputRaster = pText[5]
        aggregationZones = pText[6]
        aggregationColumn = pText[7]

        rerun = False

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks(outputFolder, rerun)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Set up progress log file
        progress.initProgress(outputFolder, rerun)

        # Write input params to XML
        common.writeParamsToXML(params, outputFolder)

        # Call zonal statistics function
        CalcZonal.function(outputFolder, inputRaster, aggregationZones,
                           aggregationColumn)

        # Set up filenames for display purposes
        outRaster = os.path.join(outputFolder, 'statRaster')
        outTable = os.path.join(outputFolder, 'statTable.dbf')

        # Set up outputs
        arcpy.SetParameter(3, outRaster)
        arcpy.SetParameter(4, outTable)

        log.info("Zonal statistics operations completed successfully")

    except Exception:
        log.exception("Zonal statistics tool failed")
        raise
Example #9
def clipLargeDEM(DEM, StudyAreaMask):

    try:
        # Work out filesize of DEM
        cols = arcpy.GetRasterProperties_management(DEM, "COLUMNCOUNT").getOutput(0)
        rows = arcpy.GetRasterProperties_management(DEM, "ROWCOUNT").getOutput(0)
        bitType = int(arcpy.GetRasterProperties_management(DEM, "VALUETYPE").getOutput(0))

        if bitType <= 4:    # 8 bit
            bytesPerCell = 1
        elif bitType <= 6:  # 16 bit
            bytesPerCell = 2
        elif bitType <= 9:  # 32 bit
            bytesPerCell = 4
        elif bitType <= 14: # 64 bit
            bytesPerCell = 8
        else:
            bytesPerCell = 4

        sizeInGb = int(cols) * int(rows) * bytesPerCell / (1024 * 1024 * 1024)

        if sizeInGb > 1: # 1Gb
            log.info('Clipping DEM as original DEM is too large (approximately ' + str(sizeInGb) + 'Gb)')

            # Buffer study area mask by 5km            
            bufferSAM = os.path.join(arcpy.env.scratchGDB, "bufferSAM")
            arcpy.Buffer_analysis(StudyAreaMask, bufferSAM, "5000 meters", "FULL", "ROUND", "ALL")

            # Clip DEM to this buffered area
            bufferedDEM = os.path.join(arcpy.env.scratchWorkspace, "bufferedDEM")
            extent = arcpy.Describe(bufferSAM).extent
            arcpy.Clip_management(DEM, str(extent), bufferedDEM, bufferSAM, nodata_value="-3.402823e+038", clipping_geometry="ClippingGeometry", maintain_clipping_extent="NO_MAINTAIN_EXTENT")

            log.warning('Since the DEM is large, reconditioning and preprocessing operations may take a long time')

            return bufferedDEM
        else:
            return DEM

    except Exception:
        log.error("Error occurred when determining if DEM needs to be clipped or not")
        raise
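
The bit-depth branch above maps ArcGIS VALUETYPE codes to bytes per cell; the same arithmetic as a standalone sketch (no arcpy required):

def estimateRasterSizeGb(cols, rows, bitType):
    # VALUETYPE <= 4: 8 bit, <= 6: 16 bit, <= 9: 32 bit, <= 14: 64 bit
    if bitType <= 4:
        bytesPerCell = 1
    elif bitType <= 6:
        bytesPerCell = 2
    elif bitType <= 9:
        bytesPerCell = 4
    elif bitType <= 14:
        bytesPerCell = 8
    else:
        bytesPerCell = 4
    return cols * rows * bytesPerCell / (1024.0 ** 3)

print(estimateRasterSizeGb(40000, 40000, 9))  # ~5.96 GB for a 40000 x 40000 32-bit DEM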
Example #10
def function(DEM, streamNetwork, smoothDropBuffer, smoothDrop, streamDrop, outputReconDEM):

    try:
        # Set environment variables
        arcpy.env.extent = DEM
        arcpy.env.mask = DEM
        arcpy.env.cellSize = DEM

        # Set temporary variables
        prefix = "recon_"
        streamRaster = prefix + "streamRaster"

        # Determine DEM cell size and OID column name
        size = arcpy.GetRasterProperties_management(DEM, "CELLSIZEX")
        OIDField = arcpy.Describe(streamNetwork).OIDFieldName

        # Convert stream network to raster
        arcpy.PolylineToRaster_conversion(streamNetwork, OIDField, streamRaster, "", "", size)

        # Work out distance of cells from stream
        distanceFromStream = EucDistance(streamRaster, "", size)

        # Elements within a buffer distance of the stream are smoothly dropped
        intSmoothDrop = Con(distanceFromStream > float(smoothDropBuffer), 0,
                            (float(smoothDrop) / float(smoothDropBuffer)) * (float(smoothDropBuffer) - distanceFromStream))
        del distanceFromStream

        # Burn this smooth drop into DEM. Cells in stream are sharply dropped by the value of "streamDrop"
        binaryStream = Con(IsNull(Raster(streamRaster)), 0, 1)
        reconDEMTemp = Raster(DEM) - intSmoothDrop - (float(streamDrop) * binaryStream)
        del intSmoothDrop
        del binaryStream
        
        reconDEMTemp.save(outputReconDEM)
        del reconDEMTemp

        log.info("Reconditioned DEM generated")

    except Exception:
        log.error("DEM reconditioning function failed")
        raise
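
The first Con expression applies a linear taper: a cell at distance d from the stream is lowered by smoothDrop * (smoothDropBuffer - d) / smoothDropBuffer, reaching zero at the buffer edge. A numpy sketch of the same ramp:

import numpy as np

def smoothDropAmount(distance, smoothDropBuffer, smoothDrop):
    # Full drop at the stream (distance 0), tapering linearly to zero at the buffer edge
    return np.where(distance > smoothDropBuffer, 0.0,
                    (smoothDrop / smoothDropBuffer) * (smoothDropBuffer - distance))

print(smoothDropAmount(np.array([0.0, 25.0, 50.0, 100.0]), 50.0, 10.0))
# [10.  5.  0.  0.]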
Example #11
def function(params):

    class DataToAggregate:
        def __init__(self, dataSet, linkCode):
            self.dataSet = dataSet
            self.linkCode = linkCode

    try:
        pText = common.paramsAsText(params)
        

        # Get inputs
        if params[2].name == 'Output_folder':
            outputFolder = pText[2]
        elif params[2].name == 'Aggregated_data':
            outputFolder = os.path.join(arcpy.env.scratchFolder, 'AggregatedData')
            aggregatedData = pText[2]
        
        runSystemChecks = common.strToBool(pText[1])
        dataToAggregate = pText[7]
        classificationColumn = pText[8]
        aggregateMask = pText[9]
        maskFullyWithinSAM = common.strToBool(pText[10])

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks()

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Initialise variables
        dataSetsToAggregate = [DataToAggregate(dataToAggregate, classificationColumn)]

        # Call aggregation function
        outputStats = aggregate_data.function(outputFolder, dataSetsToAggregate, aggregateMask, maskFullyWithinSAM, dataToAggregate)

        # Set up filenames for display purposes
        InvSimpson = os.path.join(outputFolder, "InverseSimpsonIndex.shp")
        Shannon = os.path.join(outputFolder, "ShannonIndex.shp")
        meanPatch = os.path.join(outputFolder, "MeanPatchSize.shp")
        numCovers = os.path.join(outputFolder, "NumCovers.shp")

        arcpy.CopyFeatures_management(outputStats[0], InvSimpson)
        arcpy.CopyFeatures_management(outputStats[0], Shannon)
        arcpy.CopyFeatures_management(outputStats[0], meanPatch)
        arcpy.CopyFeatures_management(outputStats[0], numCovers)

        arcpy.SetParameter(3, InvSimpson)
        arcpy.SetParameter(4, Shannon)
        arcpy.SetParameter(5, numCovers)
        arcpy.SetParameter(6, meanPatch)

        log.info("Aggregation operations completed successfully")

        return outputStats[0], InvSimpson, Shannon, numCovers, meanPatch

    except Exception:
        log.exception("Aggregate data tool failed")
        raise
Example #12
def function(outputFolder, inputRaster, aggregationZones, aggregationColumn):

    try:
        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "zonal_")

        zones = prefix + "aggZones"
        outZonal = prefix + "outZonal"

        # Define output files
        outRaster = os.path.join(outputFolder, 'statRaster')
        outTable = os.path.join(outputFolder, 'statTable.dbf')

        # Check if the aggregation column exists
        zoneFields = arcpy.ListFields(aggregationZones)
        zoneFound = False
        for field in zoneFields:
            fieldName = str(field.name)

            if fieldName == str(aggregationColumn):
                zoneFound = True

        if not zoneFound:
            log.error('Aggregation column (' + str(aggregationColumn) +
                      ') not found in zone shapefile')
            log.error('Please ensure this field is present')
            sys.exit()

        # Dissolve aggregation zone based on aggregation column
        arcpy.Dissolve_management(aggregationZones, zones, aggregationColumn)
        log.info("Dissolved aggregation zones based on: " +
                 str(aggregationColumn))

        # Check out the ArcGIS Spatial Analyst extension license
        arcpy.CheckOutExtension("Spatial")

        # Calculate zonal statistics raster
        outZonal = arcpy.sa.ZonalStatistics(zones, aggregationColumn,
                                            inputRaster, "MEAN", "DATA")
        outZonal.save(outRaster)
        arcpy.CalculateStatistics_management(outRaster)
        log.info("Mean zonal statistics calculated")

        # Calculate zonal statistics table
        outZSTable = ZonalStatisticsAsTable(zones, aggregationColumn,
                                            inputRaster, outTable, "DATA",
                                            "ALL")

        log.info("Zonal statistics function completed successfully")

    except Exception:
        arcpy.AddError("Zonal statistics accounting function failed")
        raise

    finally:
        # Remove feature layers from memory
        try:
            for lyr in common.listFeatureLayers(locals()):
                arcpy.Delete_management(locals()[lyr])
                exec(lyr + ' = None') in locals()
        except Exception:
            pass
Example #13
def function(params):

    try:
        pText = common.paramsAsText(params)

        # Get inputs
        runSystemChecks = common.strToBool(pText[1])
        outputFolder = pText[2]
        preprocessFolder = pText[4]

        # R-factor
        rData = pText[5]

        # LS-factor
        slopeOption = pText[6]
        slopeAngle = pText[7]

        # K-factor
        kOption = pText[8]
        soilData = pText[9]
        soilCode = pText[10]

        # C-factor
        cOption = pText[11]
        landCoverData = pText[12]
        landCoverCode = pText[13]

        # P-factor
        supportData = pText[14]

        saveFactors = common.strToBool(pText[15])

        # Rerun parameter may not be present when the tool is run as part of a batch run tool. If it is not, set rerun to False.
        try:
            rerun = common.strToBool(pText[16])
        except IndexError:
            rerun = False
        except Exception:
            raise

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks(outputFolder, rerun)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Set up progress log file
        progress.initProgress(outputFolder, rerun)

        # Write input params to XML
        common.writeParamsToXML(params, outputFolder)

        # Set option for LS-factor
        if slopeOption == 'Calculate based on slope and length only':
            lsOption = 'SlopeLength'

        elif slopeOption == 'Include upslope contributing area':
            lsOption = 'UpslopeArea'

        else:
            log.error('Invalid LS-factor option')
            sys.exit()

        # Set soilOption for K-factor
        if kOption == 'Use preprocessed soil data':
            soilOption = 'PreprocessSoil'

        elif kOption == 'Use local K-factor dataset':
            soilOption = 'LocalSoil'

        else:
            log.error('Invalid soil erodibility option')
            sys.exit()

        # Set lcOption for C-factor
        if cOption == 'Use preprocessed land cover data':
            lcOption = 'PreprocessLC'

        elif cOption == 'Use local C-factor dataset':
            lcOption = 'LocalCfactor'

        else:
            log.error('Invalid C-factor option')
            sys.exit()

        # Call RUSLE function
        soilLoss = RUSLE.function(outputFolder, preprocessFolder, lsOption,
                                  slopeAngle, soilOption, soilData, soilCode,
                                  lcOption, landCoverData, landCoverCode,
                                  rData, saveFactors, supportData, rerun)

        # Set up filenames for display purposes
        soilLoss = os.path.join(outputFolder, "soilloss")

        arcpy.SetParameter(3, soilLoss)

        log.info("RUSLE operations completed successfully")

        return soilLoss

    except Exception:
        log.exception("RUSLE tool failed")
        raise
Example #14
def clipInputs(outputFolder, studyAreaMaskBuff, inputDEM, inputLC, inputSoil, inputStreamNetwork, outputDEM, outputLC, outputSoil, outputStream):

    try:
        log.info("Clipping input data")

        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "clip_")

        DEMCopy = prefix + "DEMCopy"
        lcResample = prefix + "lcResample"
        soilResample = prefix + "soilResample"

        # Clip DEM
        # Check DEM not compressed. If it is, uncompress before clipping.
        compression = arcpy.Describe(inputDEM).compressionType
        if compression.lower() != 'none':
            arcpy.env.compression = "None"
            arcpy.CopyRaster_management(inputDEM, DEMCopy)
            arcpy.Clip_management(DEMCopy, "#", outputDEM, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete copy of DEM
            arcpy.Delete_management(DEMCopy)

        else:
            arcpy.Clip_management(inputDEM, "#", outputDEM, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

        DEMSpatRef = arcpy.Describe(outputDEM).SpatialReference

        # Set environment variables
        arcpy.env.snapRaster = outputDEM
        arcpy.env.extent = outputDEM
        arcpy.env.cellSize = outputDEM

        # Resample and clip land cover
        lcFormat = arcpy.Describe(inputLC).dataType

        if lcFormat in ['RasterDataset', 'RasterLayer']:
            lcResampleInt = arcpy.sa.ApplyEnvironment(inputLC)
            lcResampleInt.save(lcResample)
            del lcResampleInt

            arcpy.Clip_management(lcResample, "#", outputLC, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete resampled LC
            arcpy.Delete_management(lcResample)

        elif lcFormat in ['ShapeFile', 'FeatureClass']:
            arcpy.Clip_analysis(inputLC, studyAreaMaskBuff, outputLC, configuration.clippingTolerance)

        # Resample and clip soil
        soilFormat = arcpy.Describe(inputSoil).dataType

        if soilFormat in ['RasterDataset', 'RasterLayer']:
            soilResampleInt = arcpy.sa.ApplyEnvironment(inputSoil)
            soilResampleInt.save(soilResample)
            del soilResampleInt

            arcpy.Clip_management(soilResample, "#", outputSoil, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete resampled soil
            arcpy.Delete_management(soilResample)

        elif soilFormat in ['ShapeFile', 'FeatureClass']:
            arcpy.Clip_analysis(inputSoil, studyAreaMaskBuff, outputSoil, configuration.clippingTolerance)

        # Clip stream network
        if inputStreamNetwork is None:
            outputStream = None
        else:
            arcpy.Clip_analysis(inputStreamNetwork, studyAreaMaskBuff, outputStream, configuration.clippingTolerance)

        log.info("Input data clipped successfully")

    except Exception:
        log.error("Input data clipping did not complete successfully")
        raise
Example #15
def function(outputFolder,
             preprocessFolder,
             lsOption,
             slopeAngle,
             soilOption,
             soilData,
             soilCode,
             lcOption,
             landCoverData,
             landCoverCode,
             rData,
             saveFactors,
             supportData,
             rerun=False):

    try:
        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "rusle_")

        supportCopy = prefix + "supportCopy"
        soilResample = prefix + "soilResample"
        lcResample = prefix + "lcResample"
        rainResample = prefix + "rainResample"
        supportResample = prefix + "supportResample"
        soilClip = prefix + "soilClip"
        landCoverClip = prefix + "landCoverClip"
        rainClip = prefix + "rainClip"
        supportClip = prefix + "supportClip"
        DEMSlopeCut = prefix + "DEMSlopeCut"
        DEMSlopeRad = prefix + "DEMSlopeRad"
        upslopeArea = prefix + "upslopeArea"
        soilJoin = prefix + "soilJoin"
        lcJoin = prefix + "lcJoin"
        rFactor = prefix + "rFactor"
        lsFactor = prefix + "lsFactor"
        kFactor = prefix + "kFactor"
        cFactor = prefix + "cFactor"
        pFactor = prefix + "pFactor"
        soilLossInt = prefix + "soilLossInt"
        landCoverRas = prefix + "landCoverRas"
        soilRas = prefix + "soilRas"
        dataMask = prefix + "dataMask"

        # Get input study area mask
        files = common.getFilenames('preprocess', preprocessFolder)
        studyMask = files.studyareamask
        inputLC = files.lc_ras
        inputSoil = files.soil_ras
        DEMSlopePerc = files.slopeRawPer
        DEMSlope = files.slopeHydDeg
        hydFAC = files.hydFAC
        rawDEM = files.rawDEM
        streamInvRas = files.streamInvRas

        # Set output filenames
        files = common.getFilenames('rusle', outputFolder)
        soilLoss = files.soilloss

        if saveFactors:
            # if RUSLE factor layers are to be saved

            rFactor = files.rFactor
            lsFactor = files.lsFactor
            kFactor = files.kFactor
            cFactor = files.cFactor
            pFactor = files.pFactor

        reconOpt = common.getInputValue(preprocessFolder, 'Recondition_DEM')

        ####################
        ### Check inputs ###
        ####################

        codeBlock = 'Check if new inputs are in a projected coordinate systems'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            inputs = [rData]

            optInputs = [soilData, landCoverData, supportData]
            for data in optInputs:
                if data is not None:
                    inputs.append(data)

            for data in inputs:
                spatialRef = arcpy.Describe(data).spatialReference

                if spatialRef.Type == "Geographic":
                    # If any of the inputs are not in a projected coordinate system, the tool exits with a warning
                    log.error('Data: ' + str(data))
                    log.error(
                        'This data has a Geographic Coordinate System. It must have a Projected Coordinate System.'
                    )
                    sys.exit()

            log.info(
                'All new inputs are in a projected coordinate system, proceeding.'
            )

            progress.logProgress(codeBlock, outputFolder)

        try:

            # Set environment and extents to DEM
            RawDEM = Raster(rawDEM)

            arcpy.env.extent = RawDEM
            arcpy.env.mask = RawDEM
            arcpy.env.cellSize = RawDEM
            arcpy.env.compression = "None"

            cellsizedem = float(
                arcpy.GetRasterProperties_management(rawDEM,
                                                     "CELLSIZEX").getOutput(0))

            log.info("Calculation extent set to DEM data extent")

        except Exception:
            log.error("Environment and extent conditions not set correctly")
            raise

        codeBlock = 'Convert any vector inputs to raster'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            if landCoverData is not None:
                lcFormat = arcpy.Describe(landCoverData).dataType

                if lcFormat in ['ShapeFile', 'FeatureClass']:

                    arcpy.PolygonToRaster_conversion(landCoverData,
                                                     landCoverCode,
                                                     landCoverRas,
                                                     "CELL_CENTER", "",
                                                     cellsizedem)
                    log.info('Land cover raster produced')

                else:
                    arcpy.CopyRaster_management(landCoverData, landCoverRas)

            if soilData is not None:
                soilFormat = arcpy.Describe(soilData).dataType

                if soilFormat in ['ShapeFile', 'FeatureClass']:

                    arcpy.PolygonToRaster_conversion(soilData, soilCode,
                                                     soilRas, "CELL_CENTER",
                                                     "", cellsizedem)
                    log.info('Soil raster produced')

                else:
                    arcpy.CopyRaster_management(soilData, soilRas)

            progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Resample down to DEM cell size'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Resample down to DEM cell size
            log.info("Resampling inputs down to DEM cell size")

            resampledRainTemp = arcpy.sa.ApplyEnvironment(rData)
            resampledRainTemp.save(rainResample)
            del resampledRainTemp

            if soilData is not None:
                resampledSoilTemp = arcpy.sa.ApplyEnvironment(soilRas)
                resampledSoilTemp.save(soilResample)
                del resampledSoilTemp

                # Delete soil raster
                arcpy.Delete_management(soilRas)

            if landCoverData is not None:
                resampledLCTemp = arcpy.sa.ApplyEnvironment(landCoverRas)
                resampledLCTemp.save(lcResample)
                del resampledLCTemp

                # Delete land cover raster
                arcpy.Delete_management(landCoverRas)

            if supportData is not None:

                arcpy.CopyRaster_management(supportData, supportCopy)
                resampledPTemp = arcpy.sa.ApplyEnvironment(supportCopy)
                resampledPTemp.save(supportResample)
                del resampledPTemp

                # Delete support raster
                arcpy.Delete_management(supportCopy)

            log.info("Inputs resampled")

            progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Clip inputs'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            log.info("Clipping inputs")

            arcpy.Clip_management(rainResample,
                                  "#",
                                  rainClip,
                                  studyMask,
                                  clipping_geometry="ClippingGeometry")

            # Delete resampled R-factor
            arcpy.Delete_management(rainResample)

            if soilData is not None:
                arcpy.Clip_management(soilResample,
                                      "#",
                                      soilClip,
                                      studyMask,
                                      clipping_geometry="ClippingGeometry")

                # Delete resampled soil
                arcpy.Delete_management(soilResample)

            if landCoverData is not None:
                arcpy.Clip_management(lcResample,
                                      "#",
                                      landCoverClip,
                                      studyMask,
                                      clipping_geometry="ClippingGeometry")

                # Delete resampled land cover
                arcpy.Delete_management(lcResample)

            if supportData is not None:
                arcpy.Clip_management(supportResample,
                                      "#",
                                      supportClip,
                                      studyMask,
                                      clipping_geometry="ClippingGeometry")

                # Delete resampled support data
                arcpy.Delete_management(supportResample)

            log.info("Inputs clipped")

            progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Check against study area mask'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            inputs = [rainClip]

            optInputs = [soilClip, landCoverClip, supportClip]
            for data in optInputs:
                if arcpy.Exists(data):
                    inputs.append(data)

            for data in inputs:
                dataMask = common.extractRasterMask(data)
                common.checkCoverage(dataMask, studyMask, data)
                del dataMask

            progress.logProgress(codeBlock, outputFolder)

        ####################################
        ### Rainfall factor calculations ###
        ####################################

        codeBlock = 'Produce R-factor layer'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Copy resampled raster
            arcpy.CopyRaster_management(rainClip, rFactor)

            # Delete clipped R-factor
            arcpy.Delete_management(rainClip)

            log.info("R-factor layer produced")

            progress.logProgress(codeBlock, outputFolder)

        ######################################################
        ### Slope length and steepness factor calculations ###
        ######################################################

        codeBlock = 'Produce LS-factor layer'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Calculate the threshold slope angle in percent
            lsFactorRad = float(slopeAngle) * (math.pi / 180.0)
            riseRun = math.tan(lsFactorRad)
            cutoffPercent = riseRun * 100.0

            if lsOption == 'SlopeLength':

                log.info(
                    "Calculating LS-factor based on slope length and steepness only"
                )

                # Produce slope cutoff raster
                DEMSlopeCutTemp = Con(
                    Raster(DEMSlopePerc) > float(cutoffPercent),
                    float(cutoffPercent), Raster(DEMSlopePerc))
                DEMSlopeCutTemp.save(DEMSlopeCut)
                del DEMSlopeCutTemp

                # Calculate the parts of the LS-factor equation separately
                lsCalcA = (cellsizedem / 22.0)**0.5
                lsCalcB = 0.065 + (0.045 * Raster(DEMSlopeCut)) + (
                    0.0065 * Power(Raster(DEMSlopeCut), 2.0))

                # Calculate the LS-factor
                lsOrigTemp = lsCalcA * lsCalcB
                lsOrigTemp.save(lsFactor)

                # Delete temporary files
                del lsCalcB
                del lsOrigTemp
                arcpy.Delete_management(DEMSlopeCut)

                log.info("LS-factor layer produced")

            elif lsOption == 'UpslopeArea':

                if reconOpt == 'false':
                    log.error(
                        'Cannot calculate LS-factor including upslope contributing area on unreconditioned DEM'
                    )
                    log.error(
                        'Rerun the preprocessing tool to recondition the DEM')
                    sys.exit()

                log.info(
                    "Calculating LS-factor including upslope contributing area"
                )

                # Produce slope cutoff raster
                DEMSlopeCutTemp = Con(
                    Raster(DEMSlope) > float(slopeAngle), float(slopeAngle),
                    Raster(DEMSlope))
                DEMSlopeCutTemp.save(DEMSlopeCut)
                del DEMSlopeCutTemp

                # Convert from degrees to radian
                DEMSlopeRadTemp = Raster(DEMSlopeCut) * 0.01745
                DEMSlopeRadTemp.save(DEMSlopeRad)
                del DEMSlopeRadTemp

                # Currently hardcoded, but should have them as options in future
                m = 0.5
                n = 1.2

                upslopeAreaTemp = Raster(hydFAC) * float(cellsizedem)
                upslopeAreaTemp.save(upslopeArea)
                del upslopeAreaTemp

                lsFactorTemp = (m + 1) * Power(
                    Raster(upslopeArea) / 22.1, float(m)) * Power(
                        Sin(Raster(DEMSlopeRad)) / 0.09, float(n))
                lsFactorTemp.save(lsFactor)
                del lsFactorTemp

                # Delete temporary files
                arcpy.Delete_management(DEMSlopeCut)
                arcpy.Delete_management(DEMSlopeRad)
                arcpy.Delete_management(upslopeArea)

                log.info("LS-factor layer produced")

            progress.logProgress(codeBlock, outputFolder)

        ################################
        ### Soil factor calculations ###
        ################################

        codeBlock = 'Produce K-factor layer'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            if soilOption == 'PreprocessSoil':

                # Use the soil from the preprocessFolder
                arcpy.CopyRaster_management(inputSoil, soilClip)

                kTable = os.path.join(configuration.tablesPath,
                                      "rusle_hwsd.dbf")
                arcpy.JoinField_management(soilClip, "VALUE", kTable,
                                           "MU_GLOBAL")
                arcpy.CopyRaster_management(soilClip, soilJoin)

                # Delete temporary files
                arcpy.Delete_management(soilClip)

                kOrigTemp = Lookup(soilJoin, "K_Stewart")
                kOrigTemp.save(kFactor)

                # Delete temporary files
                del kOrigTemp
                arcpy.Delete_management(soilJoin)

            elif soilOption == 'LocalSoil':

                # User input is their own K-factor dataset
                kOrigTemp = Raster(soilClip)
                kOrigTemp.save(kFactor)

                # Delete temporary files
                del kOrigTemp
                arcpy.Delete_management(soilClip)

            else:
                log.error('Invalid soil erodibility option')
                sys.exit()

            log.info("K-factor layer produced")

            progress.logProgress(codeBlock, outputFolder)

        #################################
        ### Cover factor calculations ###
        #################################

        codeBlock = 'Produce C-factor layer'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            if lcOption == 'PreprocessLC':

                # Use LC from the preprocess folder

                arcpy.CopyRaster_management(inputLC, landCoverClip)

                cTable = os.path.join(configuration.tablesPath,
                                      "rusle_esacci.dbf")

                arcpy.JoinField_management(landCoverClip, "VALUE", cTable,
                                           "LC_CODE")
                arcpy.CopyRaster_management(landCoverClip, lcJoin)

                arcpy.Delete_management(landCoverClip)

                cOrigTemp = Lookup(lcJoin, "CFACTOR")
                cOrigTemp.save(cFactor)

                # Delete temporary files
                del cOrigTemp
                arcpy.Delete_management(lcJoin)

            elif lcOption == 'LocalCfactor':

                # User input is their own C-factor dataset

                cOrigTemp = Raster(landCoverClip)
                cOrigTemp.save(cFactor)

                # Delete temporary files
                del cOrigTemp
                arcpy.Delete_management(landCoverClip)

            else:
                log.error('Invalid C-factor option')
                sys.exit()

            log.info("C-factor layer produced")

            progress.logProgress(codeBlock, outputFolder)

        #####################################
        ### Support practice calculations ###
        #####################################

        codeBlock = 'Produce P-factor layer'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            if supportData is not None:
                arcpy.CopyRaster_management(supportClip, pFactor)
                log.info("P-factor layer produced")

                # Delete temporary files
                arcpy.Delete_management(supportClip)

            progress.logProgress(codeBlock, outputFolder)

        ##############################
        ### Soil loss calculations ###
        ##############################

        codeBlock = 'Produce soil loss layer'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            if supportData is not None:
                soilLossTemp = Raster(rFactor) * Raster(lsFactor) * Raster(
                    kFactor) * Raster(cFactor) * Raster(pFactor)

            else:
                soilLossTemp = Raster(rFactor) * Raster(lsFactor) * Raster(
                    kFactor) * Raster(cFactor)

            if lsOption == 'UpslopeArea':
                soilLossTemp = soilLossTemp * Raster(streamInvRas)
                soilLossTemp.save(soilLoss)

            else:
                soilLossTemp.save(soilLoss)

            log.info("RUSLE function completed successfully")

            progress.logProgress(codeBlock, outputFolder)

        return soilLoss

    except Exception:
        arcpy.AddError("RUSLE function failed")
        raise

    finally:
        # Remove feature layers from memory
        try:
            for lyr in common.listFeatureLayers(locals()):
                arcpy.Delete_management(locals()[lyr])
                exec(lyr + ' = None') in locals()
        except Exception:
            pass
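
For reference, the cutoff conversion at the top of the LS-factor block turns a slope angle in degrees into percent slope (rise over run times 100):

import math

def slopeDegreesToPercent(angleDeg):
    # percent slope = tan(angle) * 100
    return math.tan(math.radians(angleDeg)) * 100.0

print(slopeDegreesToPercent(26.57))  # ~50.0, i.e. a 1-in-2 gradient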
Example #16
def function(params):

    try:
        pText = common.paramsAsText(params)
        # Get inputs
        runSystemChecks = common.strToBool(pText[1])
        outputFolder = pText[5]

        yearAFolder = pText[6]
        yearBFolder = pText[7]

        # Inputs constant between the two years
        slopeOption = pText[8]
        slopeAngle = pText[9]
        rData = pText[10]
        soilData = pText[11]
        soilCode = pText[12]

        # Land covers
        YearALCData = pText[13]
        YearALCCode = pText[14]
        YearBLCData = pText[15]
        YearBLCCode = pText[16]

        # Support factors
        YearAPData = pText[17]
        YearBPData = pText[18]

        saveFactors = False

        # Set option for LS-factor
        if slopeOption == 'Calculate based on slope and length only':
            lsOption = 'SlopeLength'

        elif slopeOption == 'Include upslope contributing area':
            lsOption = 'UpslopeArea'

        else:
            log.error('Invalid LS-factor option')
            sys.exit()

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks()

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Call RUSLE_accounts function

        RUSLE_accounts.function(outputFolder, yearAFolder, yearBFolder,
                                lsOption, slopeAngle, rData, soilData,
                                soilCode, YearALCData, YearALCCode,
                                YearBLCData, YearBLCCode, YearAPData,
                                YearBPData, saveFactors)

        # Set up filenames for display purposes
        soilLossA = os.path.join(outputFolder, "soillossA")
        soilLossB = os.path.join(outputFolder, "soillossB")
        soilLossDiff = os.path.join(outputFolder, "soillossDiff")

        arcpy.SetParameter(2, soilLossA)
        arcpy.SetParameter(3, soilLossB)
        arcpy.SetParameter(4, soilLossDiff)

        log.info("RUSLE accounts operations completed successfully")

    except Exception:
        log.exception("RUSLE accounts tool failed")
        raise
Example #17
def function(outputFolder, dataSetsToAggregate, aggregateMask,
             maskFullyWithinSAM, studyAreaMask):

    try:
        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "aggdata_")

        studyAreaMaskDissolved = prefix + "studyAreaMaskDissolved"
        aggregateMaskClipped = prefix + "aggregateMaskClipped"

        if six.PY2:
            memoryPrefix = 'in_memory'
        else:
            memoryPrefix = 'memory'

        singleAggUnit = os.path.join(memoryPrefix, "singleAggUnit")
        dataClippedToUnit = os.path.join(memoryPrefix, "dataClippedToUnit")
        dataInUnitDissolved = os.path.join(memoryPrefix, "dataInUnitDissolved")

        tempLayer = "MaskLayer"
        unitMaskLayer = "UnitMaskLayer"

        # Clip aggregation mask to extent of study area
        tmpLyr1 = arcpy.MakeFeatureLayer_management(aggregateMask,
                                                    tempLayer).getOutput(0)
        arcpy.Dissolve_management(studyAreaMask, studyAreaMaskDissolved)

        if maskFullyWithinSAM:
            arcpy.SelectLayerByLocation_management(tempLayer,
                                                   "COMPLETELY_WITHIN",
                                                   studyAreaMaskDissolved)
            arcpy.CopyFeatures_management(tempLayer, aggregateMaskClipped)
        else:
            arcpy.CopyFeatures_management(aggregateMask, aggregateMaskClipped)

        # Find number of grid cells in aggregate mask
        numRecords = int(
            arcpy.GetCount_management(aggregateMaskClipped).getOutput(0))

        if numRecords == 0:
            log.error(
                'Aggregation unit feature class does not have any aggregation units intersecting the study area'
            )
            sys.exit()

        outputStats = []

        for dataToAggregate in dataSetsToAggregate:

            dataSet = dataToAggregate.dataSet
            linkCode = dataToAggregate.linkCode

            # Calculate size of each aggregation unit
            arcpy.AddField_management(aggregateMaskClipped, "AREA_SQKM",
                                      "DOUBLE")
            arcpy.CalculateField_management(aggregateMaskClipped, "AREA_SQKM",
                                            "!SHAPE.AREA@SQUAREKILOMETERS!",
                                            "PYTHON_9.3")

            # Productivity metrics
            tmpLyr2 = arcpy.MakeFeatureLayer_management(
                aggregateMaskClipped, unitMaskLayer).getOutput(0)
            OID = str(arcpy.Describe(aggregateMaskClipped).oidFieldName)

            # Initialise variables
            unitNo = 0
            numCovers = []
            shannonIndex = []
            inverseSimpsonsIndex = []
            meanPatchAreas = []

            with arcpy.da.SearchCursor(unitMaskLayer, OID) as gridCursor:

                # Loop through each aggregation unit
                for gridRow in gridCursor:

                    unitNo = unitNo + 1
                    log.info("Aggregating data from unit " + str(unitNo) +
                             " of " + str(numRecords))

                    expression = OID + "=%s" % gridRow[0]
                    arcpy.SelectLayerByAttribute_management(
                        unitMaskLayer, "NEW_SELECTION", expression)
                    arcpy.CopyFeatures_management(unitMaskLayer, singleAggUnit)

                    # Find aggregation unit size
                    for row in arcpy.da.SearchCursor(singleAggUnit,
                                                     "AREA_SQKM"):
                        unitSize = row[0]

                    # Clip data to unit and calculate area
                    arcpy.Clip_analysis(dataSet, singleAggUnit,
                                        dataClippedToUnit)
                    arcpy.AddField_management(dataClippedToUnit, "AREA_HA",
                                              "DOUBLE")
                    arcpy.CalculateField_management(dataClippedToUnit,
                                                    "AREA_HA",
                                                    "!SHAPE.AREA@HECTARES!",
                                                    "PYTHON_9.3")

                    # Dissolve clipped data and calculate area
                    arcpy.Dissolve_management(dataClippedToUnit,
                                              dataInUnitDissolved, linkCode)
                    arcpy.AddField_management(dataInUnitDissolved, "AREA_SQKM",
                                              "DOUBLE")
                    arcpy.CalculateField_management(
                        dataInUnitDissolved, "AREA_SQKM",
                        "!SHAPE.AREA@SQUAREKILOMETERS!", "PYTHON_9.3")

                    classificationsCount = 0
                    probOcc = []  # will hold the probability of occurrence of each cover type

                    for row in arcpy.da.SearchCursor(dataInUnitDissolved,
                                                     [linkCode, "AREA_SQKM"]):
                        classificationsCount += 1
                        probOcc.append(row[1] / unitSize)

                    if len(probOcc) == 0:
                        shannon = -1
                        inverseSimpsons = -1
                    else:
                        shannon = -sum(probOcc * np.log(probOcc))
                        inverseSimpsons = 1 / sum(
                            np.array(probOcc) * np.array(probOcc))

                    shannonIndex.append(shannon)
                    inverseSimpsonsIndex.append(inverseSimpsons)
                    numCovers.append(classificationsCount)

                    patchAreas = []
                    for row in arcpy.da.SearchCursor(dataClippedToUnit,
                                                     [linkCode, "AREA_HA"]):
                        patchAreas.append(row[1])

                    if len(patchAreas) == 0:
                        meanPatchArea = 0
                    else:
                        meanPatchArea = np.mean(patchAreas)

                    meanPatchAreas.append(meanPatchArea)

            log.info("Completed iteration through aggregation units for " +
                     str(dataSet))

            # Determine output file name for data set statistics
            baseDataSetName = os.path.basename(dataSet).replace('-', '')
            if baseDataSetName[0] == '{':
                statsFilename = baseDataSetName[1:-1] + '_stats.shp'
            else:
                statsFilename = baseDataSetName[0:-4] + '_stats.shp'

            aggregateStats = os.path.join(outputFolder, statsFilename)

            arcpy.CopyFeatures_management(aggregateMaskClipped, aggregateStats)
            arcpy.AddField_management(aggregateStats, "NUM_COVERS", "SHORT")
            arcpy.AddField_management(aggregateStats, "SHANNON", "DOUBLE", 6,
                                      2)
            arcpy.AddField_management(aggregateStats, "INVSIMPSON", "DOUBLE",
                                      6, 2)
            arcpy.AddField_management(aggregateStats, "MEANPATCH", "DOUBLE", 6,
                                      2)

            unitNo = 0
            with arcpy.da.UpdateCursor(aggregateStats,
                                       ['NUM_COVERS', 'SHANNON', 'INVSIMPSON',
                                        'MEANPATCH']) as cursor:
                for row in cursor:

                    row[0] = numCovers[unitNo]
                    row[1] = shannonIndex[unitNo]
                    row[2] = inverseSimpsonsIndex[unitNo]
                    row[3] = meanPatchAreas[unitNo]

                    cursor.updateRow(row)
                    unitNo = unitNo + 1

            outputStats.append(aggregateStats)

        log.info("Main aggregation function completed successfully")

        return outputStats

    except Exception:
        arcpy.AddError("Main aggregation function failed")
        raise

    finally:
        # Remove feature layers from memory
        try:
            for lyr in common.listFeatureLayers(locals()):
                arcpy.Delete_management(locals()[lyr])
                exec(lyr + ' = None') in locals()
        except Exception:
            pass
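
The diversity metrics computed inside the cursor loop are the standard Shannon index (H = -sum(p * ln p)) and inverse Simpson index (1 / sum(p^2)), with p the proportion of the aggregation unit covered by each class. A standalone sketch:

import numpy as np

def diversityIndices(classAreas, unitSize):
    # classAreas: area of each cover class within one aggregation unit (same units as unitSize)
    probOcc = np.array(classAreas, dtype=float) / unitSize
    shannon = -np.sum(probOcc * np.log(probOcc))
    inverseSimpsons = 1.0 / np.sum(probOcc * probOcc)
    return shannon, inverseSimpsons

print(diversityIndices([5.0, 3.0, 2.0], unitSize=10.0))  # (~1.03, ~2.63)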
Example #18
def function(outputFolder, inputData, aggregationColumn):

    try:
        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "extent_")
        zones = prefix + "aggZones"
        outZonal = prefix + "outZonal"
        rasData = prefix + "rasData"

        # Define field names
        extentName = "area_km2"
        percName = "percentCov"

        # Define output files
        outTable = os.path.join(outputFolder, 'statExtentTable.csv')

        # Ensure the input data is in a projected coordinate system
        spatialRef = arcpy.Describe(inputData).spatialReference
        unit = str(spatialRef.linearUnitName)

        if spatialRef.Type == "Geographic":
            log.error(
                'The input data has a Geographic Coordinate System. It must have a Projected Coordinate System.'
            )
            sys.exit()

        # Check input data type
        dataFormat = arcpy.Describe(inputData).dataType
        if dataFormat in ['ShapeFile', 'FeatureClass']:
            inputType = 'Shp'
        elif dataFormat in ['RasterDataset', 'RasterLayer']:
            inputType = 'Ras'
        else:
            log.error(
                'Input data is neither shapefile/feature class nor raster')
            log.error('Ensure data is one of these types')
            sys.exit()

        # If the input type is a shapefile
        if inputType == 'Shp':

            # Check if the aggregation column exists
            zoneFields = arcpy.ListFields(inputData)
            zoneFound = False
            for field in zoneFields:
                fieldName = str(field.name)

                if fieldName == str(aggregationColumn):
                    zoneFound = True

            if not zoneFound:
                log.error('Aggregation column (' + str(aggregationColumn) +
                          ') not found in zone shapefile')
                log.error('Please ensure this field is present')
                sys.exit()

            # Dissolve aggregation zone based on aggregation column
            arcpy.Dissolve_management(inputData, zones, aggregationColumn)
            log.info("Dissolved aggregation zones based on: " +
                     str(aggregationColumn))

            # If extent field already exists in the shapefile, delete it here
            inputFields = arcpy.ListFields(zones)
            for field in inputFields:
                if field.name == extentName:
                    arcpy.DeleteField_management(zones, extentName)

            # Calculate geometry
            arcpy.AddField_management(zones, extentName, "FLOAT")
            exp = "!SHAPE.AREA@SQUAREKILOMETERS!"
            arcpy.CalculateField_management(zones, extentName, exp,
                                            "PYTHON_9.3")
            log.info("Area calculated for input data classes")

            # Calculate the total area
            totalArea = 0.0
            fields = [str(aggregationColumn), str(extentName)]
            with arcpy.da.SearchCursor(zones, fields) as cursor:
                for row in cursor:
                    name = row[0]
                    area = row[1]

                    totalArea += area

            # Calculate percent coverage
            arcpy.AddField_management(zones, percName, "FLOAT")
            fieldsPerc = [str(extentName), str(percName)]
            with arcpy.da.UpdateCursor(zones, fieldsPerc) as updateCursor:
                for row in updateCursor:

                    area = row[0]
                    percentCoverage = (float(area) / float(totalArea)) * 100.0
                    row[1] = percentCoverage

                    # Update row with percent coverage
                    try:
                        updateCursor.updateRow(row)
                    except Exception:
                        pass

            # Write to output table
            outFields = [aggregationColumn, extentName, percName]
            outLabels = ['Classes', 'Area (sq km)', 'Area (percent)']

            with open(outTable, 'wb') as csv_file:
                writer = csv.writer(csv_file)
                writer.writerow(outLabels)

                with arcpy.da.SearchCursor(zones, outFields) as cursor:
                    for row in cursor:
                        writer.writerow(row)

                log.info('Extent csv table created')

        elif inputType == 'Ras':
            # If the user has input a raster file

            # Check if the raster is type integer
            rasType = arcpy.GetRasterProperties_management(
                inputData, "VALUETYPE")
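            # VALUETYPE codes 3 to 8 are the integer pixel types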
            rasterTypes = [3, 4, 5, 6, 7, 8]

            if int(str(rasType)) in rasterTypes:
                log.info('Input raster is integer type, proceeding...')
            else:
                log.error('Input raster is not integer type')
                log.error('Please ensure input raster is integer type')
                sys.exit()

            # Check if COUNT column exists
            inputFields = arcpy.ListFields(inputData)
            countFound = False
            for field in inputFields:
                if field.name == 'COUNT':
                    countFound = True

            if not countFound:
                log.error('COUNT column not found')
                log.error('Please ensure your raster has a COUNT column')
                sys.exit()

            # Get cell size of the raster
            cellSize = float(
                arcpy.GetRasterProperties_management(inputData,
                                                     "CELLSIZEX").getOutput(0))

            # Check units of raster
            if unit != 'Meter':
                log.error('Spatial reference units are not in metres')
                log.error('Please use a spatial reference that is in metres')
                sys.exit()

            # Copy raster to temporary file
            arcpy.CopyRaster_management(inputData, rasData)

            # Copy raster table to scratch GDB
            arcpy.TableToTable_conversion(inputData, arcpy.env.scratchGDB,
                                          "extent_table")
            dbfTable = os.path.join(arcpy.env.scratchGDB, "extent_table")

            # Add new fields to the dbfTable
            arcpy.AddField_management(dbfTable, extentName, "FLOAT")
            arcpy.AddField_management(dbfTable, percName, "FLOAT")

            # Calculate total area and area of each class
            totalArea = 0.0
            fields = [str(aggregationColumn), 'COUNT', extentName]
            with arcpy.da.UpdateCursor(dbfTable, fields) as updateCursor:
                for row in updateCursor:
                    name = row[0]
                    count = row[1]

                    # Calculate area in km2
                    area = float(count) * float(cellSize) * float(
                        cellSize) / 1000000.0
                    row[2] = area

                    totalArea += area

                    # Update row with area
                    try:
                        updateCursor.updateRow(row)
                    except Exception:
                        pass

            # Calculate percent coverage of each class
            fieldsPerc = [str(extentName), str(percName)]
            with arcpy.da.UpdateCursor(dbfTable, fieldsPerc) as updateCursor:
                for row in updateCursor:

                    area = row[0]
                    percentCoverage = (float(area) / float(totalArea)) * 100.0
                    row[1] = percentCoverage

                    # Update row with percent coverage
                    try:
                        updateCursor.updateRow(row)
                    except Exception:
                        pass

            log.info('Percent coverage calculated for each class')

            # Write output to CSV file
            outFields = [aggregationColumn, extentName, percName]
            outLabels = ['Classes', 'Area (sq km)', 'Area (percent)']

            with open(outTable, 'wb') as csv_file:
                writer = csv.writer(csv_file)
                writer.writerow(outLabels)

                with arcpy.da.SearchCursor(dbfTable, outFields) as cursor:
                    for row in cursor:
                        writer.writerow(row)

                log.info('Extent csv table created')

        log.info("Extent statistics function completed successfully")

    except Exception:
        arcpy.AddError("Extent statistics function failed")
        raise

    finally:
        # Remove feature layers from memory
        try:
            for lyr in common.listFeatureLayers(locals()):
                arcpy.Delete_management(locals()[lyr])
                exec(lyr + ' = None') in locals()
        except Exception:
            pass
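
A minimal sketch of the arithmetic the raster branch above performs, with no arcpy dependency; the cell size and class counts below are illustrative values only, not LUCI outputs:

# Each class area is COUNT * cellsize^2 square metres; dividing by
# 1,000,000 gives km2, and each area is then expressed as a
# percentage of the total, exactly as the update cursors above do.

cellSize = 25.0  # metres (hypothetical)
classCounts = {1: 160000, 2: 40000, 3: 120000}  # class code -> cell COUNT

areasKm2 = {c: n * cellSize * cellSize / 1000000.0
            for c, n in classCounts.items()}
totalArea = sum(areasKm2.values())

for c, area in sorted(areasKm2.items()):
    print('%s: %.3f km2 (%.1f%%)' % (c, area, area / totalArea * 100.0))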
Example #19
0
def function(outputFolder, lcOption, inputLC, openingLC, closingLC,
             openingField, closingField, lcTable, lcCodeField, lcNameField):

    try:
        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "lc_")

        year1 = prefix + "year1"
        year2 = prefix + "year2"
        joinedLC = prefix + "joinedLC"

        # Ensure all inputs are in a projected coordinate system
        inputs = []

        if inputLC is not None:
            inputs.append(inputLC)

        if openingLC is not None:
            inputs.append(openingLC)

        if closingLC is not None:
            inputs.append(closingLC)

        log.info("Checking if inputs are in a projected coordinate system")

        for data in inputs:
            spatialRef = arcpy.Describe(data).spatialReference

            if spatialRef.Type == "Geographic":
                # If any of the inputs are not in a projected coordinate system, the tool exits with a warning

                log.error('Data: ' + str(data))
                log.error(
                    'This data has a Geographic Coordinate System. It must have a Projected Coordinate System.'
                )
                sys.exit()

        # Divide lcOption here
        if lcOption == 'One shapefile with multiple fields':
            lcOptionCode = 1

        elif lcOption == 'Two separate shapefiles':
            lcOptionCode = 2

        else:
            log.error("Invalid land cover option, exiting tool")
            sys.exit()

        if lcOptionCode == 1:

            # Dissolve land cover based on the opening year field
            arcpy.Dissolve_management(inputLC, year1, openingField)
            log.info("Dissolved opening year land cover based on: " +
                     str(openingField))

            # Dissolve land cover based on the closing year field
            arcpy.Dissolve_management(inputLC, year2, closingField)
            log.info("Dissolved closing year land cover based on: " +
                     str(closingField))

        elif lcOptionCode == 2:

            arcpy.Dissolve_management(openingLC, year1, openingField)
            log.info("Dissolved opening year land cover based on: " +
                     str(openingField))

            arcpy.Dissolve_management(closingLC, year2, closingField)
            log.info("Dissolved closing year land cover based on: " +
                     str(closingField))

        # Calculate geometry
        arcpy.AddField_management(year1, "area1_km2", "FLOAT")
        exp = "!SHAPE.AREA@SQUAREKILOMETERS!"
        arcpy.CalculateField_management(year1, "area1_km2", exp, "PYTHON_9.3")
        log.info("Area calculated for land cover in opening year")

        arcpy.AddField_management(year2, "area2_km2", "FLOAT")
        exp = "!SHAPE.AREA@SQUAREKILOMETERS!"
        arcpy.CalculateField_management(year2, "area2_km2", exp, "PYTHON_9.3")
        log.info("Area calculated for land cover in closing year")

        # Join the two shapefiles
        arcpy.JoinField_management(year1, openingField, year2, closingField)
        arcpy.Copy_management(year1, joinedLC)
        log.info("Joined opening and closing land covers")

        # Add two new fields: AbsDiff (absolute difference) and RelDiff (relative difference)
        arcpy.AddField_management(joinedLC, "AbsDiff", "FLOAT")
        arcpy.AddField_management(joinedLC, "RelDiff", "FLOAT")

        # Calculate values of the new fields
        with arcpy.da.UpdateCursor(
                joinedLC,
            ['area1_km2', 'area2_km2', 'AbsDiff', 'RelDiff']) as cursor:
            for row in cursor:

                Area1 = row[0]
                Area2 = row[1]

                AbsDiff = Area2 - Area1
                row[2] = AbsDiff

                RelDiff = (float(AbsDiff) / float(Area2)) * 100.0
                row[3] = RelDiff

                cursor.updateRow(row)

        log.info(
            "Absolute and relative land cover change differences calculated")

        # If user has entered a land cover table, join it here
        if lcTable is not None:
            arcpy.JoinField_management(joinedLC, openingField, lcTable,
                                       lcCodeField)
            arcpy.JoinField_management(year1, openingField, lcTable,
                                       lcCodeField)
            arcpy.JoinField_management(year2, closingField, lcTable,
                                       lcCodeField)
            log.info("Land cover table provided and linked with output")

        # Create a CSV file with only the information the user requires
        exportFields = [
            openingField, 'area1_km2', 'area2_km2', 'AbsDiff', 'RelDiff'
        ]
        headings = [
            'Land cover code', 'Opening area (sq km)', 'Closing area (sq km)',
            'Absolute Difference', 'Relative Difference'
        ]

        outCSV = os.path.join(outputFolder, 'LandAccounts.csv')

        with open(outCSV, 'wb') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow(headings)

            with arcpy.da.SearchCursor(joinedLC, exportFields) as cursor:
                for row in cursor:
                    writer.writerow(row)

            log.info('Land cover account csv table created')

        ######################
        ### Export outputs ###
        ######################

        # Set output filenames
        lcOpening = 'lcOpening.shp'
        lcClosing = 'lcClosing.shp'
        lcOpeningWithAccounts = 'lcOpeningAcc.shp'

        arcpy.FeatureClassToFeatureClass_conversion(year1, outputFolder,
                                                    lcOpening)
        arcpy.FeatureClassToFeatureClass_conversion(year2, outputFolder,
                                                    lcClosing)
        arcpy.FeatureClassToFeatureClass_conversion(joinedLC, outputFolder,
                                                    lcOpeningWithAccounts)

        # Create list of outputs
        lcOutputs = []
        lcOutputs.append(os.path.join(outputFolder, lcOpening))
        lcOutputs.append(os.path.join(outputFolder, lcClosing))
        lcOutputs.append(os.path.join(outputFolder, lcOpeningWithAccounts))
        lcOutputs.append(outCSV)

        log.info("Land cover accounting function completed successfully")

        return lcOutputs

    except Exception:
        arcpy.AddError("Land cover accounting function failed")
        raise

    finally:
        # Remove feature layers from memory
        try:
            for lyr in common.listFeatureLayers(locals()):
                arcpy.Delete_management(locals()[lyr])
                exec(lyr + ' = None') in locals()
        except Exception:
            pass
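
A worked example (illustrative numbers only) of the change accounting in the cursor above; note that, as written, RelDiff is expressed relative to the closing-year area:

area1 = 120.0  # opening-year area, sq km (hypothetical)
area2 = 150.0  # closing-year area, sq km (hypothetical)

absDiff = area2 - area1              # 30.0 sq km gained
relDiff = (absDiff / area2) * 100.0  # 20.0 percent of the closing area

print(absDiff, relDiff)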
Example #20
0
def runSystemChecks(folder=None, rerun=False):

    import LUCI_SEEA.lib.progress as progress

    # Set overwrite output
    arcpy.env.overwriteOutput = True

    # Check spatial analyst licence is available
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")
    else:
        raise RuntimeError(
            "Spatial Analyst license not present or could not be checked out")

    ### Set workspaces so that temporary files are written to the LUCI scratch geodatabase ###
    if arcpy.ProductInfo() == "ArcServer":
        log.info('arcpy.env.scratchWorkspace on server: ' +
                 str(arcpy.env.scratchWorkspace))

        # Set current workspace
        arcpy.env.workspace = arcpy.env.scratchGDB
    else:

        # If rerunning a tool, check if scratch workspace has been set. If it has, use it as it is (with temporary rasters and feature classes from the previous run).
        scratchGDB = None

        if rerun:
            xmlFile = progress.getProgressFilenames(folder).xmlFile

            if os.path.exists(xmlFile):
                scratchGDB = readXML(xmlFile, 'ScratchGDB')

                if not arcpy.Exists(scratchGDB):
                    log.error('Previous scratch GDB ' + str(scratchGDB) +
                              ' does not exist. Tool cannot be rerun.')
                    log.error('Exiting tool')
                    sys.exit()

        if scratchGDB is None:

            # Set scratch path from values in user settings file if values present
            scratchPath = None
            try:
                if os.path.exists(configuration.userSettingsFile):

                    tree = ET.parse(configuration.userSettingsFile)
                    root = tree.getroot()
                    scratchPath = root.find("scratchPath").text

            except Exception:
                pass  # If any errors occur, ignore them. Just use the default scratch path.

            # Set scratch path if needed
            if scratchPath is None:
                scratchPath = configuration.scratchPath

            # Create scratch path folder
            if not os.path.exists(scratchPath):
                os.makedirs(scratchPath)

            # Remove old date/time stamped scratch folders if they exist and if they do not contain ArcGIS lock files.
            for root, dirs, files in os.walk(scratchPath):
                for dir in dirs:

                    # Try to rename folder. If this is possible then no locks are held on it and it can then be removed.
                    try:
                        fullDirPath = os.path.join(scratchPath, dir)
                        renamedDir = os.path.join(scratchPath,
                                                  'ready_for_deletion')
                        os.rename(fullDirPath, renamedDir)
                    except Exception:
                        # import traceback
                        # log.warning(traceback.format_exc())
                        pass
                    else:
                        try:
                            shutil.rmtree(renamedDir)
                        except Exception:
                            # import traceback
                            # log.warning(traceback.format_exc())
                            pass

            # Create new date/time stamped scratch folder for the scratch GDB to live in
            dateTimeStamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
            scratchGDBFolder = os.path.join(scratchPath,
                                            'scratch_' + dateTimeStamp)
            if not os.path.exists(scratchGDBFolder):
                os.mkdir(scratchGDBFolder)

            # Create scratch GDB
            scratchGDB = os.path.join(scratchGDBFolder, 'scratch.gdb')
            if not os.path.exists(scratchGDB):
                arcpy.CreateFileGDB_management(os.path.dirname(scratchGDB),
                                               os.path.basename(scratchGDB))

            # Try to remove old scratch path if still exists
            try:
                shutil.rmtree(configuration.oldScratchPath, ignore_errors=True)
            except Exception:
                pass

        # Set scratch and current workspaces
        arcpy.env.scratchWorkspace = scratchGDB
        arcpy.env.workspace = scratchGDB

        # Scratch folder
        scratchFolder = arcpy.env.scratchFolder
        if not os.path.exists(scratchFolder):
            os.mkdir(scratchFolder)

        # Remove all in_memory data sets
        arcpy.Delete_management("in_memory")

    # Check disk space for disk with scratch workspace
    freeSpaceGb = 3
    if getFreeDiskSpaceGb(arcpy.env.scratchWorkspace) < freeSpaceGb:
        log.warning("Disk containing scratch workspace has less than " +
                    str(freeSpaceGb) +
                    "Gb free space. This may cause this tool to fail.")
Example #21
0
def function(params):

    try:
        ###################
        ### Read inputs ###
        ###################

        pText = common.paramsAsText(params)

        outputFolder = pText[1]
        inputDEM = common.fullPath(pText[2])
        inputStudyAreaMask = pText[3]
        inputLC = pText[4]
        lcCode = pText[5]
        inputSoil = pText[6]
        soilCode = pText[7]
        reconDEM = common.strToBool(pText[8])
        inputStreamNetwork = pText[9]
        streamAccThresh = pText[10]
        riverAccThresh = pText[11]
        smoothDropBuffer = pText[12]
        smoothDrop = pText[13]
        streamDrop = pText[14]
        rerun = common.strToBool(pText[15])

        log.info('Inputs read in')

        ###########################
        ### Tool initialisation ###
        ###########################

        # Create Baseline folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Run system checks
        common.runSystemChecks(outputFolder, rerun)

        # Set up progress log file
        progress.initProgress(outputFolder, rerun)

        # Write input params to XML
        common.writeParamsToXML(params, outputFolder, 'PreprocessDEM')

        log.info('Tool initialised')

        ########################
        ### Define filenames ###
        ########################

        files = common.getFilenames('preprocess', outputFolder)
        studyAreaMask = files.studyareamask
        outputLCras = files.lc_ras
        outputLCvec = files.lc_vec
        outputSoilras = files.soil_ras
        outputSoilvec = files.soil_vec

        ###############################
        ### Set temporary variables ###
        ###############################

        prefix = os.path.join(arcpy.env.scratchGDB, 'base_')

        DEMTemp = prefix + 'DEMTemp'
        clippedDEM = prefix + 'clippedDEM'
        clippedLC = prefix + 'clippedLC'
        clippedSoil = prefix + 'clippedSoil'
        clippedStreamNetwork = prefix + 'clippedStreamNetwork'

        studyAreaMaskTemp = prefix + "studyAreaMaskTemp"
        studyAreaMaskBuff = prefix + "studyAreaMaskBuff"
        studyAreaMaskDiss = prefix + "studyAreaMaskDiss"

        log.info('Temporary variables set')

        # Check formats of inputs
        lcFormat = arcpy.Describe(inputLC).dataType
        soilFormat = arcpy.Describe(inputSoil).dataType

        ###################
        ### Data checks ###
        ###################

        codeBlock = 'Data checks 1'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            inputFiles = [inputDEM, inputStudyAreaMask, inputLC, inputSoil]
            if inputStreamNetwork is not None:
                inputFiles.append(inputStreamNetwork)

            for file in inputFiles:
                common.checkSpatialRef(file)

            # Set environment variables
            arcpy.env.snapRaster = inputDEM
            arcpy.env.cellSize = inputDEM
            arcpy.env.compression = "None"

            cellsizedem = float(
                arcpy.GetRasterProperties_management(inputDEM,
                                                     "CELLSIZEX").getOutput(0))

            # Get spatial references of DEM and study area mask
            DEMSpatRef = arcpy.Describe(inputDEM).SpatialReference
            maskSpatRef = arcpy.Describe(inputStudyAreaMask).SpatialReference

            # Reproject study area mask if it does not have the same coordinate system as the DEM
            if not common.equalProjections(DEMSpatRef, maskSpatRef):

                warning = "Study area mask does not have the same coordinate system as the DEM"
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                warning = "Mask coordinate system is " + maskSpatRef.Name + " while DEM coordinate system is " + DEMSpatRef.Name
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                warning = "Reprojecting study area mask to DEM coordinate system"
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                arcpy.Project_management(inputStudyAreaMask, studyAreaMaskTemp,
                                         DEMSpatRef)
                arcpy.CopyFeatures_management(studyAreaMaskTemp, studyAreaMask)
            else:
                arcpy.CopyFeatures_management(inputStudyAreaMask,
                                              studyAreaMask)

            # If DEM is large, clip it to a large buffer around the study area mask (~5km)
            inputDEM = baseline.clipLargeDEM(inputDEM, studyAreaMask)

            rasterInputFiles = []
            fcInputFiles = []

            # Sort land cover and soil into appropriate arrays based on data type
            if lcFormat in ['RasterDataset', 'RasterLayer']:
                rasterInputFiles.append(inputLC)
                outputLC = os.path.join(outputFolder, 'landcover')

            elif lcFormat in ['ShapeFile', 'FeatureClass']:
                fcInputFiles.append(inputLC)
                outputLC = os.path.join(outputFolder, 'landcover.shp')

            if soilFormat in ['RasterDataset', 'RasterLayer']:
                rasterInputFiles.append(inputSoil)
                outputSoil = os.path.join(outputFolder, 'soil')

            elif soilFormat in ['ShapeFile', 'FeatureClass']:
                fcInputFiles.append(inputSoil)
                outputSoil = os.path.join(outputFolder, 'soil.shp')

            if reconDEM is True and inputStreamNetwork is None:
                log.error(
                    'Cannot recondition the DEM without an input stream network'
                )
                log.error('Please provide an input stream network')
                sys.exit()

            # If the user has provided a stream network, add it to the list of inputs to check
            if inputStreamNetwork is not None:
                fcInputFiles.append(inputStreamNetwork)

            # Check that the inputs contain data
            for ras in rasterInputFiles:
                if ras is not None:

                    # Check file size
                    fileSizeGB = baseline.checkRasterSizeGB(ras)

                    if fileSizeGB < 1.0:
                        baseline.checkInputRaster(ras, outputFolder)

                    else:
                        log.warning(
                            "Cannot check if raster is empty or all NoData because it is too large"
                        )
                        log.warning(
                            "Please ensure this raster is not empty or all NoData: "
                            + str(ras))

            for fc in fcInputFiles:
                if fc is not None:
                    baseline.checkInputFC(fc, outputFolder)

            # Check that the land cover and soil FCs have the linking codes specified by the user
            if lcFormat in ['ShapeFile', 'FeatureClass']:
                if len(arcpy.ListFields(inputLC, lcCode)) != 1:
                    log.error('Field ' + lcCode +
                              ' does not exist in feature class ' + inputLC)
                    sys.exit()

            if soilFormat in ['ShapeFile', 'FeatureClass']:
                if len(arcpy.ListFields(inputSoil, soilCode)) != 1:
                    log.error('Field ' + soilCode +
                              ' does not exist in feature class ' + inputSoil)
                    sys.exit()

            progress.logProgress(codeBlock, outputFolder)

        ###############################
        ### Tidy up study area mask ###
        ###############################

        codeBlock = 'Tidy up study area mask'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Check how many polygons are in the mask shapefile
            numPolysInMask = int(
                arcpy.GetCount_management(studyAreaMask).getOutput(0))
            if numPolysInMask > 1:

                # Reduce multiple features where possible
                arcpy.Union_analysis(studyAreaMask, studyAreaMaskDiss,
                                     "ONLY_FID", "", "NO_GAPS")
                arcpy.Dissolve_management(studyAreaMaskDiss, studyAreaMask, "",
                                          "", "SINGLE_PART", "DISSOLVE_LINES")

            # Buffer study area mask
            baseline.bufferMask(inputDEM,
                                studyAreaMask,
                                outputStudyAreaMaskBuff=studyAreaMaskBuff)
            log.info('Study area mask buffered')

            progress.logProgress(codeBlock, outputFolder)

        #######################
        ### Clip input data ###
        #######################

        codeBlock = 'Clip inputs'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            baseline.clipInputs(outputFolder,
                                studyAreaMaskBuff,
                                inputDEM,
                                inputLC,
                                inputSoil,
                                inputStreamNetwork,
                                outputDEM=clippedDEM,
                                outputLC=clippedLC,
                                outputSoil=clippedSoil,
                                outputStream=clippedStreamNetwork)

            progress.logProgress(codeBlock, outputFolder)

        ##############################################
        ### Coverage checks on soil and land cover ###
        ##############################################

        codeBlock = 'Do coverage checks on clipped land cover and soil'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Do coverage checks on land cover and soil and copy to outputFolder
            if lcFormat in ['RasterDataset', 'RasterLayer']:

                lcMask = common.extractRasterMask(clippedLC)
                common.checkCoverage(lcMask, studyAreaMaskBuff, inputLC)

                arcpy.CopyRaster_management(clippedLC, outputLCras)

            elif lcFormat in ['ShapeFile', 'FeatureClass']:
                lcMask = common.dissolvePolygon(clippedLC)
                common.checkCoverage(lcMask, studyAreaMaskBuff, inputLC)

                arcpy.CopyFeatures_management(clippedLC, outputLCvec)

            if soilFormat in ['RasterDataset', 'RasterLayer']:

                soilMask = common.extractRasterMask(clippedSoil)
                common.checkCoverage(soilMask, studyAreaMaskBuff, inputLC)

                arcpy.CopyRaster_management(clippedSoil, outputSoilras)

            elif soilFormat in ['ShapeFile', 'FeatureClass']:

                soilMask = common.dissolvePolygon(clippedSoil)
                common.checkCoverage(soilMask, studyAreaMaskBuff, inputSoil)

                arcpy.CopyFeatures_management(clippedSoil, outputSoilvec)

            progress.logProgress(codeBlock, outputFolder)

        ######################################
        ### Convert LC and soil to rasters ###
        ######################################

        # For the RUSLE tool, the LC and soil must be in raster format

        codeBlock = 'Convert land cover and soil to rasters'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Recompute the DEM cell size here so that a rerun which skips
            # the 'Data checks 1' block above still has a value for it
            cellsizedem = float(
                arcpy.GetRasterProperties_management(inputDEM,
                                                     "CELLSIZEX").getOutput(0))

            if lcFormat in ['ShapeFile', 'FeatureClass']:
                arcpy.PolygonToRaster_conversion(clippedLC, lcCode,
                                                 outputLCras, "CELL_CENTER",
                                                 "", cellsizedem)
                log.info('Land cover raster produced')

            if soilFormat in ['ShapeFile', 'FeatureClass']:
                arcpy.PolygonToRaster_conversion(clippedSoil, soilCode,
                                                 outputSoilras, "CELL_CENTER",
                                                 "", cellsizedem)
                log.info('Soil raster produced')

            # Delete intermediate files
            arcpy.Delete_management(clippedLC)
            arcpy.Delete_management(clippedSoil)

            progress.logProgress(codeBlock, outputFolder)

        ###########################
        ### Run HydTopo process ###
        ###########################

        log.info("*** Preprocessing DEM ***")
        preprocess_dem.function(outputFolder, clippedDEM, studyAreaMask,
                                clippedStreamNetwork, streamAccThresh,
                                riverAccThresh, smoothDropBuffer, smoothDrop,
                                streamDrop, reconDEM, rerun)

    except Exception:
        arcpy.SetParameter(0, False)
        log.exception("Preprocessing DEM functions did not complete")
        raise
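
common.equalProjections above decides whether the study area mask needs reprojecting. A plausible sketch (an assumption, not the LUCI original) compares the two arcpy spatial references by EPSG factory code, falling back to their exported string form when no code is set:

def equalProjections(spatRefA, spatRefB):
    # Hypothetical helper: True if both references describe the same CRS
    if spatRefA.factoryCode and spatRefB.factoryCode:
        return spatRefA.factoryCode == spatRefB.factoryCode
    return spatRefA.exportToString() == spatRefB.exportToString()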
Example #22
0
def function(outputFolder, yearAFolder, yearBFolder, lsOption, slopeAngle,
             rData, soilData, soilCode, YearALCData, YearALCCode, YearBLCData,
             YearBLCCode, YearAPData, YearBPData, saveFactors):

    try:
        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "rusleAcc_")

        clipA = prefix + "clipA"
        clipB = prefix + "clipB"
        lossA = prefix + "lossA"
        lossB = prefix + "lossB"
        diffLoss = prefix + "diffLoss"
        diffNullZero = prefix + "diffNullZero"

        # Set output filenames
        soilLossA = os.path.join(outputFolder, "soillossA")
        soilLossB = os.path.join(outputFolder, "soillossB")
        soilLossDiff = os.path.join(outputFolder, "soillossDiff")

        # Set soil option for both years
        soilOption = 'LocalSoil'

        # Set LC option for both years
        lcOption = 'LocalCfactor'

        ################################
        ### Running RUSLE for Year A ###
        ################################

        log.info('*****************************')
        log.info('Running RUSLE tool for Year A')
        log.info('*****************************')

        filesA = common.getFilenames('preprocess', yearAFolder)
        studyMaskA = filesA.studyareamask

        # Call RUSLE function for Year A
        soilLoss = RUSLE.function(outputFolder, yearAFolder, lsOption,
                                  slopeAngle, soilOption, soilData, soilCode,
                                  lcOption, YearALCData, YearALCCode, rData,
                                  saveFactors, YearAPData)

        arcpy.CopyRaster_management(soilLoss, soilLossA)

        # Delete intermediate files
        arcpy.Delete_management(soilLoss)

        ################################
        ### Running RUSLE for Year B ###
        ################################

        log.info('*****************************')
        log.info('Running RUSLE tool for Year B')
        log.info('*****************************')

        filesB = common.getFilenames('preprocess', yearBFolder)
        studyMaskB = filesB.studyareamask

        # Call RUSLE function for Year B
        soilLoss = RUSLE.function(outputFolder, yearBFolder, lsOption,
                                  slopeAngle, soilOption, soilData, soilCode,
                                  lcOption, YearBLCData, YearBLCCode, rData,
                                  saveFactors, YearBPData)

        arcpy.CopyRaster_management(soilLoss, soilLossB)

        # Delete intermediate files
        arcpy.Delete_management(soilLoss)

        #######################################################
        ### Calculate differences between Year A and Year B ###
        #######################################################

        log.info('*************************************************')
        log.info('Calculating differences between Year A and Year B')
        log.info('*************************************************')

        # Copy soil loss layers to temporary files
        arcpy.CopyRaster_management(soilLossA, lossA)
        arcpy.CopyRaster_management(soilLossB, lossB)

        diffTemp = Raster(lossB) - Raster(lossA)
        diffTemp.save(diffLoss)
        del diffTemp

        log.info('Removing the areas of zero difference')
        diffNullTemp = SetNull(diffLoss, diffLoss, "VALUE = 0")
        diffNullTemp.save(diffNullZero)
        del diffNullTemp

        arcpy.CopyRaster_management(diffNullZero, soilLossDiff)

        log.info("RUSLE accounts function completed successfully")

    except Exception:
        arcpy.AddError("RUSLE accounts function failed")
        raise

    finally:
        # Remove feature layers from memory
        try:
            for lyr in common.listFeatureLayers(locals()):
                arcpy.Delete_management(locals()[lyr])
                exec(lyr + ' = None') in locals()
        except Exception:
            pass
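
The difference step above reduces to three lines of map algebra; a condensed sketch, assuming Spatial Analyst is checked out and that the two soil loss rasters already exist at the hypothetical paths shown:

from arcpy.sa import Raster, SetNull

diff = Raster("soillossB") - Raster("soillossA")  # per-cell change in loss
masked = SetNull(diff, diff, "VALUE = 0")         # drop unchanged cells
masked.save("soillossDiff")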
Example #23
0
def function(outputFolder,
             DEM,
             studyAreaMask,
             streamInput,
             minAccThresh,
             majAccThresh,
             smoothDropBuffer,
             smoothDrop,
             streamDrop,
             reconDEM,
             rerun=False):

    try:
        # Set environment variables
        arcpy.env.compression = "None"
        arcpy.env.snapRaster = DEM
        arcpy.env.extent = DEM
        arcpy.env.cellSize = arcpy.Describe(DEM).meanCellWidth

        ########################
        ### Define filenames ###
        ########################

        files = common.getFilenames('preprocess', outputFolder)

        rawDEM = files.rawDEM
        hydDEM = files.hydDEM
        hydFDR = files.hydFDR
        hydFDRDegrees = files.hydFDRDegrees
        hydFAC = files.hydFAC
        streamInvRas = files.streamInvRas  # Inverse stream raster - 0 for stream, 1 for no stream
        streams = files.streams
        streamDisplay = files.streamDisplay
        multRaster = files.multRaster
        hydFACInt = files.hydFACInt
        slopeRawDeg = files.slopeRawDeg
        slopeRawPer = files.slopeRawPer
        slopeHydDeg = files.slopeHydDeg
        slopeHydPer = files.slopeHydPer

        ###############################
        ### Set temporary variables ###
        ###############################

        prefix = os.path.join(arcpy.env.scratchGDB, "base_")

        cellSizeDEM = float(arcpy.env.cellSize)

        burnedDEM = prefix + "burnedDEM"
        streamAccHaFile = prefix + "streamAccHa"
        rawFDR = prefix + "rawFDR"
        allPolygonSinks = prefix + "allPolygonSinks"
        DEMTemp = prefix + "DEMTemp"
        hydFACTemp = prefix + "hydFACTemp"

        # Saved as .tif because saving as an ESRI grid failed on the server
        streamsRasterFile = os.path.join(arcpy.env.scratchFolder,
                                         "base_") + "StreamsRaster.tif"

        ###############################
        ### Save DEM to base folder ###
        ###############################

        codeBlock = 'Save DEM'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Save DEM to base folder as raw DEM with no compression
            pixelType = int(
                arcpy.GetRasterProperties_management(DEM,
                                                     "VALUETYPE").getOutput(0))

            if pixelType == 9:  # 32 bit float
                arcpy.CopyRaster_management(DEM,
                                            rawDEM,
                                            pixel_type="32_BIT_FLOAT")
            else:
                log.info("Converting DEM to 32 bit floating type")
                arcpy.CopyRaster_management(DEM, DEMTemp)
                arcpy.CopyRaster_management(Float(DEMTemp),
                                            rawDEM,
                                            pixel_type="32_BIT_FLOAT")

                # Delete temporary DEM
                arcpy.Delete_management(DEMTemp)

            # Calculate statistics for raw DEM
            arcpy.CalculateStatistics_management(rawDEM)

            progress.logProgress(codeBlock, outputFolder)

        ################################
        ### Create multiplier raster ###
        ################################

        codeBlock = 'Create multiplier raster'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            Reclassify(rawDEM, "Value", RemapRange([[-999999.9, 999999.9, 1]]),
                       "NODATA").save(multRaster)
            progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Calculate slope in percent'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            intSlopeRawPer = Slope(rawDEM, "PERCENT_RISE")
            intSlopeRawPer.save(slopeRawPer)
            del intSlopeRawPer

            log.info('Slope calculated in percent')

            progress.logProgress(codeBlock, outputFolder)

        if reconDEM is True:

            #######################
            ### Burn in streams ###
            #######################

            codeBlock = 'Burn in streams'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                # Recondition DEM (burn stream network in using the AGREE method)
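                # AGREE lowers DEM elevations towards the stream network: a
                # gradual drop within the smoothing buffer and a sharper drop
                # at the streams, so derived flow paths follow mapped streams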
                log.info("Burning streams into DEM.")
                reconditionDEM.function(rawDEM, streamInput, smoothDropBuffer,
                                        smoothDrop, streamDrop, burnedDEM)
                log.info("Completed stream network burn in to DEM")

                progress.logProgress(codeBlock, outputFolder)

            ##################
            ### Fill sinks ###
            ##################

            codeBlock = 'Fill sinks'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                Fill(burnedDEM).save(hydDEM)

                log.info("Sinks in DEM filled")
                progress.logProgress(codeBlock, outputFolder)

            ######################
            ### Flow direction ###
            ######################

            codeBlock = 'Flow direction'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                FlowDirection(hydDEM, "NORMAL").save(hydFDR)
                log.info("Flow Direction calculated")
                progress.logProgress(codeBlock, outputFolder)

            #################################
            ### Flow direction in degrees ###
            #################################

            codeBlock = 'Flow direction in degrees'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                # Save flow direction raster in degrees (for display purposes)
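                # D8 direction codes are powers of two (1=E, 2=SE, 4=S, 8=SW,
                # 16=W, 32=NW, 64=N, 128=NE); remap to compass bearings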
                degreeValues = RemapValue([[1, 90], [2, 135], [4, 180],
                                           [8, 225], [16, 270], [32, 315],
                                           [64, 0], [128, 45]])
                Reclassify(hydFDR, "Value", degreeValues,
                           "NODATA").save(hydFDRDegrees)
                progress.logProgress(codeBlock, outputFolder)

            #########################
            ### Flow accumulation ###
            #########################

            codeBlock = 'Flow accumulation'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                hydFACTemp = FlowAccumulation(hydFDR, "", "FLOAT")
                hydFACTemp.save(hydFAC)
                arcpy.sa.Int(Raster(hydFAC)).save(hydFACInt)  # integer version
                log.info("Flow Accumulation calculated")

                progress.logProgress(codeBlock, outputFolder)

            #######################
            ### Calculate slope ###
            #######################

            codeBlock = 'Calculate slope on burned DEM'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                intSlopeHydDeg = Slope(hydDEM, "DEGREE")
                intSlopeHydDeg.save(slopeHydDeg)
                del intSlopeHydDeg

                intSlopeHydPer = Slope(hydDEM, "PERCENT_RISE")
                intSlopeHydPer.save(slopeHydPer)
                del intSlopeHydPer

                log.info('Slope calculated')

                progress.logProgress(codeBlock, outputFolder)

            ##########################
            ### Create stream file ###
            ##########################

            codeBlock = 'Create stream file'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                # Convert flow accumulation from cell counts to hectares.
                # Use the saved hydFAC raster rather than hydFACTemp, which
                # holds only a path string on reruns that skip the flow
                # accumulation block above.
                streamAccHaFileInt = Raster(
                    hydFAC) * cellSizeDEM * cellSizeDEM / 10000.0
                streamAccHaFileInt.save(streamAccHaFile)
                del streamAccHaFileInt

                # Check stream initiation threshold reached
                streamYes = float(
                    arcpy.GetRasterProperties_management(
                        streamAccHaFile, "MAXIMUM").getOutput(0))

                if streamYes > float(minAccThresh):

                    reclassifyRanges = RemapRange(
                        [[-1000000, float(minAccThresh), 1],
                         [float(minAccThresh), 9999999999, 0]])

                    outLUCIstream = Reclassify(streamAccHaFile, "VALUE",
                                               reclassifyRanges)
                    outLUCIstream.save(streamInvRas)
                    del outLUCIstream
                    log.info("Stream raster for input to LUCI created")

                    # Create stream file for display
                    reclassifyRanges = RemapRange(
                        [[0, float(minAccThresh), "NODATA"],
                         [float(minAccThresh),
                          float(majAccThresh), 1],
                         [float(majAccThresh), 99999999999999, 2]])

                    streamsRaster = Reclassify(streamAccHaFile, "Value",
                                               reclassifyRanges, "NODATA")
                    streamOrderRaster = StreamOrder(streamsRaster, hydFDR,
                                                    "STRAHLER")
                    streamsRaster.save(streamsRasterFile)

                    # Create two streams feature classes - one for analysis and one for display
                    arcpy.sa.StreamToFeature(streamOrderRaster, hydFDR,
                                             streams, 'NO_SIMPLIFY')
                    arcpy.sa.StreamToFeature(streamOrderRaster, hydFDR,
                                             streamDisplay, 'SIMPLIFY')

                    # Rename grid_code column to 'Strahler'
                    for streamFC in [streams, streamDisplay]:

                        arcpy.AddField_management(streamFC, "Strahler", "LONG")
                        arcpy.CalculateField_management(
                            streamFC, "Strahler", "!GRID_CODE!", "PYTHON_9.3")
                        arcpy.DeleteField_management(streamFC, "GRID_CODE")

                    del streamsRaster
                    del streamOrderRaster

                    log.info("Stream files created")

                else:

                    warning = 'No streams initiated'
                    log.warning(warning)
                    common.logWarnings(outputFolder, warning)

                    # Create LUCIStream file from multiplier raster (i.e. all cells have value of 1 = no stream)
                    arcpy.CopyRaster_management(multRaster, streamInvRas)

                progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Clip data, build pyramids and generate statistics'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            try:
                # Generate pyramids and stats
                arcpy.BuildPyramidsandStatistics_management(
                    outputFolder, "", "", "", "")
                log.info(
                    "Pyramids and Statistics calculated for all LUCI topographical information rasters"
                )

            except Exception:
                log.info("Warning - could not generate all raster statistics")

            progress.logProgress(codeBlock, outputFolder)

        # Reset snap raster
        arcpy.env.snapRaster = None

    except Exception:
        log.error("Error in preprocessing operations")
        raise
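
A quick check of the hectare conversion used in the stream block above, with illustrative numbers only: flow accumulation counts upstream cells, so the drained area at a cell is count * cellsize^2 square metres, and dividing by 10,000 gives hectares.

cellSizeDEM = 10.0    # metres (hypothetical)
upstreamCells = 5000  # flow accumulation value at one cell

drainedHa = upstreamCells * cellSizeDEM * cellSizeDEM / 10000.0
print(drainedHa)      # 50.0 ha -> a stream cell if minAccThresh < 50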