Example #1
def initProgress(folder, rerun):

    try:
        xmlFile = getProgressFilenames(folder).xmlFile

        if not rerun:
            removeFile(xmlFile)

        # Create file if it does not exist
        if not os.path.exists(xmlFile):
            root = ET.Element("data")
            tree = ET.ElementTree(root)
            tree.write(xmlFile, encoding="utf-8", xml_declaration=True)
        else:
            # Open file for reading
            tree = ET.parse(xmlFile)
            root = tree.getroot()

        # Write scratch GDB to XML file if not already present
        scratchGDBNode = root.find('ScratchGDB')
        if scratchGDBNode is None:
            scratchGDBNode = createXMLNode(root, 'ScratchGDB')
            scratchGDBNode.text = str(arcpy.env.scratchGDB)

        try:
            # Save the XML file
            tree.write(xmlFile, encoding='utf-8', xml_declaration=True)

        except Exception:
            log.warning("Problem saving XML file " + str(xmlFile))
            raise

    except Exception:
        log.warning('Could not initialise progress.xml')
Example #2
def codeSuccessfullyRun(codeBlockName, folder, rerun):

    try:
        success = False
        xmlFile = getProgressFilenames(folder).xmlFile

        if rerun:
            try:
                # Open file for reading
                tree = ET.parse(xmlFile)
                root = tree.getroot()
                codeBlockNodes = root.findall('CodeBlock')

            except Exception:
                removeFile(xmlFile)

            else:
                codeBlockNames = []
                for codeBlockNode in codeBlockNodes:

                    names = codeBlockNode.findall('Name')
                    for name in names:
                        codeBlockNames.append(name.text)

                if codeBlockName in codeBlockNames:
                    success = True

        if success:
            log.info('Skipping: ' + str(codeBlockName))

        return success

    except Exception:
        log.warning('Could not check if code block was previously run')
        log.warning(traceback.format_exc())
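Examples 1 and 2 together form a simple checkpoint mechanism: initProgress creates (or reuses) progress.xml in the output folder, and codeSuccessfullyRun reports whether a named code block has already been recorded there, so a rerun can skip it. A minimal usage sketch, assuming the module is imported as in Example 9 and that progress.logProgress (called throughout Example 10) records the block once its work completes; the output folder path is hypothetical:

import LUCI_SEEA.lib.progress as progress

outputFolder = r"C:\LUCI\output"   # hypothetical output folder
rerun = False

progress.initProgress(outputFolder, rerun)

codeBlock = 'Save DEM'
if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
    # ... do the work for this block here ...
    progress.logProgress(codeBlock, outputFolder)  # record the block as done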
Example #3
def checkInputRaster(raster, baseFolder):

    noDataInRaster = int(arcpy.GetRasterProperties_management(raster, "ALLNODATA").getOutput(0)) # returns 0 (false) or 1 (true)
    if noDataInRaster:
        warning = 'Input raster ' + raster + ' only contains NoData values. Please check file.'
        log.warning(warning)
        common.logWarnings(baseFolder, warning)
Example #4
def checkInputFC(featureClass, baseFolder):

    # Find number of rows in feature class. arcpy.GetCount_management(featureClass).getOutput(0) should work but doesn't in some cases. KM
    rows = [row for row in arcpy.da.SearchCursor(featureClass, "*")]
    numFeatures = len(rows)
    if numFeatures == 0:
        warning = 'Input feature class ' + featureClass + ' contains no data. Please check file'
        log.warning(warning)
        common.logWarnings(baseFolder, warning)
Example #5
def createXMLNode(parent, name):

    try:
        newNode = ET.Element(name)
        parent.append(newNode)

        return newNode

    except Exception:
        log.warning("Could not create node " + name)
        raise
Example #6
def getProgressFilenames(folder):

    try:

        class Files:
            ''' Declare filenames here '''
            def __init__(self):
                self.xmlFile = "progress.xml"

        return common.addPath(Files(), folder)

    except Exception:
        log.warning("Error occurred while generating filenames")
        raise
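common.addPath is not shown in these examples; given that getProgressFilenames(folder).xmlFile is passed straight to os.path.exists and ET.parse (Examples 1, 2 and 9), it presumably joins the folder onto each filename attribute of the Files object. A rough sketch of that assumed behaviour, for orientation only:

import os

def addPath(obj, folder):
    # Assumed behaviour: prefix every string attribute (e.g. xmlFile) with the folder path
    for attr, value in list(vars(obj).items()):
        if isinstance(value, str):
            setattr(obj, attr, os.path.join(folder, value))
    return obj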
Example #7
def clipLargeDEM(DEM, StudyAreaMask):

    try:
        # Work out filesize of DEM
        cols = arcpy.GetRasterProperties_management(DEM, "COLUMNCOUNT").getOutput(0)
        rows = arcpy.GetRasterProperties_management(DEM, "ROWCOUNT").getOutput(0)
        bitType = int(arcpy.GetRasterProperties_management(DEM, "VALUETYPE").getOutput(0))

        if bitType <= 4:    # 8 bit
            bytes = 1
        elif bitType <= 6:  # 16 bit
            bytes = 2
        elif bitType <= 9:  # 32 bit
            bytes = 4
        elif bitType <= 14: # 64 bit
            bytes = 8
        else:
            bytes = 4

        sizeInGb = int(cols) * int(rows) * bytes / (1024 * 1024 * 1024)

        if sizeInGb > 1: # 1Gb
            log.info('Clipping DEM as original DEM is too large (approximately ' + str(sizeInGb) + 'Gb)')

            # Buffer study area mask by 5km            
            bufferSAM = os.path.join(arcpy.env.scratchGDB, "bufferSAM")
            arcpy.Buffer_analysis(StudyAreaMask, bufferSAM, "5000 meters", "FULL", "ROUND", "ALL")

            # Clip DEM to this buffered area
            bufferedDEM = os.path.join(arcpy.env.scratchWorkspace, "bufferedDEM")
            extent = arcpy.Describe(bufferSAM).extent
            arcpy.Clip_management(DEM, str(extent), bufferedDEM, bufferSAM, nodata_value="-3.402823e+038", clipping_geometry="ClippingGeometry", maintain_clipping_extent="NO_MAINTAIN_EXTENT")

            log.warning('Since the DEM is large, reconditioning and preprocessing operations may take a long time')

            return bufferedDEM
        else:
            return DEM

    except Exception:
        log.error("Error occurred when determining if DEM needs to be clipped or not")
        raise
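The byte-per-cell lookup above feeds a simple size estimate: columns × rows × bytes per cell, divided by 1024³ to give gigabytes. A quick worked check with made-up dimensions (true division used here for clarity):

# Hypothetical 30,000 x 30,000 cell DEM stored as 32-bit float (4 bytes per cell)
cols, rows, bytesPerCell = 30000, 30000, 4
sizeInGb = cols * rows * bytesPerCell / float(1024 * 1024 * 1024)
print(round(sizeInGb, 2))   # ~3.35 Gb, so clipLargeDEM would clip this DEM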
Example #8
def checkCoverage(maskA, maskB, inputFile=None):

    # Checks the coverage between polygons mask A and mask B

    # Set temporary variables
    prefix = os.path.join(arcpy.env.scratchGDB, "cover_")
    maskACopy = prefix + "maskACopy"
    maskBCopy = prefix + "maskBCopy"
    maskACoverage = prefix + "maskACoverage"

    # Copy maskA and maskB to temporary feature classes
    arcpy.CopyFeatures_management(maskA, maskACopy)
    arcpy.CopyFeatures_management(maskB, maskBCopy)

    percOut = 0

    if checkLicenceLevel('Advanced') or arcpy.ProductInfo() == "ArcServer":

        # Check if the fields Area_ha exist in maskACopy and maskBCopy
        # If they do, delete them

        maskAFields = arcpy.ListFields(maskACopy)
        for field in maskAFields:
            if field.name == "Area_ha":
                arcpy.DeleteField_management(maskACopy, "Area_ha")

        maskBFields = arcpy.ListFields(maskBCopy)
        for field in maskBFields:
            if field.name == "Area_ha":
                arcpy.DeleteField_management(maskBCopy, "Area_ha")

        # Calculate total area of the coverage (study area or contributing area)
        arcpy.AddField_management(maskBCopy, "Area_ha", "DOUBLE")
        exp = "!SHAPE.AREA@HECTARES!"
        arcpy.CalculateField_management(maskBCopy, "Area_ha", exp,
                                        "PYTHON_9.3")

        maskBCopyVal = 0
        for row in arcpy.da.SearchCursor(maskBCopy, ["Area_ha"]):
            maskBCopyVal += float(row[0])

        # Use symmetrical difference to find which areas in maskACopy and maskBCopy do not overlap
        arcpy.SymDiff_analysis(maskACopy, maskBCopy, maskACoverage)

        # Check if there are non-overlapping areas in the land use and coverage
        rows = [row for row in arcpy.da.SearchCursor(maskACoverage, "*")]
        numFeatures = len(rows)

        if numFeatures != 0:  # There is a discrepancy between mask A and mask B

            # Calculate the area of the polygons in the discrepancy
            arcpy.AddField_management(maskACoverage, "Sliver", "DOUBLE")
            exp = "!SHAPE.AREA@HECTARES!"
            arcpy.CalculateField_management(maskACoverage, "Sliver", exp,
                                            "PYTHON_9.3")

            area = 0
            for row in arcpy.da.SearchCursor(maskACoverage, ["Sliver"]):
                area += float(row[0])

            # Calculate percentage of the coverage area that the discrepancy covers
            percOut = float(area) / float(maskBCopyVal) * 100.0

            if percOut > 2.5:
                warning = 'Input data coverage is less than 97.5 percent of the study area'
                log.warning(warning)

                warning = 'This may cause discrepancies in later calculations'
                log.warning(warning)

                warning = 'Please check this input: ' + str(inputFile)
                log.warning(warning)

        # Delete temporary masks
        arcpy.Delete_management(maskACopy)
        arcpy.Delete_management(maskBCopy)
        arcpy.Delete_management(maskACoverage)

    else:
        log.warning(
            'Coverage discrepancies between soil, land use, and coverage extent not checked as advanced license not present.'
        )
        log.warning(
            'Please ensure the soil and land use shapefile cover at least 97.5 percent of the coverage extent'
        )
Example #9
def runSystemChecks(folder=None, rerun=False):

    import LUCI_SEEA.lib.progress as progress

    # Set overwrite output
    arcpy.env.overwriteOutput = True

    # Check spatial analyst licence is available
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")
    else:
        raise RuntimeError(
            "Spatial Analyst license not present or could not be checked out")

    ### Set workspaces so that temporary files are written to the LUCI scratch geodatabase ###
    if arcpy.ProductInfo() == "ArcServer":
        log.info('arcpy.env.scratchWorkspace on server: ' +
                 str(arcpy.env.scratchWorkspace))

        # Set current workspace
        arcpy.env.workspace = arcpy.env.scratchGDB
    else:

        # If rerunning a tool, check if scratch workspace has been set. If it has, use it as it is (with temporary rasters and feature classes from the previous run).
        scratchGDB = None

        if rerun:
            xmlFile = progress.getProgressFilenames(folder).xmlFile

            if os.path.exists(xmlFile):
                scratchGDB = readXML(xmlFile, 'ScratchGDB')

                if not arcpy.Exists(scratchGDB):
                    log.error('Previous scratch GDB ' + str(scratchGDB) +
                              ' does not exist. Tool cannot be rerun.')
                    log.error('Exiting tool')
                    sys.exit()

        if scratchGDB is None:

            # Set scratch path from values in user settings file if values present
            scratchPath = None
            try:
                if os.path.exists(configuration.userSettingsFile):

                    tree = ET.parse(configuration.userSettingsFile)
                    root = tree.getroot()
                    scratchPath = root.find("scratchPath").text

            except Exception:
                pass  # If any errors occur, ignore them. Just use the default scratch path.

            # Set scratch path if needed
            if scratchPath is None:
                scratchPath = configuration.scratchPath

            # Create scratch path folder
            if not os.path.exists(scratchPath):
                os.makedirs(scratchPath)

            # Remove old date/time stamped scratch folders if they exist and if they do not contain ArcGIS lock files.
            for root, dirs, files in os.walk(scratchPath):
                for dir in dirs:

                    # Try to rename folder. If this is possible then no locks are held on it and it can then be removed.
                    try:
                        fullDirPath = os.path.join(scratchPath, dir)
                        renamedDir = os.path.join(scratchPath,
                                                  'ready_for_deletion')
                        os.rename(fullDirPath, renamedDir)
                    except Exception:
                        # import traceback
                        # log.warning(traceback.format_exc())
                        pass
                    else:
                        try:
                            shutil.rmtree(renamedDir)
                        except Exception:
                            # import traceback
                            # log.warning(traceback.format_exc())
                            pass

            # Create new date/time stamped scratch folder for the scratch GDB to live in
            dateTimeStamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
            scratchGDBFolder = os.path.join(scratchPath,
                                            'scratch_' + dateTimeStamp)
            if not os.path.exists(scratchGDBFolder):
                os.mkdir(scratchGDBFolder)

            # Create scratch GDB
            scratchGDB = os.path.join(scratchGDBFolder, 'scratch.gdb')
            if not os.path.exists(scratchGDB):
                arcpy.CreateFileGDB_management(os.path.dirname(scratchGDB),
                                               os.path.basename(scratchGDB))

            # Try to remove old scratch path if still exists
            try:
                shutil.rmtree(configuration.oldScratchPath, ignore_errors=True)
            except Exception:
                pass

        # Set scratch and current workspaces
        arcpy.env.scratchWorkspace = scratchGDB
        arcpy.env.workspace = scratchGDB

        # Scratch folder
        scratchFolder = arcpy.env.scratchFolder
        if not os.path.exists(scratchFolder):
            os.mkdir(scratchFolder)

        # Remove all in_memory data sets
        arcpy.Delete_management("in_memory")

    # Check disk space for disk with scratch workspace
    freeSpaceGb = 3
    if getFreeDiskSpaceGb(arcpy.env.scratchWorkspace) < freeSpaceGb:
        log.warning("Disk containing scratch workspace has less than " +
                    str(freeSpaceGb) +
                    "Gb free space. This may cause this tool to fail.")
Example #10
def function(outputFolder,
             DEM,
             studyAreaMask,
             streamInput,
             minAccThresh,
             majAccThresh,
             smoothDropBuffer,
             smoothDrop,
             streamDrop,
             reconDEM,
             rerun=False):

    try:
        # Set environment variables
        arcpy.env.compression = "None"
        arcpy.env.snapRaster = DEM
        arcpy.env.extent = DEM
        arcpy.env.cellSize = arcpy.Describe(DEM).meanCellWidth

        ########################
        ### Define filenames ###
        ########################

        files = common.getFilenames('preprocess', outputFolder)

        rawDEM = files.rawDEM
        hydDEM = files.hydDEM
        hydFDR = files.hydFDR
        hydFDRDegrees = files.hydFDRDegrees
        hydFAC = files.hydFAC
        streamInvRas = files.streamInvRas  # Inverse stream raster - 0 for stream, 1 for no stream
        streams = files.streams
        streamDisplay = files.streamDisplay
        multRaster = files.multRaster
        hydFACInt = files.hydFACInt
        slopeRawDeg = files.slopeRawDeg
        slopeRawPer = files.slopeRawPer
        slopeHydDeg = files.slopeHydDeg
        slopeHydPer = files.slopeHydPer

        ###############################
        ### Set temporary variables ###
        ###############################

        prefix = os.path.join(arcpy.env.scratchGDB, "base_")

        cellSizeDEM = float(arcpy.env.cellSize)

        burnedDEM = prefix + "burnedDEM"
        streamAccHaFile = prefix + "streamAccHa"
        rawFDR = prefix + "rawFDR"
        allPolygonSinks = prefix + "allPolygonSinks"
        DEMTemp = prefix + "DEMTemp"
        hydFACTemp = prefix + "hydFACTemp"

        # Saved as .tif because it would not save as an ESRI grid on the server
        streamsRasterFile = os.path.join(arcpy.env.scratchFolder,
                                         "base_") + "StreamsRaster.tif"

        ###############################
        ### Save DEM to base folder ###
        ###############################

        codeBlock = 'Save DEM'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Save DEM to base folder as raw DEM with no compression
            pixelType = int(
                arcpy.GetRasterProperties_management(DEM,
                                                     "VALUETYPE").getOutput(0))

            if pixelType == 9:  # 32 bit float
                arcpy.CopyRaster_management(DEM,
                                            rawDEM,
                                            pixel_type="32_BIT_FLOAT")
            else:
                log.info("Converting DEM to 32 bit floating type")
                arcpy.CopyRaster_management(DEM, DEMTemp)
                arcpy.CopyRaster_management(Float(DEMTemp),
                                            rawDEM,
                                            pixel_type="32_BIT_FLOAT")

                # Delete temporary DEM
                arcpy.Delete_management(DEMTemp)

            # Calculate statistics for raw DEM
            arcpy.CalculateStatistics_management(rawDEM)

            progress.logProgress(codeBlock, outputFolder)

        ################################
        ### Create multiplier raster ###
        ################################

        codeBlock = 'Create multiplier raster'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            Reclassify(rawDEM, "Value", RemapRange([[-999999.9, 999999.9, 1]]),
                       "NODATA").save(multRaster)
            progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Calculate slope in percent'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            intSlopeRawPer = Slope(rawDEM, "PERCENT_RISE")
            intSlopeRawPer.save(slopeRawPer)
            del intSlopeRawPer

            log.info('Slope calculated in percent')

            progress.logProgress(codeBlock, outputFolder)

        if reconDEM is True:

            #######################
            ### Burn in streams ###
            #######################

            codeBlock = 'Burn in streams'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                # Recondition DEM (burning stream network in using AGREE method)
                log.info("Burning streams into DEM.")
                reconditionDEM.function(rawDEM, streamInput, smoothDropBuffer,
                                        smoothDrop, streamDrop, burnedDEM)
                log.info("Completed stream network burn in to DEM")

                progress.logProgress(codeBlock, outputFolder)

            ##################
            ### Fill sinks ###
            ##################

            codeBlock = 'Fill sinks'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                Fill(burnedDEM).save(hydDEM)

                log.info("Sinks in DEM filled")
                progress.logProgress(codeBlock, outputFolder)

            ######################
            ### Flow direction ###
            ######################

            codeBlock = 'Flow direction'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                FlowDirection(hydDEM, "NORMAL").save(hydFDR)
                log.info("Flow Direction calculated")
                progress.logProgress(codeBlock, outputFolder)

            #################################
            ### Flow direction in degrees ###
            #################################

            codeBlock = 'Flow direction in degrees'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                # Save flow direction raster in degrees (for display purposes)
                degreeValues = RemapValue([[1, 90], [2, 135], [4, 180],
                                           [8, 225], [16, 270], [32, 315],
                                           [64, 0], [128, 45]])
                Reclassify(hydFDR, "Value", degreeValues,
                           "NODATA").save(hydFDRDegrees)
                progress.logProgress(codeBlock, outputFolder)

            #########################
            ### Flow accumulation ###
            #########################

            codeBlock = 'Flow accumulation'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                hydFACTemp = FlowAccumulation(hydFDR, "", "FLOAT")
                hydFACTemp.save(hydFAC)
                arcpy.sa.Int(Raster(hydFAC)).save(hydFACInt)  # integer version
                log.info("Flow Accumulation calculated")

                progress.logProgress(codeBlock, outputFolder)

            #######################
            ### Calculate slope ###
            #######################

            codeBlock = 'Calculate slope on burned DEM'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                intSlopeHydDeg = Slope(hydDEM, "DEGREE")
                intSlopeHydDeg.save(slopeHydDeg)
                del intSlopeHydDeg

                intSlopeHydPer = Slope(hydDEM, "PERCENT_RISE")
                intSlopeHydPer.save(slopeHydPer)
                del intSlopeHydPer

                log.info('Slope calculated')

                progress.logProgress(codeBlock, outputFolder)

            ##########################
            ### Create stream file ###
            ##########################

            codeBlock = 'Create stream file'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder,
                                                rerun):

                # Create accumulation area in hectares
                streamAccHaFileInt = hydFACTemp * cellSizeDEM * cellSizeDEM / 10000.0
                streamAccHaFileInt.save(streamAccHaFile)
                del streamAccHaFileInt

                # Check stream initiation threshold reached
                streamYes = float(
                    arcpy.GetRasterProperties_management(
                        streamAccHaFile, "MAXIMUM").getOutput(0))

                if streamYes > float(minAccThresh):

                    reclassifyRanges = RemapRange(
                        [[-1000000, float(minAccThresh), 1],
                         [float(minAccThresh), 9999999999, 0]])

                    outLUCIstream = Reclassify(streamAccHaFile, "VALUE",
                                               reclassifyRanges)
                    outLUCIstream.save(streamInvRas)
                    del outLUCIstream
                    log.info("Stream raster for input to LUCI created")

                    # Create stream file for display
                    reclassifyRanges = RemapRange(
                        [[0, float(minAccThresh), "NODATA"],
                         [float(minAccThresh),
                          float(majAccThresh), 1],
                         [float(majAccThresh), 99999999999999, 2]])

                    streamsRaster = Reclassify(streamAccHaFile, "Value",
                                               reclassifyRanges, "NODATA")
                    streamOrderRaster = StreamOrder(streamsRaster, hydFDR,
                                                    "STRAHLER")
                    streamsRaster.save(streamsRasterFile)

                    # Create two streams feature classes - one for analysis and one for display
                    arcpy.sa.StreamToFeature(streamOrderRaster, hydFDR,
                                             streams, 'NO_SIMPLIFY')
                    arcpy.sa.StreamToFeature(streamOrderRaster, hydFDR,
                                             streamDisplay, 'SIMPLIFY')

                    # Rename grid_code column to 'Strahler'
                    for streamFC in [streams, streamDisplay]:

                        arcpy.AddField_management(streamFC, "Strahler", "LONG")
                        arcpy.CalculateField_management(
                            streamFC, "Strahler", "!GRID_CODE!", "PYTHON_9.3")
                        arcpy.DeleteField_management(streamFC, "GRID_CODE")

                    del streamsRaster
                    del streamOrderRaster

                    log.info("Stream files created")

                else:

                    warning = 'No streams initiated'
                    log.warning(warning)
                    common.logWarnings(outputFolder, warning)

                    # Create LUCIStream file from multiplier raster (i.e. all cells have value of 1 = no stream)
                    arcpy.CopyRaster_management(multRaster, streamInvRas)

                progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Clip data, build pyramids and generate statistics'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            try:
                # Generate pyramids and stats
                arcpy.BuildPyramidsandStatistics_management(
                    outputFolder, "", "", "", "")
                log.info(
                    "Pyramids and Statistics calculated for all LUCI topographical information rasters"
                )

            except Exception:
                log.warning("Could not generate all raster statistics")

            progress.logProgress(codeBlock, outputFolder)

        # Reset snap raster
        arcpy.env.snapRaster = None

    except Exception:
        log.error("Error in preprocessing operations")
        raise
Example #11
def function(params):

    try:
        ###################
        ### Read inputs ###
        ###################

        pText = common.paramsAsText(params)

        outputFolder = pText[1]
        inputDEM = common.fullPath(pText[2])
        inputStudyAreaMask = pText[3]
        inputLC = pText[4]
        lcCode = pText[5]
        inputSoil = pText[6]
        soilCode = pText[7]
        reconDEM = common.strToBool(pText[8])
        inputStreamNetwork = pText[9]
        streamAccThresh = pText[10]
        riverAccThresh = pText[11]
        smoothDropBuffer = pText[12]
        smoothDrop = pText[13]
        streamDrop = pText[14]
        rerun = common.strToBool(pText[15])

        log.info('Inputs read in')

        ###########################
        ### Tool initialisation ###
        ###########################

        # Create Baseline folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Run system checks
        common.runSystemChecks(outputFolder, rerun)

        # Set up progress log file
        progress.initProgress(outputFolder, rerun)

        # Write input params to XML
        common.writeParamsToXML(params, outputFolder, 'PreprocessDEM')

        log.info('Tool initialised')

        ########################
        ### Define filenames ###
        ########################

        files = common.getFilenames('preprocess', outputFolder)
        studyAreaMask = files.studyareamask
        outputLCras = files.lc_ras
        outputLCvec = files.lc_vec
        outputSoilras = files.soil_ras
        outputSoilvec = files.soil_vec

        ###############################
        ### Set temporary variables ###
        ###############################

        prefix = os.path.join(arcpy.env.scratchGDB, 'base_')

        DEMTemp = prefix + 'DEMTemp'
        clippedDEM = prefix + 'clippedDEM'
        clippedLC = prefix + 'clippedLC'
        clippedSoil = prefix + 'clippedSoil'
        clippedStreamNetwork = prefix + 'clippedStreamNetwork'

        studyAreaMaskTemp = prefix + "studyAreaMaskTemp"
        studyAreaMaskBuff = prefix + "studyAreaMaskBuff"
        studyAreaMaskDiss = prefix + "studyAreaMaskDiss"

        log.info('Temporary variables set')

        # Check formats of inputs
        lcFormat = arcpy.Describe(inputLC).dataType
        soilFormat = arcpy.Describe(inputSoil).dataType

        ###################
        ### Data checks ###
        ###################

        codeBlock = 'Data checks 1'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            inputFiles = [inputDEM, inputStudyAreaMask, inputLC, inputSoil]
            if inputStreamNetwork is not None:
                inputFiles.append(inputStreamNetwork)

            for file in inputFiles:
                common.checkSpatialRef(file)

            # Set environment variables
            arcpy.env.snapRaster = inputDEM
            arcpy.env.cellSize = inputDEM
            arcpy.env.compression = "None"

            cellsizedem = float(
                arcpy.GetRasterProperties_management(inputDEM,
                                                     "CELLSIZEX").getOutput(0))

            # Get spatial references of DEM and study area mask
            DEMSpatRef = arcpy.Describe(inputDEM).SpatialReference
            maskSpatRef = arcpy.Describe(inputStudyAreaMask).SpatialReference

            # Reproject study area mask if it does not have the same coordinate system as the DEM
            if not common.equalProjections(DEMSpatRef, maskSpatRef):

                warning = "Study area mask does not have the same coordinate system as the DEM"
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                warning = "Mask coordinate system is " + maskSpatRef.Name + " while DEM coordinate system is " + DEMSpatRef.Name
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                warning = "Reprojecting study area mask to DEM coordinate system"
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                arcpy.Project_management(inputStudyAreaMask, studyAreaMaskTemp,
                                         DEMSpatRef)
                arcpy.CopyFeatures_management(studyAreaMaskTemp, studyAreaMask)
            else:
                arcpy.CopyFeatures_management(inputStudyAreaMask,
                                              studyAreaMask)

            # If DEM is large, clip it to a large buffer around the study area mask (~5km)
            inputDEM = baseline.clipLargeDEM(inputDEM, studyAreaMask)

            rasterInputFiles = []
            fcInputFiles = []

            # Sort land cover and soil into appropriate arrays based on data type
            if lcFormat in ['RasterDataset', 'RasterLayer']:
                rasterInputFiles.append(inputLC)
                outputLC = os.path.join(outputFolder, 'landcover')

            elif lcFormat in ['ShapeFile', 'FeatureClass']:
                fcInputFiles.append(inputLC)
                outputLC = os.path.join(outputFolder, 'landcover.shp')

            if soilFormat in ['RasterDataset', 'RasterLayer']:
                rasterInputFiles.append(inputSoil)
                outputSoil = os.path.join(outputFolder, 'soil')

            elif soilFormat in ['ShapeFile', 'FeatureClass']:
                fcInputFiles.append(inputSoil)
                outputSoil = os.path.join(outputFolder, 'soil.shp')

            if reconDEM is True and inputStreamNetwork is None:
                log.error(
                    'Cannot recondition the DEM without an input stream network'
                )
                log.error('Please provide an input stream network')
                sys.exit()

            # If the user has provided a stream network, add it to the list of inputs to check
            if inputStreamNetwork is not None:
                fcInputFiles.append(inputStreamNetwork)

            # Check that the inputs contain data
            for ras in rasterInputFiles:
                if ras is not None:

                    # Check file size
                    fileSizeGB = baseline.checkRasterSizeGB(ras)

                    if fileSizeGB < 1.0:
                        baseline.checkInputRaster(ras, outputFolder)

                    else:
                        log.warning(
                            "Cannot check if raster is empty or all NoData because it is too large"
                        )
                        log.warning(
                            "Please ensure this raster is not empty or all NoData: "
                            + str(ras))

            for fc in fcInputFiles:
                if fc is not None:
                    baseline.checkInputFC(fc, outputFolder)

            # Check that the land cover and soil FCs have the linking codes specified by the user
            if lcFormat in ['ShapeFile', 'FeatureClass']:
                if len(arcpy.ListFields(inputLC, lcCode)) != 1:
                    log.error('Field ' + lcCode +
                              ' does not exist in feature class ' + inputLC)
                    sys.exit()

            if soilFormat in ['ShapeFile', 'FeatureClass']:
                if len(arcpy.ListFields(inputSoil, soilCode)) != 1:
                    log.error('Field ' + soilCode +
                              ' does not exist in feature class ' + inputSoil)
                    sys.exit()

            progress.logProgress(codeBlock, outputFolder)

        ###############################
        ### Tidy up study area mask ###
        ###############################

        codeBlock = 'Tidy up study area mask'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Check how many polygons are in the mask shapefile
            numPolysInMask = int(
                arcpy.GetCount_management(studyAreaMask).getOutput(0))
            if numPolysInMask > 1:

                # Reduce multiple features where possible
                arcpy.Union_analysis(studyAreaMask, studyAreaMaskDiss,
                                     "ONLY_FID", "", "NO_GAPS")
                arcpy.Dissolve_management(studyAreaMaskDiss, studyAreaMask, "",
                                          "", "SINGLE_PART", "DISSOLVE_LINES")

            # Buffer study area mask
            baseline.bufferMask(inputDEM,
                                studyAreaMask,
                                outputStudyAreaMaskBuff=studyAreaMaskBuff)
            log.info('Study area mask buffered')

            progress.logProgress(codeBlock, outputFolder)

        #######################
        ### Clip input data ###
        #######################

        codeBlock = 'Clip inputs'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            baseline.clipInputs(outputFolder,
                                studyAreaMaskBuff,
                                inputDEM,
                                inputLC,
                                inputSoil,
                                inputStreamNetwork,
                                outputDEM=clippedDEM,
                                outputLC=clippedLC,
                                outputSoil=clippedSoil,
                                outputStream=clippedStreamNetwork)

            progress.logProgress(codeBlock, outputFolder)

        ##############################################
        ### Coverage checks on soil and land cover ###
        ##############################################

        codeBlock = 'Do coverage checks on clipped land cover and soil'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Do coverage checks on land cover and soil and copy to outputFolder
            if lcFormat in ['RasterDataset', 'RasterLayer']:

                lcMask = common.extractRasterMask(clippedLC)
                common.checkCoverage(lcMask, studyAreaMaskBuff, inputLC)

                arcpy.CopyRaster_management(clippedLC, outputLCras)

            elif lcFormat in ['ShapeFile', 'FeatureClass']:
                lcMask = common.dissolvePolygon(clippedLC)
                common.checkCoverage(lcMask, studyAreaMaskBuff, inputLC)

                arcpy.CopyFeatures_management(clippedLC, outputLCvec)

            if soilFormat in ['RasterDataset', 'RasterLayer']:

                soilMask = common.extractRasterMask(clippedSoil)
                common.checkCoverage(soilMask, studyAreaMaskBuff, inputSoil)

                arcpy.CopyRaster_management(clippedSoil, outputSoilras)

            elif soilFormat in ['ShapeFile', 'FeatureClass']:

                soilMask = common.dissolvePolygon(clippedSoil)
                common.checkCoverage(soilMask, studyAreaMaskBuff, inputSoil)

                arcpy.CopyFeatures_management(clippedSoil, outputSoilvec)

            progress.logProgress(codeBlock, outputFolder)

        ######################################
        ### Convert LC and soil to rasters ###
        ######################################

        # For the RUSLE tool, the LC and soil must be in raster format

        codeBlock = 'Convert land cover and soil to rasters'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            if lcFormat in ['ShapeFile', 'FeatureClass']:
                arcpy.PolygonToRaster_conversion(clippedLC, lcCode,
                                                 outputLCras, "CELL_CENTER",
                                                 "", cellsizedem)
                log.info('Land cover raster produced')

            if soilFormat in ['ShapeFile', 'FeatureClass']:
                arcpy.PolygonToRaster_conversion(clippedSoil, soilCode,
                                                 outputSoilras, "CELL_CENTER",
                                                 "", cellsizedem)
                log.info('Soil raster produced')

            # Delete intermediate files
            arcpy.Delete_management(clippedLC)
            arcpy.Delete_management(clippedSoil)

            progress.logProgress(codeBlock, outputFolder)

        ###########################
        ### Run HydTopo process ###
        ###########################

        log.info("*** Preprocessing DEM ***")
        preprocess_dem.function(outputFolder, clippedDEM, studyAreaMask,
                                clippedStreamNetwork, streamAccThresh,
                                riverAccThresh, smoothDropBuffer, smoothDrop,
                                streamDrop, reconDEM, rerun)

    except Exception:
        arcpy.SetParameter(0, False)
        log.exception("Preprocessing DEM functions did not complete")
        raise