def roadsNonMonoCheck():
    """Run the Roads & Highways non-monotonic routes check on networkToReview.

    Checks out the "Highways" extension, rebuilds nonMonotonicOutputGDB,
    runs DetectNonMonotonicRoutes_locref, and always attempts to check the
    extension back in. Relies on module-level names: networkToReview,
    nonMonotonicOutputGDB, nonMonotonicOutputFC, mainFolder, and
    returnGDBOrSDEName.
    """
    try:
        # Check out license
        print('The result of CheckExtension("Highways") is ' + str(CheckExtension("Highways")) + '.')
        if CheckExtension("Highways") == 'Available':
            CheckOutExtension("Highways")

            # Do the license check before the deletion, so that you don't
            # remove data and then not put it back in the case that the
            # license is not available.
            from arcpy import DetectNonMonotonicRoutes_locref

            # Best-effort removal of the previous output GDB.
            if Exists(nonMonotonicOutputGDB):
                try:
                    Delete_management(nonMonotonicOutputGDB)
                except:
                    pass

            nonMonotonicOutputGDBName = returnGDBOrSDEName(nonMonotonicOutputGDB)

            CreateFileGDB_management(mainFolder, nonMonotonicOutputGDBName)
            time.sleep(1)

            DetectNonMonotonicRoutes_locref(networkToReview, nonMonotonicOutputFC, "Any", "F_Date", "T_Date", "SourceRouteId")

            print("The Roads & Highways Non-Monotonic routes check for " + str(networkToReview) + " has completed.\n")

        else:
            print('The Roads & Highways extension is not currently available.')
            print('Skipping R&H Non-Monotonicity check.')

    except Exception as Exception1:
        # If an error occurred, print line number and error message.
        # BUGFIX: the originals were Py2-only print statements and used the
        # removed/deprecated Exception.message attribute; use print() and
        # str(exc) so this stays consistent with the rest of the module.
        import traceback, sys
        tb = sys.exc_info()[2]
        print("Line %i" % tb.tb_lineno)
        print(str(Exception1))
        try:
            del Exception1
        except:
            pass
    finally:
        try:
            # Check the license back in
            CheckInExtension("Highways")
        except:
            pass
def main():
    """Rebuild output_gdb1 from scratch, then analyze the polylines in fc1."""
    gdb_name = returnGDBOrSDEName(output_gdb1)

    # Allow geoprocessing tools to overwrite existing outputs.
    arcpy.env.overwriteOutput = True

    # Remove any previous copy of the output GDB before recreating it.
    if arcpy.Exists(output_gdb1):
        arcpy.Delete_management(output_gdb1)
    # Give the filesystem a moment to release any lingering locks.
    time.sleep(10)
    arcpy.CreateFileGDB_management(mainFolder, gdb_name)

    AnalyzePolylines(fc1, route_id_fld1, output_gdb1)
# Exemplo n.º 3
# 0
def main():
    """Delete and re-create output_gdb1, then run AnalyzePolylines on fc1."""
    # Name portion of the file GDB to (re)create.
    new_gdb_name = returnGDBOrSDEName(output_gdb1)
    arcpy.env.overwriteOutput = True  # other prep

    # Drop the stale GDB if one is present, then wait for locks to clear.
    gdb_is_present = arcpy.Exists(output_gdb1)
    if gdb_is_present:
        arcpy.Delete_management(output_gdb1)
    time.sleep(10)

    arcpy.CreateFileGDB_management(mainFolder, new_gdb_name)
    AnalyzePolylines(fc1, route_id_fld1, output_gdb1)
def mainWithPrefixSets():
    """Run the polyline analysis once per prefix set.

    For each key in prefixSetErrorReportingDict, rebinds the module-level
    globals (prefixKeyString, fc1, route_id_fld1, output_gdb1) from that
    prefix set's csipDict in outerTestDict, rebuilds the output file GDB,
    and calls AnalyzePolylines.
    """
    # For now, use globals.
    # Make into prettier/prefixSetFirst Python later, that uses
    # dictionary values for everything, including default dictionary values
    # for when the usePrefixSetTestingAndReporting value is false.
    # Start a loop
    for prefixKeyItem in prefixSetErrorReportingDict.keys():
        global prefixKeyString
        prefixKeyString = prefixKeyItem
        # Then, set the necessary variables from the dict
        # for the current prefix set in the list.
        prefixKeyItemDict = outerTestDict[prefixKeyItem]
        csipDict = prefixKeyItemDict["csipDict"]
        
        # Rebind the input feature class for this prefix set.
        global fc1
        csip_routes = csipDict["csip_routes"]
        fc1 = csip_routes
        print("fc1 = " + str(fc1) + ".")
        
        # Rebind the route ID field for this prefix set.
        global route_id_fld1
        csip_route_id_fld1 = csipDict["csip_route_id_fld1"]
        route_id_fld1 = csip_route_id_fld1
        print("route_id_fld1 = " + str(route_id_fld1) + ".")
        
        # Rebind the output GDB for this prefix set.
        global output_gdb1
        csip_output_gdb1 = csipDict["csip_output_gdb1"]
        output_gdb1 = csip_output_gdb1
        print("output_gdb1 = " + str(output_gdb1) + ".")
        
        # added variable for FileGDB creation
        output_gdb1_name = returnGDBOrSDEName(output_gdb1)
        
        # other prep statements
        arcpy.env.overwriteOutput = True
        
        # If the outputGDB already exists, delete it.
        if arcpy.Exists(output_gdb1):
            arcpy.Delete_management(output_gdb1)
        else:
            pass
        # Give the filesystem time to release locks before recreating.
        time.sleep(10)
        
        # Then, (re)create it.
        arcpy.CreateFileGDB_management(mainFolder, output_gdb1_name)
        
        AnalyzePolylines(fc1, route_id_fld1, output_gdb1)
# Exemplo n.º 5
# 0
def mainWithPrefixSets():
    """Loop over every prefix set, rebinding the module globals and
    rebuilding the output GDB before running AnalyzePolylines for each one.
    """
    # `global` is function-wide, so declare everything up front.
    global prefixKeyString
    global fc1
    global route_id_fld1
    global output_gdb1

    for currentPrefixKey in prefixSetErrorReportingDict.keys():
        prefixKeyString = currentPrefixKey

        # Pull the per-prefix configuration out of the nested dicts.
        currentCsipDict = outerTestDict[currentPrefixKey]["csipDict"]

        fc1 = currentCsipDict["csip_routes"]
        print("fc1 = " + str(fc1) + ".")

        route_id_fld1 = currentCsipDict["csip_route_id_fld1"]
        print("route_id_fld1 = " + str(route_id_fld1) + ".")

        output_gdb1 = currentCsipDict["csip_output_gdb1"]
        print("output_gdb1 = " + str(output_gdb1) + ".")

        # Recreate the output file GDB from scratch for this prefix set.
        targetGDBName = returnGDBOrSDEName(output_gdb1)
        arcpy.env.overwriteOutput = True
        if arcpy.Exists(output_gdb1):
            arcpy.Delete_management(output_gdb1)
        # Wait for the deletion's locks to clear before recreating.
        time.sleep(10)
        arcpy.CreateFileGDB_management(mainFolder, targetGDBName)

        AnalyzePolylines(fc1, route_id_fld1, output_gdb1)
from operator import itemgetter, attrgetter, methodcaller  ## For sorting.

from arcpy import Array as arcpyArray, Describe, env, Point, Polyline

from arcpy.da import SearchCursor as daSearchCursor, UpdateCursor as daUpdateCursor

from pathFunctions import (returnFeatureClass, returnGDBOrSDEName)

##import math

#from datareviewerchecks_config import (localRouteInputLines,
#    localRouteOutputLines)

# Input centerlines to build routes from; the workspace is derived from it.
routeFeaturesFC = r'C:\GIS\Geodatabases\KHUB\Manual_Edit_Aggregation\2017-03-08\Data_Mirroring_12B_AllRegions_IntegrationTest_Source.gdb\All_Road_Centerlines_Copy'

# Point the arcpy workspace at the feature class' containing GDB, if any.
if returnGDBOrSDEName(routeFeaturesFC) != None:
    env.workspace = returnGDBOrSDEName(routeFeaturesFC)
else:
    pass

print(env.workspace)

#currentXYTolerance = env.XYTolerance

# Read the XY tolerance from the feature class' spatial reference.
tempDesc = Describe(routeFeaturesFC)
currentSR = tempDesc.spatialReference
currentXYTolerance = currentSR.XYTolerance
print("currentXYTolerance: " + str(currentXYTolerance))
try:
    del tempDesc
except:
    # BUGFIX: the original `except:` clause had no body, which is a
    # syntax error; a bare `pass` restores the intended best-effort delete.
    pass
def routeCreation():
    """Build LRS routes from createRoutesInputFC into createRoutesOutputFC,
    derive start/end calibration points, merge them into one point feature
    class, and dissolve the merged points on RouteId/Measure.

    Relies on module-level configuration: createRoutesInputFC,
    createRoutesOutputFC, mainFolder, startCalibrationPoints,
    endCalibrationPoints, mergedCalibrationPoints,
    dissolvedCalibrationPoints, featureLayerCL_For_Start_CP,
    featureLayerCL_For_End_CP, nullable, and the path helper functions.
    """
    env.workspace = returnGDBOrSDEPath(createRoutesOutputFC)
    env.overwriteOutput = 1
    '''
    # Need to match what Transcend used. -- Done.
    routeId = 'SourceRouteId'
    measureSource = 'TWO_FIELDS'
    fMeasureField = 'SourceFromMeasure'
    tMeasureField = 'SourceToMeasure'
    coordinatePriority = 'UPPER_LEFT'
    measureFactor = 1
    measureOffset = 0
    ignoreGaps = True
    buildIndex = True
    '''
    # Modifications for County Target Network.
    routeId = 'TargetCountyLRSKey'
    measureSource = 'Shapelength'
    #fMeasureField = 'SourceFromMeasure'
    #tMeasureField = 'SourceToMeasure'
    coordinatePriority = 'LOWER_LEFT'
    measureFactor = 1
    measureOffset = 0
    ignoreGaps = True
    buildIndex = True

    # Rebuild the GDB that will hold the routes output from scratch.
    routesOutputGDB = returnGDBOrSDEPath(createRoutesOutputFC)
    routesOutputGDBName = returnGDBOrSDEName(routesOutputGDB)
    # Need to implement a new path function to get the GDB's folder.
    routesOutputGDBFolder = mainFolder
    if Exists(routesOutputGDB):
        Delete_management(routesOutputGDB)
    else:
        pass
    CreateFileGDB_management(routesOutputGDBFolder, routesOutputGDBName)

    # Checking to see if the copy for routes output exists.
    # If so, remove it.
    #if Exists(createRoutesOutputFC):
    #    Delete_management(createRoutesOutputFC)
    #else:
    #    pass

    print("Creating the lrs routes.")
    # CreateRoutes_lr GP Tool
    # NOTE(review): fMeasureField and tMeasureField are only assigned in the
    # commented-out "Transcend" block above, so this call raises NameError
    # unless they are defined at module scope -- confirm before running.
    CreateRoutes_lr(createRoutesInputFC, routeId, createRoutesOutputFC,
                    measureSource, fMeasureField, tMeasureField,
                    coordinatePriority, measureFactor, measureOffset,
                    ignoreGaps, buildIndex)

    print("Adding date fields to " + returnFeatureClass(createRoutesOutputFC) +
          ".")
    #Addfields:
    # F_Date (Date) -- populated with today's date via CalculateField below.
    AddField_management(createRoutesOutputFC, "F_Date", "DATE", "", "", "",
                        "F_Date", nullable)
    pyDateExpression = '''def pyFindTheDate():
        import time
        return time.strftime("%Y/%m/%d")'''

    CalculateField_management(createRoutesOutputFC, "F_Date",
                              "pyFindTheDate()", "PYTHON_9.3",
                              pyDateExpression)
    # T_Date (Date)
    AddField_management(createRoutesOutputFC, "T_Date", "DATE", "", "", "",
                        "T_Date", nullable)

    # ---- Add route calibration point creation steps for Start & End points. ----
    MakeFeatureLayer_management(createRoutesInputFC, 'tempFeatureLayer')

    # Checking to see if the output already exists.
    # If so, remove it so that it can be recreated.
    if Exists(startCalibrationPoints):
        Delete_management(startCalibrationPoints)
    else:
        pass
    if Exists(endCalibrationPoints):
        Delete_management(endCalibrationPoints)
    else:
        pass

    # Create 2 fieldInfo objects. Turn off all the fields in each one.
    # NOTE(review): both variables are read from the same featureDesc.fieldInfo
    # property -- confirm Describe returns independent copies on each access,
    # otherwise the start/end VISIBLE settings below act on one shared object.
    featureDesc = Describe('tempFeatureLayer')
    if featureDesc.dataType == "FeatureLayer":
        fieldInfo_For_Start_CP_Fields = featureDesc.fieldInfo
        fieldInfo_For_End_CP_Fields = featureDesc.fieldInfo
        # Use the count property to iterate through all the fields
        for index in range(0, fieldInfo_For_Start_CP_Fields.count):
            fieldInfo_For_Start_CP_Fields.setVisible(index, 'HIDDEN')
            fieldInfo_For_End_CP_Fields.setVisible(index, 'HIDDEN')

    # Turn on the needed fields.
    visibile_Fields_For_Start_CP_Layer = [routeId, 'SourceFromMeasure']
    for visibile_Field in visibile_Fields_For_Start_CP_Layer:
        tempIndex = fieldInfo_For_Start_CP_Fields.findFieldByName(
            visibile_Field)
        fieldInfo_For_Start_CP_Fields.setVisible(tempIndex, 'VISIBLE')
    # Create a feature layer that only shows the needed fields.
    MakeFeatureLayer_management(createRoutesInputFC,
                                featureLayerCL_For_Start_CP, "", "",
                                fieldInfo_For_Start_CP_Fields)
    # Use that feature layer to create the 1st calibration point set.
    FeatureVerticesToPoints_management(featureLayerCL_For_Start_CP,
                                       startCalibrationPoints, "START")

    # Turn on the needed fields.
    visibile_Fields_For_End_CP_Layer = [routeId, 'SourceToMeasure']
    for visibile_Field in visibile_Fields_For_End_CP_Layer:
        tempIndex = fieldInfo_For_End_CP_Fields.findFieldByName(visibile_Field)
        fieldInfo_For_End_CP_Fields.setVisible(tempIndex, 'VISIBLE')
    # Create a feature layer that only shows the needed fields.
    MakeFeatureLayer_management(createRoutesInputFC, featureLayerCL_For_End_CP,
                                "", "", fieldInfo_For_End_CP_Fields)
    # Use that feature layer to create the 2nd calibration point set.
    FeatureVerticesToPoints_management(featureLayerCL_For_End_CP,
                                       endCalibrationPoints, "END")

    # ---- Merge the Start & End calibration points. ----
    # Checking to see if the output already exists.
    # If so, remove it so that it can be recreated.
    if Exists(mergedCalibrationPoints):
        Delete_management(mergedCalibrationPoints)
    else:
        pass
    # RoutesSource_Start_CP.SourceRouteId to CalPts_Merge.RouteId
    # RoutesSource_End_CP.SourceRouteId to CalPts_Merge.RouteId
    # Field map 1: both inputs' route ID fields -> a single 'RouteId' field.
    mcp_Field1 = FieldMap()
    mcp_Field1.addInputField(startCalibrationPoints, routeId)
    mcp_Field1.addInputField(endCalibrationPoints, routeId)
    mcp_Field1_OutField = mcp_Field1.outputField
    mcp_Field1_OutField.name = 'RouteId'
    mcp_Field1_OutField.aliasName = 'RouteId'
    mcp_Field1_OutField.type = 'String'
    mcp_Field1_OutField.length = 50
    mcp_Field1.outputField = mcp_Field1_OutField

    # RoutesSource_Start_CP.SourceFromMeasure to CalPts_Merge.Measure
    # Field map 2: from/to measure fields -> a single 'Measure' field.
    mcp_Field2 = FieldMap()
    mcp_Field2.addInputField(startCalibrationPoints, 'SourceFromMeasure')
    mcp_Field2.addInputField(endCalibrationPoints, 'SourceToMeasure')
    mcp_Field2_OutField = mcp_Field2.outputField
    mcp_Field2_OutField.name = 'Measure'
    mcp_Field2_OutField.aliasName = 'Measure'
    mcp_Field2_OutField.type = 'Double'
    mcp_Field2.outputField = mcp_Field2_OutField

    # Create a fieldMappings object for the layer merge.
    calibrationPointsMappings = FieldMappings()
    calibrationPointsMappings.addFieldMap(mcp_Field1)
    calibrationPointsMappings.addFieldMap(mcp_Field2)

    #Merge the points together into a single feature class.
    inputMergeLayers = [startCalibrationPoints, endCalibrationPoints]
    Merge_management(inputMergeLayers, mergedCalibrationPoints,
                     calibrationPointsMappings)

    MakeFeatureLayer_management(mergedCalibrationPoints, 'tempMergedPoints')

    # Collapse duplicate calibration points sharing RouteId + Measure.
    dissolveFields = ["RouteId", "Measure"]
    print('Dissolving points.')
    Dissolve_management('tempMergedPoints', dissolvedCalibrationPoints,
                        dissolveFields, "#", "SINGLE_PART")
# Exemplo n.º 8
# 0
def reviewData():
    """Run the configured Data Reviewer batch job against workspaceToReview.

    Rebuilds reviewerSessionGDB, enables Data Reviewer on it, creates a new
    reviewer session, and executes reviewerBatchJob. The "datareviewer"
    extension is checked out for the duration and checked back in on exit.
    """
    try:
        print("Starting the Data Reviewer batch job at:\n" +
              str(reviewerBatchJob) + ".")
        print("For the data located in:\n" + str(workspaceToReview) + ".")
        print(
            "If one of the feature classes, Routes or CalPts, does not exist in the place that the"
        )
        print(
            "data reviewer batch job looks for it, then you will get an 'Unknown Error'."
        )
        print(
            "This can be remedied by updating the data reviewer batch job's workspace settings."
        )
        # Test the data reviewer part:
        if CheckExtension("datareviewer") == 'Available':
            print("Extension availability check complete.")
            CheckOutExtension("datareviewer")

            # Remove the previous reviewer session GDB so the batch job
            # starts from a clean slate. -- For the errors, might need a
            # better process, so that it's possible to track where the
            # errors were at the start and how things progressed.
            if Exists(reviewerSessionGDB):
                Delete_management(reviewerSessionGDB)

            # Create new geodatabase
            # Replace with returnGDBOrSDEPath(reviewerSessionGDB), returnGDBOrSDEName(reviewerSessionGDB)
            # or similar functions
            CreateFileGDB_management(reviewerSessionGDBFolder,
                                     returnGDBOrSDEName(reviewerSessionGDB))

            # Execute EnableDataReviewer
            EnableDataReviewer_Reviewer(reviewerSessionGDB, "#", "#",
                                        "DEFAULTS")

            # Create a new Reviewer session
            ##CreateReviewerSession_Reviewer (reviewer_workspace, session_name, {session_template}, {duplicate_checking}, {store_geometry}, {username}, {version})
            CreateReviewerSession_Reviewer(reviewerSessionGDB, reviewerSession,
                                           "", "NONE", "STORE_GEOMETRY")

            # execute the batch job
            batchJobResult = ExecuteReviewerBatchJob_Reviewer(
                reviewerSessionGDB, sessionReviewerSession, reviewerBatchJob,
                workspaceToReview)

            print("Data Reviewer batch job complete.")

            # get the output table view from the result object
            outputTable = batchJobResult.getOutput(0)

            print("The output table is called " + str(outputTable.name) + "."
                  )  # prints REVBATCHRUNTABLE

            # BUGFIX: the in-body CheckInExtension call was removed; the
            # finally clause below always performs the check-in, so the
            # original code checked the extension in twice.

        else:
            print(
                "The 'datareviewer' extension is not available. Skipping checks."
            )

    except Exception as Exception1:
        # If an error occurred, print line number and error message.
        # BUGFIX: the originals were Py2-only print statements and used the
        # removed/deprecated Exception.message attribute.
        import traceback, sys
        tb = sys.exc_info()[2]
        print("Line %i" % tb.tb_lineno)
        print(str(Exception1))
        try:
            del Exception1
        except:
            pass
    finally:
        # BUGFIX: guard the check-in (matches roadsNonMonoCheck); the
        # original called it unconditionally, which can raise when the
        # extension was never checked out.
        try:
            CheckInExtension("datareviewer")
        except:
            pass
def main():
    """For each prefix set, export its selection of RoutesSource (plus the
    other configured feature classes) into a per-prefix *_Source.gdb.

    Only runs when usePrefixSetTestingAndReporting is True; otherwise prints
    a notice and does nothing. Relies on module-level configuration:
    prefixSetErrorReportingDict, outerTestDict, mainFolder, routesSourceFC,
    routesSourceFCAsALayer, otherFCsToCopyList, and the path helpers.
    """
    if usePrefixSetTestingAndReporting == True:
        for prefixKey in prefixSetErrorReportingDict.keys():
            # Attribute query that selects this prefix set's routes.
            prefixAttributeQuery = prefixSetErrorReportingDict[prefixKey]
            prefixKeyItemDict = outerTestDict[prefixKey]
            prefixSetGdbBaseName = prefixKeyItemDict["prefixSetGdbBaseName"]
            prefixSetSourceGDBName = prefixSetGdbBaseName + '_Source.gdb'
            prefixSetSourceGDBLocation = os.path.join(mainFolder,
                                                      prefixSetSourceGDBName)
            routesSourceOutputLocation = os.path.join(
                prefixSetSourceGDBLocation, 'RoutesSource')

            try:
                Delete_management(
                    routesSourceFCAsALayer
                )  #pre-emptive layer delete prior to rebuilding it
                time.sleep(3)
            except:
                pass

            # Select just this prefix set's features from the source layer.
            MakeFeatureLayer_management(routesSourceFC, routesSourceFCAsALayer)
            routesSourceSelectionClause = """ """ + str(
                prefixAttributeQuery) + """ """
            SelectLayerByAttribute_management(routesSourceFCAsALayer,
                                              "NEW_SELECTION",
                                              routesSourceSelectionClause)

            # Create the per-prefix *_Source.gdb if it doesn't exist yet.
            if Exists(returnGDBOrSDEPath(routesSourceOutputLocation)):
                pass
            else:
                CreateFileGDB_management(
                    mainFolder,
                    returnGDBOrSDEName(
                        returnGDBOrSDEPath(routesSourceOutputLocation)))
            # Checking to see if the output already exists.
            # If so, remove it.
            if Exists(routesSourceOutputLocation):
                print(
                    "Deleting the previous routesSourceOutputLocation at: \n" +
                    str(routesSourceOutputLocation) + ".")
                Delete_management(routesSourceOutputLocation)
                time.sleep(7)
            else:
                pass
            # Create a new file for the output.
            print(
                "Making a copy of the selection in the routesSourceFCAsALayer at: \n"
                + routesSourceOutputLocation + ".")
            CopyFeatures_management(routesSourceFCAsALayer,
                                    routesSourceOutputLocation)

            #Repeat for each of the other layers to be copied into the new *_Source.gdb.
            for itemToCopy in otherFCsToCopyList:
                itemToCopyInputLocation = os.path.join(
                    returnGDBOrSDEPath(routesSourceFC), itemToCopy)
                itemToCopyOutputLocation = os.path.join(
                    prefixSetSourceGDBLocation, itemToCopy)
                if Exists(itemToCopyOutputLocation):
                    print(
                        "Deleting the previous itemToCopyOutputLocation at: \n"
                        + str(itemToCopyOutputLocation) + ".")
                    Delete_management(itemToCopyOutputLocation)
                    time.sleep(7)
                else:
                    pass
                print("Making a copy of the itemToCopy at: \n" +
                      str(itemToCopyOutputLocation) + ".")
                CopyFeatures_management(itemToCopyInputLocation,
                                        itemToCopyOutputLocation)

    else:
        print(
            "The usePrefixSetTestingAndReporting value is not True. Will not create separate prefix set gdbs."
        )
def exportErrorsToFeatureClasses(reviewTable, originGDB, errorOutputGDB,
                                 errorOutputGDBFolder):
    """Export Data Reviewer errors into per-(table, check) feature classes.

    Reads the reviewer results table, groups error records by origin table
    and check title, selects the matching OBJECTIDs from each origin feature
    class (batched to stay under the selection-clause size limit), copies
    each non-empty selection into errorOutputGDB, and writes a CSV summary
    of error counts.

    reviewTable          -- Data Reviewer results table (has ORIGINTABLE,
                            CHECKTITLE, RECORDID, OBJECTID fields)
    originGDB            -- GDB containing the origin tables/feature classes
    errorOutputGDB       -- GDB to (re)create and fill with error FCs
    errorOutputGDBFolder -- folder in which to create errorOutputGDB

    Also relies on module-level names: tableAndCheckData (record class),
    originTablesGDB, formatCheckTitle, errorReportCSV, errorReportCSVName,
    errorReportRowsOrder, and nullable.
    """
    # Checking to see if the output already exists.
    # If so, remove it.
    if Exists(errorOutputGDB):
        Delete_management(errorOutputGDB)
    else:
        pass

    CreateFileGDB_management(errorOutputGDBFolder,
                             returnGDBOrSDEName(errorOutputGDB))

    # Work inside the error output GDB; restored at the end of the function.
    previousWorkspace = env.workspace
    env.workspace = errorOutputGDB

    tableFields = ['ORIGINTABLE', 'CHECKTITLE', 'OBJECTID']
    newCursor = daSearchCursor(reviewTable, tableFields)

    revRows = list()

    for rowItem in newCursor:
        revRows.append(list(rowItem))

    try:
        del newCursor
    except:
        pass

    originTableList = list()
    checkTitleList = list()

    for revRowItem in revRows:
        originTableList.append(revRowItem[0])
        checkTitleList.append(revRowItem[1])

    # Deduplicate via sets so each (table, check) pair is visited once.
    print('Creating sets from the originTable and checkTitle lists.')
    originTableSet = set(originTableList)
    checkTitleSet = set(checkTitleList)
    print('Finished set creation.')

    originTableList = list(originTableSet)
    checkTitleList = list(checkTitleSet)

    tableAndCheckDataObjects = list()
    csvDictOfErrorFeatures = dict()

    # Collect the error OBJECTIDs for every (origin table, check title)
    # combination that has at least one record in the review table.
    for originTableItem in originTableList:
        print('Origin table = ' + originTableItem + '.')
        completeOriginTablePath = os.path.join(originGDB, originTableItem)
        print('The full path to the origin table is ' +
              str(completeOriginTablePath) + '.')
        tableViewName = "ReviewTable_View_" + str(originTableItem)
        originTableWhereClause = """"ORIGINTABLE" = '""" + str(
            originTableItem) + """'"""
        try:
            Delete_management(tableViewName)
        except:
            pass
        MakeTableView_management(reviewTable, tableViewName,
                                 originTableWhereClause)

        for checkTitleItem in checkTitleList:
            print('Check title = ' + checkTitleItem + '.')
            selectionWhereClause = """"CHECKTITLE" = '""" + str(
                checkTitleItem) + """'"""
            SelectLayerByAttribute_management(tableViewName, "NEW_SELECTION",
                                              selectionWhereClause)
            countResult = GetCount_management(tableViewName)
            intCount = int(countResult.getOutput(0))

            if intCount >= 1:
                tempTableAndCheckData = tableAndCheckData(
                    originTableItem, checkTitleItem)
                tableViewFields = ["RECORDID", "OBJECTID"]

                newCursor = daSearchCursor(tableViewName, tableViewFields,
                                           selectionWhereClause)

                newOIDList = list()

                for cursorItem in newCursor:
                    newOIDList.append(cursorItem[1])

                try:
                    del newCursor
                except:
                    pass

                tempTableAndCheckData.listOfOIDsToUse = newOIDList

                tableAndCheckDataObjects.append(tempTableAndCheckData)
            else:
                print("There were no features selected for the " +
                      tableViewName + " table.")

    print("There are " + str(len(tableAndCheckDataObjects)) +
          " different items in the tableAndCheckDataObjects list.")

    for listObject in tableAndCheckDataObjects:

        featureLayerForErrorOutput = 'FeatureClassAsFeatureLayer'

        if Exists(featureLayerForErrorOutput):
            Delete_management(featureLayerForErrorOutput)
        else:
            pass

        # NOTE(review): this uses the module-level originTablesGDB rather
        # than the originGDB parameter used above -- confirm they are meant
        # to point at the same geodatabase.
        fullPathToFeatureClass = os.path.join(originTablesGDB,
                                              listObject.tableName)

        MakeFeatureLayer_management(fullPathToFeatureClass,
                                    featureLayerForErrorOutput)

        # build the selection list & select up to but not more than 999 features at at time
        OIDTotalCounter = 0
        errorOutputWhereClause = """ "OBJECTID" IN ("""

        for errorOID in listObject.listOfOIDsToUse:
            if OIDTotalCounter <= 998:
                errorOutputWhereClause = errorOutputWhereClause + str(
                    errorOID) + """, """
                OIDTotalCounter += 1
            else:
                # Remove the trailing ", " and add a closing parenthesis.
                errorOutputWhereClause = errorOutputWhereClause[:-2] + """) """
                SelectLayerByAttribute_management(featureLayerForErrorOutput,
                                                  "ADD_TO_SELECTION",
                                                  errorOutputWhereClause)

                # Start a fresh batch beginning with the current OID.
                OIDTotalCounter = 0
                errorOutputWhereClause = """ "OBJECTID" IN ("""
                errorOutputWhereClause = errorOutputWhereClause + str(
                    errorOID) + """, """

        # Remove the trailing ", " and add a closing parenthesis.
        errorOutputWhereClause = errorOutputWhereClause[:-2] + """) """
        SelectLayerByAttribute_management(featureLayerForErrorOutput,
                                          "ADD_TO_SELECTION",
                                          errorOutputWhereClause)

        ##print "Counting..."
        selectedErrorsResult = GetCount_management(featureLayerForErrorOutput)
        selectedErrorsCount = int(selectedErrorsResult.getOutput(0))

        # export the selected data with the correct tableName & checkTitle
        outputFeatureClassName = formatCheckTitle(
            listObject.checkTitle) + "ErrorsFrom_" + listObject.tableName
        fullPathToOutputFeatureClass = os.path.join(errorOutputGDB,
                                                    outputFeatureClassName)

        csvDictOfErrorFeatures[outputFeatureClassName] = str(
            selectedErrorsCount)

        print(
            str(selectedErrorsCount) + "\t features will be written to \t" +
            outputFeatureClassName)
        if selectedErrorsCount >= 1:
            CopyFeatures_management(featureLayerForErrorOutput,
                                    fullPathToOutputFeatureClass)
            time.sleep(25)
            AddField_management(outputFeatureClassName, "OptionalInfo", "TEXT",
                                "", "", 250, "ReviewingInfo", nullable)
        else:
            pass

    # Need to write a short CSV here that tells the number and type of errors.
    print('Writing error information to an error reports file called ' +
          str(errorReportCSVName) + '.')
    try:
        with open(errorReportCSV, 'w') as fHandle:
            for errorFeature in errorReportRowsOrder:
                if errorFeature in csvDictOfErrorFeatures:
                    errorFeatureCount = csvDictOfErrorFeatures[errorFeature]
                    fHandle.write(
                        str(errorFeature) + ', ' + str(errorFeatureCount) +
                        '\n')
                else:
                    fHandle.write(str(errorFeature) + ', ' + str(0) + '\n')
            # Add a blank line to match previous formatting.
            fHandle.write('\n')
    except:
        print("There was an error writing to the file.")

    # Modify this so that it just checks for the existence of the roads
    # and highways check output, rather than relying on the config
    # file for whether or not this should be ran.
    # The config file can tell the full process whether or not
    # to run the R&H check, but the error report should give
    # details on the R&H check whether or not the config file
    # currently states that the R&H check should be ran again
    # were the full process to run.

    env.workspace = previousWorkspace
# Exemplo n.º 11
# 0
def mainProcessFeatureSimplification(inputFeatures, maxCount, outputFeatures):
    """Simplify inputFeatures in parallel chunks and write to outputFeatures.

    Splits the input feature class into "mirror" file GDBs of at most
    maxCount features each, runs the simplification subprocess on each chunk
    (multi-process when useMultithreading is True and enough cores exist,
    single-threaded otherwise), appends every chunk's output into
    outputFeatures, and finally deletes the mirror GDBs.

    inputFeatures  -- path to the source feature class
    maxCount       -- target chunk size; values above 15000 are clamped to
                      15000 and values below 2000 are replaced with 7000
    outputFeatures -- path to the feature class to (re)create and fill
    """
    countResult = GetCount_management(inputFeatures)
    intCount = int(countResult.getOutput(0))
    # debug print
    print("Counted " + str(intCount) + " features in the " + inputFeatures + " feature class.")

    # Clamp the chunk size to a workable range.
    if maxCount > 15000:
        maxCount = 15000
    elif maxCount < 2000:
        maxCount = 7000
    else:
        pass

    # BUGFIX: explicit floor division so the mirror count stays an integer
    # under Python 3 as well ("/" on ints there yields a float); identical
    # result under Python 2.
    neededMirrors = intCount // maxCount + 1

    # debug print
    print("Will create " + str(neededMirrors) + " reflection gdbs.")

    infoForSubprocess = list()
    gdbToCreateList = list()

    # BUGFIX: range instead of the Py2-only xrange (behavior identical here).
    for countItem in range(0, neededMirrors):
        gdbMirrorName = mirrorBaseName + '_' + '0' + str(countItem) + '.gdb'
        gdbMirrorFullPath = os.path.join(mainFolder, gdbMirrorName)
        gdbToCreateList.append(gdbMirrorFullPath)
        # Best-effort removal of a stale mirror GDB from a previous run.
        try:
            if Exists(gdbMirrorFullPath):
                try:
                    Delete_management(gdbMirrorFullPath)
                except:
                    pass
            else:
                pass
        except:
            pass

        CreateFileGDB_management(mainFolder, gdbMirrorName)

        # Select this chunk's OBJECTID range from the input features and
        # copy the selection into the mirror GDB.
        if Exists(simplifyTempLayer):
            try:
                Delete_management(simplifyTempLayer)
            except:
                pass
        else:
            pass

        MakeFeatureLayer_management(inputFeatures, simplifyTempLayer)

        currentSelectMin = int(countItem * maxCount)
        currentSelectMax = int((countItem + 1) * maxCount)

        dynSelectClause = """"OBJECTID" >= """ + str(currentSelectMin) + """ AND "OBJECTID" < """ + str(currentSelectMax) + """"""

        SelectLayerByAttribute_management(simplifyTempLayer, "NEW_SELECTION", dynSelectClause)

        selectedSimplifyFeatures = os.path.join(gdbMirrorFullPath, simplifyInputName)

        CopyFeatures_management(simplifyTempLayer, selectedSimplifyFeatures)

        subprocessInfoItem = [mainFolder, gdbMirrorFullPath, simplifyAlgorithm, simplifyDistance]

        infoForSubprocess.append(subprocessInfoItem)

    # Predivide the list of data driven pages that each process needs to run
    # and pass it as a list of exportItems.

    coreCount = mp.cpu_count()

    # To support running this on the slow AR60, reduce the coreCount used to try to keep
    # this script from crashing there.
    if coreCount >= 3 and useMultithreading == True:
        coreCount = coreCount - 1

        print("Starting a multi-threaded job which will use (up to) " + str(coreCount) + " cores at once.")

        workPool = mp.Pool(processes=coreCount)
        # Note: This is a different usage of the word map than the one generally used in GIS.
        workPool.map(subProcessFeatureSimplification, infoForSubprocess)
        print("Multi-threaded job's done!")

        print("Waiting a few moments before closing down the worker processes...")
        time.sleep(20)
        workPool.close()
        time.sleep(20)
        workPool.join()

        print("Worker processes closed.")

    else:
        # Don't use multithreading here.
        print("Using the single threaded process for feature simplification.")
        print("This will be slower than the multi-threaded version,")
        print("but it should also be less likely to crash on slower machines")
        print("or those with low core counts.")
        for singleThreadedProcessInfoListItem in infoForSubprocess:
            singleThreadedProcessForSlowMachines(singleThreadedProcessInfoListItem)

        print("Waiting a few moments before continuing to the next part of the script...")
        time.sleep(20)

    # Delete the output target prior to recreating it and appending data into it.
    if Exists(outputFeatures):
        try:
            Delete_management(outputFeatures)
        except:
            pass
    else:
        pass

    # Need the gdb and fc name here from outputFeatures.
    outGDB = returnGDBOrSDEPath(outputFeatures)
    outGDBName = returnGDBOrSDEName(outGDB)
    outGDBFolder = returnGDBOrSDEFolder(outGDB)
    outFCName = returnFeatureClass(outputFeatures)

    if not Exists(outGDB):
        CreateFileGDB_management(outGDBFolder, outGDBName)

    # Use the inputFeatures as a template.
    CreateFeatureclass_management(outGDB, outFCName, "", inputFeatures)

    appendOutputFCList = list()

    for gdbToCreate in gdbToCreateList:
        appendOutputFC = os.path.join(gdbToCreate, 'simplificationOutput')
        appendOutputFCList.append(appendOutputFC)

    # Do appends here, then sleep again for a bit.
    # Shouldn't need a field mapping since they should all be the same.
    Append_management(appendOutputFCList, outputFeatures, "NO_TEST")

    # BUGFIX: print() instead of the Py2-only print statement, keeping the
    # module consistent and Python-3 compatible.
    print("Waiting a few moments to be sure that all of the locks have been removed prior to deleting the reflection gdbs...")
    time.sleep(20)

    # Then remove the mirror gdbs.
    for gdbToCreate in gdbToCreateList:
        try:
            if Exists(gdbToCreate):
                try:
                    Delete_management(gdbToCreate)
                except:
                    pass
            else:
                pass
        except:
            pass