def calculateMeasuresForLocalRoutes(routesToMeasure, subsetSelectionQuery):
    """Calculate start/end measure fields for the routes selected by a query.

    Makes a feature layer from routesToMeasure, applies subsetSelectionQuery
    as a new selection, and -- if at least one feature is selected -- zeroes
    the start measure field and sets the end measure field to the feature
    length in miles (Shape_Length / 5280, rounded to 3 decimal places).

    routesToMeasure      -- feature class (or layer) of routes to measure
    subsetSelectionQuery -- SQL where clause selecting the routes to update

    NOTE(review): relies on module-level startMeasure / endMeasure field
    names defined elsewhere in this file -- confirm before running.
    """
    fcAsFeatureLayerForMeasuring = 'FCAsFeatureLayer_Measures'

    # Remove any stale layer left over from a previous run.
    if Exists(fcAsFeatureLayerForMeasuring):
        Delete_management(fcAsFeatureLayerForMeasuring)

    MakeFeatureLayer_management(routesToMeasure, fcAsFeatureLayerForMeasuring)

    SelectLayerByAttribute_management(fcAsFeatureLayerForMeasuring,
                                      'CLEAR_SELECTION')
    SelectLayerByAttribute_management(fcAsFeatureLayerForMeasuring,
                                      'NEW_SELECTION', subsetSelectionQuery)

    countResult = GetCount_management(fcAsFeatureLayerForMeasuring)
    intCount = int(countResult.getOutput(0))
    print('There were ' + str(intCount) +
          ' features selected in the fcAsFeatureLayerForMeasuring layer.')
    if intCount >= 1:
        # Start measures are always zero.
        expressionText1 = 0
        CalculateField_management(fcAsFeatureLayerForMeasuring, startMeasure,
                                  expressionText1, "PYTHON_9.3")
        # End measure = route length in miles, to 3 decimal places.
        expressionText2 = 'float("{0:.3f}".format(!Shape_Length! / 5280.00))'
        CalculateField_management(fcAsFeatureLayerForMeasuring, endMeasure,
                                  expressionText2, "PYTHON_9.3")
    else:
        # Fixed: was a Python 2 print statement, inconsistent with the
        # print() calls used elsewhere in this function.
        print("Not calculating due to lack of selected features.")
def reportExtensionForRAndHCheck(featuresToCheck):
    # When the Roads & Highways Non-Monotonic Check output exists, append
    # its feature count to the error report CSV; otherwise just report that
    # there is nothing to add.
    if not Exists(featuresToCheck):
        print("No Roads & Highways Non-Monotonic Check output found.")
        print("Will not add additional information to the errors report csv.")
        return

    featuresName = returnFeatureClass(featuresToCheck)
    rAndHErrorsLayer = 'RAndHErrorsAsFeatureLayer'
    MakeFeatureLayer_management(featuresToCheck, rAndHErrorsLayer)
    countResult = GetCount_management(rAndHErrorsLayer)
    rAndHErrorsCount = int(countResult.getOutput(0))

    print("Roads & Highways Non-Monotonic Check output was found.")
    print(
        "Extending the errors report with information from the Roads & Highways Non-Monotonicity Check."
    )

    # Append the feature class name and its error count to the CSV report.
    with open(errorReportCSV, 'a') as fHandle:
        fHandle.write('\n' + 'Roads & Highways checks follow: ' + '\n')
        fHandle.write(featuresName + ', ' + str(rAndHErrorsCount) + '\n')
# Example #3
# 0
def ExamineGDB(gdb):
    # Print an inventory of a geodatabase: its datasets, its tables, and the
    # feature classes of the first dataset along with each field's name and
    # type. Compacts the geodatabase at the end.
    # NOTE(review): 'checkfile', 'topo', and the table-loop 'fcc' are computed
    # but never used, and 're' is imported but unused -- likely leftovers.
    import ntpath, re
    reviewpath = ntpath.basename(gdb)

    from arcpy import env, ListWorkspaces, ListDatasets, ListTables, ListFeatureClasses, GetCount_management, Compact_management, ListFields
    #set the workspace from the config file
    env.workspace = ntpath.dirname(gdb)
    ng911 = gdb
    print "geodatabases"
    print ng911
    env.workspace = ng911
    datasets = ListDatasets()
    print "Datasets:"
    for dataset in datasets:
        print "     " + str(dataset)
    tables = ListTables()
    print " tables:"
    for table in tables:
        # Row count is fetched but never printed for tables.
        fcc = GetCount_management(table)
        print "     " + str(table)
    # NOTE(review): assumes at least one dataset exists -- IndexError otherwise.
    fd = datasets[0]
    fcs = ListFeatureClasses("", "", fd)
    for fc in fcs:
        fields = ListFields(fc)
        fcc = GetCount_management(fc)
        print fc + ", " + str(fcc) + " features"
        for field in fields:
            print "        " + str(field.name) + ", " + str(field.type)
    checkfile = reviewpath + "/" + ntpath.basename(ng911)
    topo = fd + "/NG911_Topology"
    Compact_management(ng911)
# Example #4
# 0
def checkFeatureLocations(gdb):
    """Check that every feature class in gdb lies inside the authoritative boundary.

    Walks the geodatabase's feature classes, and for each one (except the
    AuthoritativeBoundary itself) selects the features that do NOT intersect
    the boundary. Any such features are recorded via RecordResults; otherwise
    a per-layer OK message is emitted through userMessage.

    gdb -- path to the NG911 geodatabase to check
    """
    userMessage("Checking feature locations...")
    from os import path
    from time import strftime
    from arcpy import MakeFeatureLayer_management, SelectLayerByAttribute_management, SelectLayerByLocation_management, GetCount_management, Delete_management, da

    # Fixed: 'today' was used below but never defined in this version of the
    # function (NameError as soon as an out-of-bounds feature was found).
    # Defined here the same way as in the sibling checkFeatureLocations.
    today = strftime("%m/%d/%y")
    values = []
    #make sure feature are all inside authoritative boundary

    #get authoritative boundary
    authBound = path.join(gdb, "NG911", "AuthoritativeBoundary")
    ab = "ab"

    MakeFeatureLayer_management(authBound, ab)

    for dirpath, dirnames, filenames in da.Walk(gdb, True, '', False,
                                                ["FeatureClass"]):
        for filename in filenames:
            if filename != "AuthoritativeBoundary":
                #get full path name & create a feature layer
                fullPath = path.join(gdb, filename)
                fl = "fl"
                MakeFeatureLayer_management(fullPath, fl)

                #select by location to get count of features outside the authoritative boundary
                SelectLayerByLocation_management(fl, "INTERSECT", ab)
                SelectLayerByAttribute_management(fl, "SWITCH_SELECTION", "")
                #get count of selected records
                result = GetCount_management(fl)
                count = int(result.getOutput(0))

                #report results
                if count > 0:
                    fields = ("OBJECTID")
                    with da.SearchCursor(fl, fields) as rows:
                        for row in rows:
                            val = (today,
                                   "Feature not inside authoritative boundary",
                                   filename, "", row[0])
                            values.append(val)
                else:
                    userMessage(filename +
                                ": all records inside authoritative boundary")

                #clean up
                Delete_management(fl)

    userMessage("Completed check on feature locations")

    if values != []:
        RecordResults("fieldValues", values, gdb)
# Example #5
# 0
def checkFeatureLocations(pathsInfoObject):
    """Verify every feature class in the geodatabase lies inside the
    authoritative boundary, recording any features found outside it."""
    gdb = pathsInfoObject.gdbPath

    userMessage("Checking feature locations...")

    # Timestamp applied to every recorded violation.
    today = strftime("%m/%d/%y")
    values = []

    # Layer for the authoritative boundary all features must fall within.
    authBound = path.join(gdb, "NG911", "AuthoritativeBoundary")
    ab = "ab"
    MakeFeatureLayer_management(authBound, ab)

    for dirpath, dirnames, filenames in Walk(gdb, True, '', False, ["FeatureClass"]):  # @UnusedVariable
        for filename in filenames:
            if filename == "AuthoritativeBoundary":
                continue

            # Layer over the feature class under test.
            fl = "fl"
            MakeFeatureLayer_management(path.join(gdb, filename), fl)

            # Select everything that does NOT intersect the boundary.
            SelectLayerByLocation_management(fl, "INTERSECT", ab)
            SelectLayerByAttribute_management(fl, "SWITCH_SELECTION", "")
            outsideCount = int(GetCount_management(fl).getOutput(0))

            if outsideCount > 0:
                # Record each offending feature by OBJECTID.
                with SearchCursor(fl, ("OBJECTID")) as rows:
                    for row in rows:
                        values.append((today, "Feature not inside authoritative boundary", filename, "", row[0]))
            else:
                userMessage( filename + ": all records inside authoritative boundary")

            # Clean up the per-feature-class layer.
            Delete_management(fl)

    userMessage("Completed check on feature locations")

    if values != []:
        RecordResults("fieldValues", values, gdb)
# Example #6
# 0
def calculatePointElevationField(points, raster, field_name):
    """Populate field_name on each point in 'points' with the elevation
    sampled from 'raster' at that point's geometry.

    points     -- point feature class to update in place
    raster     -- elevation raster sampled via getElevationAtPoint
    field_name -- name of the (existing) field that receives the elevation
    """
    # Monitor progress by counting features.
    view = MakeTableView_management(points, 'points')
    count = int(GetCount_management('points').getOutput(0))
    SetProgressor('step', 'Extracting point elevations', 0, count)
    AddMessage('{} features to process'.format(count))

    # Get the object id field (used only in the per-row progress message).
    oid = Describe(points).OIDFieldName

    # Make an update cursor and update each row's elevation field.
    # Fixed: removed the unused 'elevations' dict -- values are written
    # straight back to each row, never cached.
    cursor = UpdateCursor(points, [field_name, 'SHAPE@', oid])

    for row in cursor:
        row[0] = getElevationAtPoint(raster, row[1])
        cursor.updateRow(row)
        AddMessage('row updated to {}; oid: {}'.format(row[0], row[2]))
        SetProgressorPosition()

    # release the data
    del cursor

    #reset this progressor
    ResetProgressor()
# Example #7
# 0
def rand_string(resultsOutputQueue, xCount):
    """ Generates a random string of numbers, lower- and uppercase chars. """
    alphabet = string.ascii_lowercase + string.ascii_uppercase + string.digits
    rand_str = ''.join(random.choice(alphabet) for i in range(xCount))

    countedNumber = GetCount_management('Counties_No_Z')

    startTime = str(datetime.datetime.now())
    xCount = str(xCount)

    # Pick a random county and pull its rows with a search cursor.
    randomNumber = random.randint(1, 105)
    searchCursorWhereClause = "\"COUNTY_NUMBER\" = " + str(randomNumber) + " "
    newCursor = daSearchCursor('Counties_No_Z',
                               ["COUNTY_NAME", "COUNTY_NUMBER", "SHAPE@"],
                               searchCursorWhereClause)

    # Only the last row's description survives the loop (as in the original).
    for rowItem in newCursor:
        rowItemString = "Cursor Row Item: " + str(rowItem[0]) + " & " + str(
            int(rowItem[1])) + "."

    endTime = str(datetime.datetime.now())

    # Report everything back through the multiprocessing result queue.
    resultsOutputQueue.put("Process number: " + xCount + "\n " + "Started: " +
                           startTime + " \n " + "Ended: " +
                           str(datetime.datetime.now()) + "\n " + rand_str +
                           " " + str(countedNumber) + " " + str(rowItemString))

    if "newCursor" in locals():
        del newCursor
# Example #8
# 0
def RouteCheck(RID):
    """Update the event table for a single route.

    Pass the RID/LRS key value for the route to update, e.g. '030C0011800W0'.
    The 4th character encodes the class, which should be L, C, or RM; R and M
    are both collapsed to the "RM" class. Copies the matching non-state event
    rows into an in_memory table named <class><RID>.
    """
    print("what route number should be updated?")
    #RID = '030C0011800W0'
    Class = RID[3]
    # R and M routes share one event table, keyed "RM".
    if Class in ("R", "M"):
        Class = "RM"
    print(Class)
    tablename = Class+RID
    RIDExp = "RID = '"+RID+"'"
    print("Updating route "+ str(RID))
    # Only build the source table view when not running against PROD.
    if Exists("UpdateGISPROD"):
        pass
    else:
        AddTable = Class+"_NON_STATE_EVENTS"
        MakeTableView_management(r"C:/temp/Nusys_Check.gdb/"+AddTable, tablename+"_view","#")

    TableToTable_conversion(tablename+"_view", "in_memory", tablename, RIDExp)
    RecordCount = str(GetCount_management(tablename))
    # Fixed: original used assignment ('=') instead of comparison ('=='),
    # which is a syntax error.
    if RecordCount == '0':
        print("No Records to Calculate")
# Example #9
# 0
def AddInsert(fc, layer_name, newtbl, workspace):
    # Load newly-added crossing rows from staging table 'newtbl' into the
    # target feature class 'fc':
    #   1. count the staged rows and bail out when there are none,
    #   2. build an XY event layer from the rows' lat/long columns,
    #   3. export it to a temporary shapefile with a remapped schema,
    #   4. append the shapefile into 'layer_name' (a layer over 'fc'),
    #   5. stamp LOADDATE on the new (LOADDATE IS NULL) rows inside an
    #      edit session on 'workspace'.
    MakeTableView_management(newtbl, "NEWROWS_View", "#", "#", "#")
    addcount = int(GetCount_management("NEWROWS_View").getOutput(0))
    if addcount == 0:
        print "no new records"
        pass
    else:
        MakeFeatureLayer_management(fc, layer_name)
        # WGS84 geographic coordinates with a NAVD88 vertical datum.
        MakeXYEventLayer_management(
            "NEWROWS_View", "CROSSINGLONGITUDE", "CROSSINGLATITUDE",
            "NEWROWS_Layer",
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],VERTCS['NAVD_1988',VDATUM['North_American_Vertical_Datum_1988'],PARAMETER['Vertical_Shift',0.0],PARAMETER['Direction',1.0],UNIT['Meter',1.0]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
            "#")
        # Field map renames CROSSINGLATITUDE/LONGITUDE to the shapefile's
        # 10-char CROSSINGLA/CROSSINGLO limits.
        FeatureClassToFeatureClass_conversion(
            "NEWROWS_Layer", "D:/Temp", "LOADTHIS1.shp", "#",
            """CROSSINGID "CROSSINGID" true false false 30 Text 0 0 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGID,-1,-1;CROSSINGLA "CROSSINGLA" true true false 8 Double 10 38 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGLATITUDE,-1,-1;CROSSINGLO "CROSSINGLO" true true false 8 Double 10 38 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGLONGITUDE,-1,-1;CROSSINGTY "CROSSINGTY" true true false 2 Text 0 0 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGTYPE,-1,-1""",
            "#")
        # Reverse mapping on the way back into the enterprise layer.
        Append_management(
            "D:/Temp/LOADTHIS1.shp", layer_name, "NO_TEST",
            """CROSSINGID "CROSSINGID" true false false 30 Text 0 0 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGID,-1,-1;CROSSINGLATITUDE "CROSSINGLATITUDE" true true false 8 Double 10 38 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGLA,-1,-1;CROSSINGLONGITUDE "CROSSINGLONGITUDE" true true false 8 Double 10 38 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGLO,-1,-1;CROSSINGTYPE "CROSSINGTYPE" true true false 2 Text 0 0 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGTY,-1,-1;LOADDATE "LOADDATE" true true false 36 Date 0 0 ,First,#""",
            "#")
        Delete_management("D:/Temp/LOADTHIS1.shp", "#")
        updatelyr = layer_name + "new"
        # Freshly appended rows are the ones with no LOADDATE yet.
        MakeFeatureLayer_management(layer_name, updatelyr, "LOADDATE IS NULL")
        with da.Editor(workspace) as edit:
            CalculateField_management(updatelyr, "LOADDATE",
                                      "datetime.datetime.now( )", "PYTHON_9.3",
                                      "#")
        del fc, layer_name, newtbl, workspace, updatelyr
        print "new rows inserted into Static_Crossings"
def catchment_delineation(inlets, flow_direction_raster, pour_point_field):
    """    
    Delineate the catchment area(s) for the inlet(s). Also provide back how many
    catchments we're dealing with so we can handle iteration accordingly.
    
    Input:
        - inlets: point shapefile or feature class representing inlet location(s)
            from which catchment area(s) will be determined. Can be one or
            many inlets.
        - flow_direction_raster: flow direction raster used as the
            in_flow_direction_raster argument of the Spatial Analyst
            Watershed function.
        - pour_point_field: field in the inlets layer passed through as the
            Watershed pour_point_field argument.
    
    Output:
        - a python dictionary structured as follows:
            {
            "catchments": <path to the catchments raster created by the
            Arcpy.sa Watershed function>,
            "count": <count (int) of the number of inlets/catchments>
            }
    """

    # delineate the watershed(s) for the inlets. This is the standard spatial analyst watershed function
    catchments = Watershed(in_flow_direction_raster=flow_direction_raster,
                           in_pour_point_data=inlets,
                           pour_point_field=pour_point_field)
    # save the catchments layer to the fgdb set by the arcpy.env.scratchgdb setting)
    catchments_save = so("catchments", "timestamp", "fgdb")
    catchments.save(catchments_save)
    msg("...catchments raster saved:\n\t{0}".format(catchments_save))
    # get count of how many watersheds we should have gotten (# of inlets)
    count = int(GetCount_management(inlets).getOutput(0))
    # NOTE(review): the dict carries the in-memory raster object, not the
    # saved path mentioned in the docstring -- confirm which callers expect.
    # return a dictionary containing ref. to the scratch catchments layer and the count of catchments
    return {"catchments": catchments, "count": count}
# Example #11
# 0
def ProjDelete(
):  #delete rows from the FMIS load table that are about to be processed
    """Select (for deletion) FMIS load-table rows whose PROJECT_NUMBER appears
    in the CPMS delete-rows table.

    NOTE(review): the DeleteRows_management call is still commented out, so
    this currently only performs the selection and prints what it would do.
    """
    # Fixed: GetCount_management returns a Result object; the original
    # compared it directly to 0, which was never true. Extract the int first.
    delcountResult = GetCount_management(
        r'Database Connections\CANT_CPMS.sde\CPMS.CPMS_FMIS_GIS_DEL_ROWS')
    delcount = int(delcountResult.getOutput(0))
    print(str(delcount) + " records to delete")
    deletelist = list()
    if delcount == 0:
        print("no records to delete, ending")
    else:
        MakeTableView_management(FMIS_LOAD, "FMIS_TABLE")
        MakeTableView_management(deltbl, "deletes")
        with da.SearchCursor(deltbl,
                             "PROJECT_NUMBER") as delcur:  # @UndefinedVariable
            for row in delcur:
                deletelist.append("{0}".format(row[0]))
        # Fixed: str(list) renders square brackets, which is not valid SQL.
        # Build a parenthesized, quoted IN list instead.
        delsel = "PROJECT_NUMBER IN (" + ", ".join(
            "'" + projNum + "'" for projNum in deletelist) + ")"
        print(delsel)

        SelectLayerByAttribute_management("FMIS_TABLE", "NEW_SELECTION",
                                          delsel)
        #DeleteRows_management("FMIS_TABLE")
    print("Delete function completed")
# Example #12
# 0
def appender_DWBI_OLD(ShapeFileDate):
    # Append the modeled DWBI segment data into the enterprise KTRIPS route
    # segments table, but only when a KEEP_COMMON join on PRMT_ID finds no
    # rows already present (i.e. the data has not been appended before).
    # NOTE(review): the ShapeFileDate parameter is never read here -- confirm
    # whether it is vestigial or intended to select the source data.
    print "appending the modeled data"
    env.workspace = repo
    filename1 = r"DWBI_SEGS"
    #C:\Workspace\pydot\sde_connections_10.3\sde@KTRIPS_sqlgiprod.sde\KTRIPS.SDE.KTRIPS_ROUTES
    enterprisedbRoutes = gdb + r"\KTRIPS.SDE.KTRIPS_ROUTE_Segments"
    print enterprisedbRoutes
    #Append_management(filename1, enterprisedbRoutes, "NO_TEST", "#")
    if Exists(filename1):
        MakeTableView_management(filename1, "AppendCheck", "#", "#", "#")
        # KEEP_COMMON: the joined view holds only rows already present in
        # the enterprise table, so a zero count means nothing was appended yet.
        AddJoin_management("AppendCheck",
                           "PRMT_ID",
                           enterprisedbRoutes,
                           "PRMT_ID",
                           join_type="KEEP_COMMON")
        recordsTest = str(GetCount_management("AppendCheck"))
        RemoveJoin_management("AppendCheck")
        if recordsTest == '0':
            print recordsTest + " of these records exist, appending now"
            Append_management(filename1, enterprisedbRoutes, "NO_TEST", "#")
        else:
            print recordsTest + " records already have been appended"
    else:
        print "there was a problem, " + str(filename1) + " could not be found"
        pass
def process_feature_classes(input_ws, output_ws, foreach_layer = None):
    """
    processes each featureclass with an optional function
    input_ws - the database or dataset path to process feature classes
    output_ws - the output for the feature classes
    foreach_layer - the function to process the feature classes
    """
    from arcpy import env, ListFeatureClasses, FeatureClassToGeodatabase_conversion, \
        AddWarning, AddMessage, GetCount_management, FeatureClassToFeatureClass_conversion
    from os.path import join
    env.workspace = input_ws
    for fc in ListFeatureClasses():

        AddMessage('Processing {}...'.format(fc))
        # Optionally skip feature classes that contain no rows.
        if env.skipEmpty and int(GetCount_management(fc)[0]) == 0:
            AddWarning('Skipping because table is empty: {}'.format(fc))
            continue
        try:
            if foreach_layer:
                # Delegate processing to the supplied callback.
                foreach_layer(input_ws, output_ws, fc)
            else:
                # Default behavior: copy the feature class to the output,
                # replacing any existing copy first.
                delete_existing(join(output_ws, get_name(fc)))
                FeatureClassToFeatureClass_conversion(fc, output_ws, get_name(fc))
        except Exception as e:
            AddWarning('Error processing feature class {} - {}'.format(fc, e))
# Example #14
# 0
def LoadMonthlyStats(ShapeFileDate):
    # Append one month of KTRIPS summary data (already staged in in_memory
    # tables) into the enterprise monthly-summary tables, unless rows tagged
    # with this source file name have already been loaded.
    # NOTE(review): the slicing below assumes a fixed source-file name layout
    # (date embedded from character 12 to the 4-char extension) -- verify
    # against the caller that supplies ShapeFileDate.
    env.overwriteOutput = 1
    SourceFileFGB = str(ShapeFileDate[12:-4].replace("-", ""))
    SourceFileINT = SourceFileFGB[2:6] + SourceFileFGB[0:2]
    infileMonthly = "in_memory\LRS" + SourceFileFGB
    IntersectionMonthly = "in_memory\IntptKTRIPS" + SourceFileINT
    SourceFileTxt = str(ShapeFileDate[:-4].replace("-", "_"))
    MakeTableView_management(infileMonthly, "AppendCheckMo", "#", "#", "#")
    #Config this
    enterpriseDBMonthly = gdb + "\KTRIPS.SDE.KTRIPS_MonthlySum"
    #enterpriseDBMonthly = gdb+r"\INTERMODAL.DBO.KTRIPS_MonthlySum"
    #inputfc = r"C:\input.shp"
    outputView = "AppendCheckMonthly"
    fieldname = "SourceFile"
    fieldvalue = SourceFileTxt
    # View restricted to rows already carrying this source file's name;
    # a zero count means this month has not been loaded yet.
    whereclause = str(
        BuildWhereClause(enterpriseDBMonthly, fieldname, fieldvalue))
    MakeTableView_management(enterpriseDBMonthly, outputView, whereclause)
    recordsTest = str(GetCount_management(outputView))
    if recordsTest == '0':
        print recordsTest + " of these records existed and will be appended right now"
        Append_management(infileMonthly,
                          enterpriseDBMonthly,
                          schema_type="NO_TEST",
                          field_mapping="#",
                          subtype="")
        Append_management(IntersectionMonthly,
                          enterpriseDBMonthly + "Intr",
                          schema_type="NO_TEST",
                          field_mapping="#",
                          subtype="")
    else:
        print recordsTest + " records already have been appended"
# Example #15
# 0
def AnnualizeData(YearToAnnualize):
    """Move the current-year KTRIPS data into the annual layer for the given year.

    Appends Ktrips_CurrentYear into Ktrips_Annual tagged with the previous
    year, then truncates the current-year table -- but only if the annual
    layer does not already contain rows for the requested year.

    YearToAnnualize -- the trip year to annualize (string or int, e.g. '2020')
    """
    annualLayer = gdb + "\KTRIPS.SDE.Ktrips_Annual"
    currentyYear = gdb + "\KTRIPS.SDE.Ktrips_CurrentYear"
    # Fixed: the original read YearSelTest before assigning it (an
    # UnboundLocalError) and never used the YearToAnnualize parameter.
    SelectYear = str(YearToAnnualize)
    CalcYear = str(int(YearToAnnualize) - 1)
    YearSelTest = "TripYear = '" + SelectYear + "'"
    if Exists("Check1"):
        Delete_management("Check1")
    MakeFeatureLayer_management(annualLayer, "Check1", YearSelTest)
    CheckExistence = GetCount_management("Check1")
    print(CheckExistence)
    # Fixed: runnext is now defined on every path, so the final print cannot
    # raise NameError.
    runnext = False
    if int(str(CheckExistence)) > 0:
        print("This source file info  is already in the target feature")
    elif int(str(CheckExistence)) == 0:
        print('the information is new for this source file and will be added.')
        runnext = True
        Append_management(currentyYear, annualLayer, "NO_TEST", "#")
        CalculateField_management(annualLayer, "TripYear", CalcYear,
                                  "PYTHON_9.3")
        TruncateTable_management(currentyYear)
    else:
        print('something isnt working here')
    print(runnext)
# Example #16
# 0
def FileChecker(ShapeFileDate, CheckLayer):
    """Check whether data for ShapeFileDate is already present in CheckLayer.

    Returns True when no rows tagged with the source file name exist yet
    (i.e. the data is new and should be loaded), False when the source file
    has already been loaded.
    """
    SourceFileTxt = str(ShapeFileDate.replace("-", "_"))
    print(ShapeFileDate)
    selectdate = "Sourcefile = '" + SourceFileTxt + "'"
    print(selectdate)
    print(CheckLayer)
    if Exists("Check1"):
        Delete_management("Check1")
    # CheckLayer may be a feature class or a plain table; fall back to a
    # table view when a feature layer cannot be made.
    # Fixed: narrowed the bare except to Exception so system-exiting
    # exceptions are not swallowed.
    try:
        MakeFeatureLayer_management(CheckLayer, "Check1", selectdate)
    except Exception:
        MakeTableView_management(CheckLayer, "Check1", selectdate)
    CheckExistence = GetCount_management("Check1")
    print(CheckExistence)
    # Fixed: runnext now has a default, so the final print/return cannot
    # raise NameError if the count is somehow negative.
    runnext = False
    if int(str(CheckExistence)) > 0:
        print("This source file info  is already in the target feature")
    elif int(str(CheckExistence)) == 0:
        print('the information is new for this source file and will be added.')
        runnext = True
    else:
        print('something isnt working here')
    print(runnext)
    return runnext
# Example #17
# 0
def toOpen(featureClass, outJSON, includeGeometry="geojson"):
    """Export featureClass to an open format chosen by outJSON's extension."""
    # Determine the output format from the file extension.
    fileType = getExt(outJSON)

    # Sanity checks: valid geojson needs at least one feature, and the
    # extension must map to a known file type.
    if not int(GetCount_management(featureClass).getOutput(0)):
        AddMessage("No features found, skipping")
        return
    if not fileType:
        AddMessage("this filetype doesn't make sense")
        return

    # Pick the geometry encoding the chosen format requires.
    if fileType in ("geojson", "topojson"):
        includeGeometry = "geojson"
    elif fileType == "sqlite":
        includeGeometry = "well known binary"
    else:
        includeGeometry = includeGeometry.lower()

    # Open the output file; prepareFile returns a falsy value when the
    # format is not defined.
    outFile = prepareFile(outJSON, featureClass, fileType, includeGeometry)
    if not outFile:
        AddMessage("I don't understand the format")
        return

    # Write the rows, then finish up.
    writeFile(outFile, featureClass, fileType, includeGeometry)
    closeUp(outFile, fileType)
# Example #18
# 0
 def __init__(self, featureClass):
     """Count the features in featureClass and set up a 100-step progressor."""
     # Total feature count drives the progress reporting.
     self.featureCount = int(GetCount_management(featureClass).getOutput(0))
     SetProgressor("step",
                   "Found {0} features".format(str(self.featureCount)), 0,
                   100, 1)
     AddMessage("Found " + str(self.featureCount) + " features")
     # Progress state: percent complete and the current feature index.
     self.percent = 0
     self.current = 0
# Example #19
# 0
def MakeRouteLayers(OpEnvironmentMode):
    """Rebuild the CANSYS route tables (CRND/SRND/NSND) in the enterprise
    geodatabase and dynamically segment the MAP_EXTRACT event table onto the
    county (CMLRS) and state (SMLRS) route layers.

    OpEnvironmentMode -- currently unused; paths come from EXOR_GIS_CONFIG.
    """
    from EXOR_GIS_CONFIG import OpEnvironment
    OpRunIn= OpEnvironment.OpRunInRoutes  # @UndefinedVariable
    OpRunOut= OpEnvironment.OpRunOut  # @UndefinedVariable
    #adm=OpEnvironment.adm  # @UndefinedVariable
    Owner=OpEnvironment.Owner  # @UndefinedVariable
    DB=OpEnvironment.DB  # @UndefinedVariable

    env.workspace = OpRunIn
    env.overwriteOutput = True
    print(OpRunIn)

    #combine the connection, db, and owner to the destination path for enterprise geodatabase output
    OpRunFullOut = OpRunOut+r"/"+DB+"."+Owner+"."

    print("Updating CRND")
    #add the Map Extract Event Table limited to primary direction into memory
    TruncateTable_management(OpRunFullOut+"CRND")
    Append_management("CRND", OpRunFullOut+"CRND", "NO_TEST")
    print("Updating SRND")
    TruncateTable_management(OpRunFullOut+"SRND")
    Append_management("SRND", OpRunFullOut+"SRND", "NO_TEST")
    print("Updating NSND")
    TruncateTable_management(OpRunFullOut+"NSND")
    Append_management("NSND", OpRunFullOut+"NSND", "NO_TEST")

    # Fixed: GetCount_management returns a Result object, so the original
    # "> 0" comparison never tested the actual record count.
    if int(GetCount_management("MAP_EXTRACT").getOutput(0)) > 0:
        MakeTableView_management("MAP_EXTRACT", "V_MV_MAP_EXTRACT", "DIRECTION < 3")

        #Add the CRND CANSYS rotue layer, dynseg the event table, truncate and load to CMLRS
        MakeFeatureLayer_management("CRND", "CRND")
        MakeRouteEventLayer_lr("CRND", "NE_UNIQUE", "MAP_EXTRACT", "NQR_DESCRIPTION LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE", "CMLRS1", "", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT")
        try:
            print("truncation and appending the CMLRS")
            TruncateTable_management(OpRunFullOut+"CMLRS")
            Append_management("CMLRS1", OpRunFullOut+"CMLRS", "NO_TEST")
        # Narrowed from a bare except; fall back to a full overwrite when
        # the truncate-and-append path fails.
        except Exception:
            print("could not truncate, overwriting CMLRS")
            FeatureClassToFeatureClass_conversion("CMLRS1", OpRunOut, "CMLRS","#", "#", "#")

        MakeFeatureLayer_management("SRND", "SRND")
        MakeRouteEventLayer_lr("SRND", "NE_UNIQUE", "MAP_EXTRACT", "STATE_NQR_DESCRIPTION LINE BEG_STATE_LOGMILE END_STATE_LOGMILE", out_layer="SMLRS1", offset_field="", add_error_field="ERROR_FIELD", add_angle_field="NO_ANGLE_FIELD", angle_type="NORMAL", complement_angle="ANGLE", offset_direction="LEFT", point_event_type="POINT")
        try:
            print("truncation and appending the SMLRS")
            TruncateTable_management(OpRunFullOut+"SMLRS")
            Append_management("SMLRS1", OpRunFullOut+"SMLRS", "NO_TEST")
        except Exception:
            print("could not truncate, overwriting SMLRS")
            FeatureClassToFeatureClass_conversion("SMLRS1", OpRunOut, "SMLRS","#", "#", "#")

        print("Route Layers Updated")
    else:
        print("the map extract is unreliable and was not exported")
# Example #20
# 0
def newcheck():
    """Kick off FIMS processing when the CPMS insert-rows table has records."""
    newcntResult = GetCount_management(
        r'Database Connections\CANT_CPMS.sde\CPMS.CPMS_FMIS_GIS_INS_ROWS')
    # Fixed: the Result object was compared directly to 0, so the "no
    # records" branch was never taken; extract the integer count first.
    newcnt = int(newcntResult.getOutput(0))
    print(str(newcnt) + " records to process")
    if newcnt == 0:
        print("no records to load, pass")
    else:
        print("Starting the processing now")
        FIMS_GIS()
# Example #21
# 0
def ClearSteward(StewardID):
    # Delete all road centerlines and road-alias rows belonging to one
    # steward from the enterprise NG911 geodatabase.
    # NOTE(review): StewardID is interpolated directly into the where clause
    # -- only pass trusted steward identifiers.
    from arcpy import DeleteFeatures_management, DeleteRows_management, GetCount_management
    targetpath = currentPathSettings.EntDB + '/' + currentPathSettings.EDBName + '.' + currentPathSettings.EDBO
    #\\gisdata\arcgis\GISdata\DASC\NG911\Final\[email protected]\NG911.GEO.NG911\NG911.GEO.RoadCenterline
    print targetpath
    env.workspace = targetpath
    where_clause = "STEWARD LIKE '" + StewardID + "'"

    # Layers restricted to the one steward's rows; counts are printed
    # before the actual deletes run.
    MakeFeatureLayer_management(targetpath + ".RoadCenterline",
                                "Steward_Delete_Roads", where_clause)
    MakeTableView_management(targetpath + ".RoadAlias", "Steward_Delete_Alias",
                             where_clause)
    LineCount = GetCount_management("Steward_Delete_Roads")
    AliasCount = GetCount_management("Steward_Delete_Alias")
    print 'deleting ' + str(
        LineCount) + ' road center lines where ' + where_clause
    print 'deleting ' + str(AliasCount) + ' alias rows where ' + where_clause
    DeleteFeatures_management(in_features="Steward_Delete_Roads")
    DeleteRows_management(in_rows="Steward_Delete_Alias")
def reportExtensionForQCGDB(singlePartPointErrors, singlePartLineErrors):
    """Append single-part error feature counts to the error report CSV.

    Gets a count for the single-part point and line error feature classes
    (when both exist) and appends "<name>, <count>" lines to errorReportCSV.
    """
    if Exists(singlePartPointErrors) and Exists(singlePartLineErrors):
        singlePartPointFeaturesName = returnFeatureClass(singlePartPointErrors)
        singlePartPointErrorsResult = GetCount_management(
            singlePartPointErrors)
        singlePartPointErrorsCount = int(
            singlePartPointErrorsResult.getOutput(0))

        singlePartLineFeaturesName = returnFeatureClass(singlePartLineErrors)
        singlePartLineErrorsResult = GetCount_management(singlePartLineErrors)
        singlePartLineErrorsCount = int(
            singlePartLineErrorsResult.getOutput(0))
        try:
            with open(errorReportCSV, 'a') as fHandle:
                fHandle.write(singlePartPointFeaturesName + ', ' +
                              str(singlePartPointErrorsCount) + '\n')
                fHandle.write(singlePartLineFeaturesName + ', ' +
                              str(singlePartLineErrorsCount) + '\n')
        # Fixed: narrowed the bare except to file I/O errors so unrelated
        # failures (e.g. KeyboardInterrupt) are no longer swallowed.
        except (IOError, OSError):
            print("There was an error writing to the file.")
    else:
        print("The Single Part output was not found.")
        print(
            "Will not add the Single Part information to the errors report csv."
        )
def main():
    """Dissolve the state-system routes from the routes source centerlines."""
    print("Starting to dissolve the routes source.")

    # Select just the state system (Interstate, US, and K routes).
    stateSystemSelectedFeatures = 'StateSystem_Features'
    selectQuery1 = '''LRS_ROUTE_PREFIX in ('I', 'U', 'K')'''
    MakeFeatureLayer_management (routesSourceCenterlines, stateSystemSelectedFeatures, selectQuery1)
    CopyFeatures_management(stateSystemSelectedFeatures, routesSourcePreDissolveOutput)

    # Report how many features are about to be dissolved.
    selectedCount = int(GetCount_management(stateSystemSelectedFeatures).getOutput(0))
    print('Found ' + str(selectedCount) + ' state system features to be dissolved.')

    # Removed STATE_FLIP_FLAG because we've already flipped the data prior to this process.
    dissolveFields = "KDOT_DIRECTION_CALC;KDOT_LRS_KEY;LRS_COUNTY_PRE;LRS_ROUTE_PREFIX;LRS_ROUTE_SUFFIX;LRS_UNIQUE_IDENT"
    statisticsFields = "BEG_NODE MIN;BEG_NODE MAX;END_NODE MAX;END_NODE MIN;COUNTY_BEGIN_MP MIN;COUNTY_END_MP MAX"
    ## Use a selection for the state system so that it will work the first time.
    ## Then, later expand the selection and the amount of routes that are attempted for
    ## the dissolve.
    Dissolve_management(stateSystemSelectedFeatures, routesSourceDissolveOutput, dissolveFields, statisticsFields, "SINGLE_PART", "DISSOLVE_LINES")
    print("Completed dissolving the routes source.")
def writeStewardPointCounts():
    """Write per-steward centerline point counts to outputFile as CSV.

    Builds the set of STEWARD values found in the original centerline
    points, then for each steward counts the matching original and
    simplified points and writes one
    "StewardID , OriginalCount , SimplifiedCount" row.
    NOTE(review): depends on module-level globals originalPointsSource,
    originalPointsAsLayer, simplifiedPointsSource, simplifiedPointsAsLayer,
    selectionTypeToUse and outputFile.
    """
    MakeFeatureLayer_management(originalPointsSource, originalPointsAsLayer)
    MakeFeatureLayer_management(simplifiedPointsSource, simplifiedPointsAsLayer)

    allStewardsDict = dict()

    # Programmatically grab the stewards instead of manually listing them here.
    newCursor = daSearchCursor(originalPointsAsLayer, ["OID@", "STEWARD"])
    for cursorItem in newCursor:
        allStewardsDict[cursorItem[1]] = 'True'

    # Release the cursor so the underlying layer is not locked.
    if 'newCursor' in locals():
        try:
            del newCursor
        except:
            print("The cursor exists, but it could not be successfully removed.")
    else:
        print("The cursor has already been removed.")

    try:
        # Bug fix: the original opened outputFile and never closed it on the
        # success path ("wHandle.close()" only ran in the except branch).
        # "with" guarantees the handle is closed either way.
        with open(outputFile, 'w') as wHandle:
            columnNames = "StewardID , OriginalCount , SimplifiedCount\n"
            wHandle.write(columnNames)

            # For each steward in the list, get a count of all of the original
            # centerline points and the simplified road centerlines points.
            # Next, write the data out to a text file in comma separated form.
            for stewardItem in allStewardsDict.keys():
                if stewardItem is not None:
                    selectionQuery = """ "STEWARD" = '""" + stewardItem + """' """
                    SelectLayerByAttribute_management(originalPointsAsLayer, selectionTypeToUse, selectionQuery)
                    oCountResult = GetCount_management(originalPointsAsLayer)
                    originalCount = int(oCountResult.getOutput(0))
                    SelectLayerByAttribute_management(simplifiedPointsAsLayer, selectionTypeToUse, selectionQuery)
                    sCountResult = GetCount_management(simplifiedPointsAsLayer)
                    simplifiedCount = int(sCountResult.getOutput(0))
                    strToWrite = "'" + stewardItem + "'" + ", " + str(originalCount) + ", " + str(simplifiedCount) + "\n"
                    print("Writing " + strToWrite + " to the file: " + outputFile + ".")
                    wHandle.write(strToWrite)
                else:
                    print("The stewardItem is None, so it will be skipped.")
    except:
        # Best-effort error report; kept broad to match the module's style.
        errorInfo = sys.exc_info()[0]
        errorInfo1 = sys.exc_info()[1]
        print("An error occurred: " + str(errorInfo) + str(errorInfo1))
Example #25
0
def checkAddressPointFrequency(AddressPoints, gdb):
    """Keep only the duplicate rows in the address point frequency table.

    Runs a frequency analysis over the address point fields, removes the
    rows that are unique (or not primary locations), and reports the
    outcome through userMessage.
    """
    from arcpy import Frequency_analysis, MakeTableView_management, DeleteRows_management, GetCount_management, Delete_management, Exists
    from os.path import join

    freqTable = join(gdb, "AP_Freq")
    freqView = "fl"

    # Start from a clean slate: drop any frequency table left from a prior run.
    if Exists(freqTable):
        Delete_management(freqTable)

    # Frequency analysis across all of the address fields.
    Frequency_analysis(AddressPoints, freqTable, "MUNI;HNO;HNS;PRD;STP;RD;STS;POD;POM;ZIP;BLD;FLR;UNIT;ROOM;SEAT;LOC;LOCTYPE", "")

    # Total record count in the frequency table.
    totalCount = int(GetCount_management(freqTable).getOutput(0))

    # View of the rows that are unique or not primary locations.
    whereClause = "Frequency = 1 or LOCTYPE <> 'Primary'"
    MakeTableView_management(freqTable, freqView, whereClause)
    uniqueCount = int(GetCount_management(freqView).getOutput(0))

    if totalCount != uniqueCount:
        # Duplicates exist: strip the unique rows so only duplicates remain.
        DeleteRows_management(freqView)
        userMessage("Checked frequency of address points. Results are in table " + freqTable)
    else:
        # Everything was unique; the scratch table is no longer needed.
        Delete_management(freqTable)
        userMessage("All address points are unique.")
Example #26
0
def RouteCheck(RID):
    """Stage the event rows for one route so they can be reviewed/updated.

    RID -- the LRS key of the route, e.g. '030C0011800W0'; the character at
    index 3 encodes the route class (L, C, or R/M, with R and M folded into
    the combined 'RM' class).  Rows matching the RID are copied from the
    <class>P_NON_STATE_EVENTS table view into an in_memory table and joined
    onto the "NonStateUpdate" layer for inspection.
    NOTE(review): depends on the module-level NonState feature class and
    arcpy functions imported at file scope.
    """
    #when running this function, pass the RID/LRS KEY Value into the function to update the desired RID
    #RID is structured '030C0011800W0'
    #Class should be L, C, or RM
    print "what route number should be updated?"
    #RID = '030C0011800W0'
    # Route class is encoded at position 3 of the LRS key.
    Class = RID[3]
    if Class in ("R", "M"):
        Class = "RM"
    else:
        pass
    print RID
    # Hyphens are not legal in table names; swap them for underscores.
    RID_ = RID.replace('-', '_')
    RIDExp = "RID = '" + RID + "'"
    tablename = Class + RID_
    print RIDExp
    print "Updating route " + str(RID) + " in table " + str(RID_)
    if Exists("UpdateGISPROD"):
        print "this exists"
        pass
    else:
        # First run: build the events table view and the NonState layer,
        # filtered to county routes with uncounted mileage or proposed surface.
        AddTable = Class + "P_NON_STATE_EVENTS"
        MakeTableView_management(r"in_memory/" + AddTable, tablename + "_view",
                                 "#")
        MakeFeatureLayer_management(
            NonState, "NonStateUpdate",
            "((LRS_KEY LIKE '%C%' OR LRS_ROUTE_PREFIX = 'C') AND (MILEAGE_COUNTED = -1)) OR (LRS_DIR_OF_TRAVEL = 'P' and SURFACE = 'Propose')"
        )

    # Copy only the rows for this RID into an in_memory working table.
    TableToTable_conversion(tablename + "_view", "in_memory", tablename,
                            RIDExp)
    if str(GetCount_management(tablename)) == '0':
        print "No Records to Calculate"
    else:
        # Drop any stale join before rebuilding the layer and re-joining.
        try:
            RemoveJoin_management("NonStateUpdate")
        except:
            print "no NonStateUpdate, creating the NonStateSystem layer"
        MakeFeatureLayer_management(
            NonState, "NonStateUpdate",
            "(MILEAGE_COUNTED = -1 OR SURFACE = 'Propose')")
        AddJoin_management("NonStateUpdate", "ID2", tablename,
                           "FID_NON_STATE_SYSTEM", "KEEP_COMMON")
        print "Check the numbers one more time, and review"
        print "start Edit session on NonStateUpdate now and type RouteCalc(RID) if it all looks good"
    print "RteChk script completed successfully"
Example #27
0
def delForTesting():
    """Delete one hard-coded row (ObjectID 91783) from the enterprise
    KTRIPS_ROUTES feature class -- a throwaway helper used for testing.

    NOTE(review): depends on module-level globals env, repo and gdb; the
    enterprise path below embeds a literal backslash.
    """
    env.workspace = repo
    # Hard-coded test row to remove.
    whereClause = "ObjectID = 91783"
    enterprisedbRoutes = gdb + "\INTERMODAL.DBO.KTRIPS_ROUTES"
    loadedRoutesLayer = 'routesAsLyr'
    MakeFeatureLayer_management(enterprisedbRoutes, loadedRoutesLayer, "#",
                                "#")
    SelectLayerByAttribute_management(loadedRoutesLayer, "NEW_SELECTION",
                                      whereClause)
    currentCount = GetCount_management(loadedRoutesLayer)
    print "Selected " + str(currentCount) + " rows to delete."
    # Probably have to disconnect users before the next part will work. =(
    if int(str(currentCount)) > 0:
        print 'Deleting selected rows...'
        DeleteFeatures_management(loadedRoutesLayer)
    else:
        print 'Will not delete as there are no rows selected.'
def copy_tables(input_ws, output_ws, foreach_table = None):
    """
    copies tables or sends each table to a function
        input_ws - the input database
        output_ws - the output database
        foreach_table - the optional function to process each table
    """
    from arcpy import env, ListTables, AddMessage, AddWarning, \
        TableToGeodatabase_conversion, GetCount_management, \
        TableToTable_conversion
    from os.path import join

    env.workspace = input_ws
    for tbl in ListTables():
        AddMessage('Processing table: {}'.format(tbl))

        # Attachment tables may be skipped via the env flag.
        if env.skipAttach and '_attach' in tbl.lower():
            AddWarning('Skipping attachments table {}'.format(tbl))
            continue

        # Empty tables may be skipped via the env flag.
        if env.skipEmpty and int(GetCount_management(tbl)[0]) == 0:
            AddWarning('Skipping because table is empty: {} (empty)'.format(tbl))
            continue

        try:
            if foreach_table:
                # Delegate the whole table to the caller-supplied handler.
                foreach_table(input_ws, output_ws, tbl)
            else:
                # Default: copy the table, replacing any existing output.
                destination = join(output_ws, get_name(tbl))
                delete_existing(destination)
                TableToTable_conversion(tbl, output_ws, get_name(tbl))
        except Exception as e:
            AddWarning('Error on table: {} - {}'.format(tbl, e))
Example #29
0
def checkAddressPointFrequency(AddressPoints, gdb):
    """Keep only the duplicate rows in the address point frequency table.

    Runs a frequency analysis over the address point fields, removes rows
    that are unique (or not primary locations), and reports the outcome via
    userMessage.  If every point is unique the scratch table is deleted.
    NOTE(review): duplicate of the earlier checkAddressPointFrequency in
    this file; the later definition wins at import time.
    """
    from arcpy import Frequency_analysis, MakeTableView_management, DeleteRows_management, GetCount_management, Delete_management, Exists
    from os.path import join

    AP_Freq = join(gdb, "AP_Freq")
    fl = "fl"

    #remove the frequency table if it exists already
    if Exists(AP_Freq):
        Delete_management(AP_Freq)

    #run frequency analysis
    Frequency_analysis(
        AddressPoints, AP_Freq,
        "MUNI;HNO;HNS;PRD;STP;RD;STS;POD;POM;ZIP;BLD;FLR;UNIT;ROOM;SEAT;LOC;LOCTYPE",
        "")

    #get count of records
    rFreq = GetCount_management(AP_Freq)
    rCount = int(rFreq.getOutput(0))

    #delete records
    #make where clause selecting rows that are unique or non-primary
    wc = "Frequency = 1 or LOCTYPE <> 'Primary'"

    #make feature layer
    MakeTableView_management(AP_Freq, fl, wc)

    #get count of the results
    result = GetCount_management(fl)
    count = int(result.getOutput(0))

    if rCount != count:
        #Delete the unique rows so only duplicates remain in AP_Freq
        DeleteRows_management(fl)
        userMessage(
            "Checked frequency of address points. Results are in table " +
            AP_Freq)
    elif rCount == count:
        Delete_management(AP_Freq)
        userMessage("All address points are unique.")
def TranscendRampReplacement():
    """Replace ramp features in the routes source with Interchange_Ramps data.

    Deletes the existing 'X' prefixed ramp routes from
    routesSourceCenterlines, repairs a copy of the Interchange_Ramp feature
    class (repair modifies its input, hence the copy), then appends the
    repaired ramps back into the routes source with a field mapping that
    renames LRS_KEY / BEG_CNTY_LOGMILE / END_CNTY_LOGMILE to the
    routes-source field names.
    NOTE(review): depends on module-level globals routesSourceCenterlines,
    routesSourceFeatureLayer, interchangeRampFC and
    interchangeRampFCRepairCopy, plus FieldMap/FieldMappings from arcpy.
    """
    MakeFeatureLayer_management (routesSourceCenterlines, routesSourceFeatureLayer)

    SelectLayerByAttribute_management(routesSourceFeatureLayer, "CLEAR_SELECTION")
    # Select ramp routes ('X' prefix) that carry a ramp LRS key.
    selectionQuery = """ "LRS_ROUTE_PREFIX" = 'X' AND "Ramps_LRSKey" IS NOT NULL AND "Ramps_LRSKey" <> '' """
    SelectLayerByAttribute_management(routesSourceFeatureLayer, "NEW_SELECTION", selectionQuery)

    countResult = GetCount_management(routesSourceFeatureLayer)
    intCount = int(countResult.getOutput(0))

    print('Selected ' + str(intCount) + ' ramp features to be replaced.')

    if intCount > 0:
        print("Deleting those ramp features from the " + returnFeatureClass(routesSourceCenterlines) + " layer.")
        DeleteFeatures_management(routesSourceFeatureLayer)
    else:
        print("No features selected. Skipping feature deletion.")

    # Remove the matching routes to prepare for the Interchange_Ramps information.
    ## After error matching is achieved, use replace geometry and replace attributes to not lose data
    ## from using the less effective method of:
    ## deleting the old Interchange_Ramps information, then re-adding with append.

    # Add the Interchange_Ramps information.

    # Checking to see if the copy for repairing already exists.
    # If so, remove it.
    if Exists(interchangeRampFCRepairCopy):
        Delete_management(interchangeRampFCRepairCopy)
    else:
        pass
    # Create a new file for the copy for repairing since repair modifies the input.
    CopyFeatures_management(interchangeRampFC, interchangeRampFCRepairCopy)

    # Repairs the geometry, modifies input.
    # Deletes features with null geometry (2 expected, until Shared.Interchange_Ramp is fixed).
    print("Repairing ramp geometry in the " + returnFeatureClass(interchangeRampFCRepairCopy) + " layer.")
    RepairGeometry_management(interchangeRampFCRepairCopy, "DELETE_NULL")

    # Create a fieldmapping object so that the Interchange_Ramps can be correctly imported with append.
    appendInputs = [interchangeRampFCRepairCopy]
    appendTarget = routesSourceCenterlines
    schemaType = "NO_TEST"

    # Field mapping goes here.
    # Interchange_Ramp.LRS_KEY to RoutesSource_Test.LRSKEY
    fm_Field1 = FieldMap()
    fm_Field1.addInputField(interchangeRampFCRepairCopy, "LRS_KEY")
    fm_Field1_OutField = fm_Field1.outputField
    fm_Field1_OutField.name = 'LRSKEY'
    fm_Field1.outputField = fm_Field1_OutField

    # Interchange_Ramp.BEG_CNTY_LOGMILE to RoutesSource_Test.NON_STATE_BEGIN_MP
    fm_Field2 = FieldMap()
    fm_Field2.addInputField(interchangeRampFCRepairCopy, "BEG_CNTY_LOGMILE")
    fm_Field2_OutField = fm_Field2.outputField
    fm_Field2_OutField.name = 'NON_STATE_BEGIN_MP'
    fm_Field2.outputField = fm_Field2_OutField

    # Interchange_Ramp.END_CNTY_LOGMILE to RoutesSource_Test.NON_STATE_END_MP
    fm_Field3 = FieldMap()
    fm_Field3.addInputField(interchangeRampFCRepairCopy, "END_CNTY_LOGMILE")
    fm_Field3_OutField = fm_Field3.outputField
    fm_Field3_OutField.name = 'NON_STATE_END_MP'
    fm_Field3.outputField = fm_Field3_OutField

    # Create the fieldMappings object
    interchangeRampsMappings = FieldMappings()
    interchangeRampsMappings.addFieldMap(fm_Field1)
    interchangeRampsMappings.addFieldMap(fm_Field2)
    interchangeRampsMappings.addFieldMap(fm_Field3)

    # Add the fieldMap objects to the fieldMappings object.
    print("Appending the features from " + returnFeatureClass(interchangeRampFCRepairCopy) + " into " + returnFeatureClass(routesSourceCenterlines) + ".")
    Append_management(appendInputs, appendTarget, schemaType, interchangeRampsMappings)
def main():
    """Calculate the label field for the NG911 road centerline or address points.

    Builds a concatenation expression from the layer object's LABEL_FIELDS,
    calculates it into the LABEL field (optionally only where blank), then
    repairs any rows the field calculator left behind because of null parts
    using an update cursor, and finally collapses repeated whitespace.
    NOTE(review): depends on module-level names GetParameterAsText, basename,
    dirname, getFCObject, userMessage, Editor, UpdateCursor and the arcpy
    functions imported at file scope.
    """
    layer = GetParameterAsText(0)
    updateBlanksOnly = GetParameterAsText(1)

    expression = ""
    a = ""
    field_list = []

    #define object & field list
    if basename(layer) in ("RoadCenterline", "AddressPoints"):
        a = getFCObject(layer)
        field_list = a.LABEL_FIELDS
    else:
        userMessage(layer + " does not work with this tool. Please select the NG911 road centerline or address point file.")
        # Bug fix: stop here. The original fell through and crashed with an
        # AttributeError at "a.LABEL" below whenever the layer was unsupported.
        return

    #make sure the object is something
    if a != "":
        #start at 1 since 0 is the label field itself
        i = 1

        #create the expression
        while i < len(field_list):
            #since the house number needs a string conversion, we need to have a slightly different expression for the first piece
            if i == 1:
                if basename(layer) == "AddressPoints":
                    expression = 'str(!' +  field_list[i] + '!) + " " + !'
                else:
                    expression = '!' + field_list[i] + '! + " " + !'

            else:
                expression = expression + field_list[i] + '! + " " + !'

            i += 1

        # Trim the dangling ' + " " + !' tail left by the loop.
        expression = expression[:-10]

    userMessage(expression)

    labelField = a.LABEL

    userMessage(labelField)

    if expression != "":
        lyr = "lyr"
        MakeFeatureLayer_management(layer, lyr)

        qry = labelField + " is null or " + labelField + " = '' or " + labelField + " = ' '"

        #select only the blank ones to update if that's what the user wanted
        if updateBlanksOnly == "true":
            SelectLayerByAttribute_management(lyr, "NEW_SELECTION", qry)

        userMessage("Calculating label...")
        CalculateField_management(lyr, labelField, expression, "PYTHON_9.3")

        #make sure no records were left behind
        SelectLayerByAttribute_management(lyr, "NEW_SELECTION", qry)
        result = GetCount_management(lyr)
        count = int(result.getOutput(0))

        #if the count is higher than 0, it means the table had null values in some of the concatonated fields
        if count > 0:
            gdb = dirname(dirname(layer))
            fields = tuple(field_list)

            #start edit session
            edit = Editor(gdb)
            edit.startEditing(False, False)

            #run update cursor to rebuild the label, skipping null/blank parts
            with UpdateCursor(layer, fields, qry) as rows:
                for row in rows:
                    field_count = len(fields)
                    start_int = 1
                    label = ""

                    #loop through the fields to see what's null & skip it
                    while start_int < field_count:
                        if row[start_int] is not None:
                            if row[start_int] not in ("", " "):
                                label = label + " " + str(row[start_int])
                        start_int = start_int + 1

                    row[0] = label
                    rows.updateRow(row)

            edit.stopEditing(True)


        #clean up all labels: collapse runs of whitespace to single spaces
        trim_expression = '" ".join(!' + labelField + '!.split())'
        CalculateField_management(layer, labelField, trim_expression, "PYTHON_9.3")
Example #32
0
def checkFrequency(fc, freq, fields, gdb, version):
    """Report records in fc that share identical values across `fields`.

    Runs Statistics_analysis grouped on `fields`, then for every group with
    FREQUENCY > 1 looks up the unique IDs of the duplicated records and logs
    them into the FieldValuesCheckResults table via RecordResults.

    fc      -- feature class/table to check (address points or road centerline)
    freq    -- full path of the scratch frequency table to (re)create
    fields  -- semicolon-delimited field list to group on
    gdb     -- geodatabase holding fc and the results table
    version -- NG911 schema version; any value but "10" also filters SUBMIT = 'Y'
    NOTE(review): depends on module-level names join, Statistics_analysis,
    SearchCursor, strftime, getUniqueIDField, RecordResults and userMessage.
    """
    fl = "fl"
    fl1 = "fl1"
    wc = "FREQUENCY > 1"

    #remove the frequency table if it exists already
    if Exists(freq):
        try:
            Delete_management(freq)
        except:
            userMessage("Please manually delete " + freq + " and then run the frequency check again")

    if not Exists(freq):
        try:
            #see if we're working with address points or roads, create a where clause
            filename = ""
            if freq == join(gdb, "AP_Freq"):
                filename = "AddressPoints"
                wc1 = "HNO <> 0"
            elif freq == join(gdb, "Road_Freq"):
                filename = "RoadCenterline"
                wc1 = "L_F_ADD <> 0 AND L_T_ADD <> 0 AND R_F_ADD <> 0 AND R_T_ADD <> 0"

            if version != "10":
                wc1 = wc1 + " AND SUBMIT = 'Y'"

            #run query on fc to make sure 0's are ignored
            MakeTableView_management(fc, fl1, wc1)

            #split field names
            fieldsList = fields.split(";")
            fieldCountList = []
            fl_fields = []
            for f in fieldsList:
                f = f.strip()
                fList = [f,"COUNT"]
                fieldCountList.append(fList)
                fl_fields.append(f)

            #run frequency analysis (Statistics grouped on the case fields)
            Statistics_analysis(fl1, freq, fieldCountList, fields)

            #make feature layer restricted to FREQUENCY > 1 groups
            MakeTableView_management(freq, fl, wc)

            #get count of the results
            result = GetCount_management(fl)
            count = int(result.getOutput(0))

            if count > 0:

                #set up parameters to report duplicate records
                values = []
                recordType = "fieldValues"
                today = strftime("%m/%d/%y")

                #add information to FieldValuesCheckResults for all duplicates

                #get field count
                fCount = len(fl_fields)

                #get the unique ID field name
                id1 = getUniqueIDField(filename.upper())

                #run a search on the frequency table to report duplicate records
                # NOTE: the cursor below captured wc = "FREQUENCY > 1" before
                # the loop reassigns wc to a per-row lookup clause.
                with SearchCursor(freq, fl_fields, wc) as rows:
                    for row in rows:
                        i = 0
                        #generate where clause to find duplicate ID's
                        wc = ""
                        while i < fCount:
                            stuff = ""
                            if row[i] != None:
                                try:
                                    stuff = " = '" + row[i] + "' "
                                except:
                                    # non-string value: fall back to unquoted literal
                                    stuff = " = " + str(row[i]) + " "
                            else:
                                stuff = " is null "
                            wc = wc + fl_fields[i] + stuff + "and "
                            i += 1

                        #trim last "and " off where clause
                        wc = wc[0:-5]

                        #find records with duplicates to get their unique ID's
                        with SearchCursor(fl1, (id1), wc) as sRows:
                            for sRow in sRows:
                                fID = sRow[0]
                                report = str(fID) + " has duplicate field information"
                                val = (today, report, filename, "", fID)
                                values.append(val)

                #report duplicate records
                if values != []:
                    RecordResults(recordType, values, gdb)
                    userMessage("Checked frequency. Results are in table FieldValuesCheckResults")

            elif count == 0:
                userMessage("Checked frequency. All records are unique.")

            #clean up the scratch views and table
            Delete_management(fl)
            Delete_management(fl1)

            try:
                Delete_management(freq)
            except:
                userMessage("Could not delete frequency table")
        except:
            userMessage("Could not fully run frequency check")
def iteratorprocess():
    """Summarize accident geocoding success rates per county and overall.

    For each county in coAbbrAndNoList, counts total, geocoded, and offset
    accident points (KDOT and non-KDOT 'NK' variants) from the extract GDB,
    computes percentages, and appends one row per county -- plus NG911 and
    overall totals -- to reportResultsList, which is finally written into
    the recreated results table via an insert cursor.
    NOTE(review): depends on module-level globals extractDataGDBPath,
    coAbbrAndNoList, reportResultsList, insertCursorFields and the helpers
    recreateResultsTable / InsertCursor.
    """
    env.workspace = extractDataGDBPath
    accDataFeaturesList = ListFeatureClasses("CrashLocation.GEO.ACC*")
    # Use the FullTable for the overall total.
    accDataFullTable = os.path.join(extractDataGDBPath, r'CrashLocation.GEO.GIS_GEOCODE_ACC')
    
    withRoadsTotal = 0
    
    gcKTotal = 0
    gcNKTotal = 0
    ofsKTotal = 0
    ofsNKTotal = 0
    NG911CoAccidents = 0
    
    inMemoryTempLayer = 'inMemoryTempFC'
    
    for countyItem in coAbbrAndNoList:
        countyNumber = countyItem[1]
        countyName = countyItem[2]
        
        # Per-county feature class name variants (KDOT vs non-KDOT 'NK',
        # plain points vs offset points).
        accDataPointsKDOT = "CrashLocation.GEO.ACC_PTS_" + countyNumber
        accDataPointsNK = "CrashLocation.GEO.ACC_PTS_" + countyNumber + "_NK"
        accDataOffsetKDOT = "CrashLocation.GEO.ACC_OFS_PTS_" + countyNumber
        accDataOffsetNK = "CrashLocation.GEO.ACC_OFS_PTS_" + countyNumber + "_NK"
        
        # Re-zero the loop variables here so that the table doesn't get incorrect information in it.
        
        totalAccidents = 0
        
        geocodedAccidents = 0
        geocodedAccidentsNK = 0
        offsetAccidents = 0
        offsetAccidentsNK = 0
        
        gcPercent = '0.00'
        gcNKPercent = '0.00'
        ofsPercent = '0.00'
        ofsNKPercent = '0.00'
        
        if (accDataPointsKDOT in accDataFeaturesList) or (accDataPointsNK in accDataFeaturesList) or \
            (accDataOffsetKDOT in accDataFeaturesList) or (accDataOffsetNK in accDataFeaturesList):
            
            if accDataPointsKDOT in accDataFeaturesList:
                
                # Drop any leftover temp layer from a prior iteration.
                try:
                    Delete_management(inMemoryTempLayer)
                except:
                    pass
                
                accDataPointsKDOTPath = os.path.join(extractDataGDBPath, accDataPointsKDOT)
                
                MakeFeatureLayer_management(accDataPointsKDOTPath, inMemoryTempLayer)
                
                #SelectLayerByAttribute_management(inMemoryTempLayer, 'CLEAR_SELECTION')
                
                tempResult = GetCount_management(inMemoryTempLayer)
                
                totalAccidents = int(tempResult.getOutput(0))
                
                if totalAccidents > 0:
                    withRoadsTotal += totalAccidents
                else:
                    pass
                
                # Status <> 'U' filters out un-geocoded records.
                selectWhereClause = """ Status <> 'U' """
                
                SelectLayerByAttribute_management(inMemoryTempLayer, 'NEW_SELECTION', selectWhereClause)
                
                tempResult = GetCount_management(inMemoryTempLayer)
                
                geocodedAccidents = int(tempResult.getOutput(0))
                
            else:
                pass
            
            if accDataPointsNK in accDataFeaturesList:
                
                try:
                    Delete_management(inMemoryTempLayer)
                except:
                    pass
                
                accDataPointsNKPath = os.path.join(extractDataGDBPath, accDataPointsNK)
                
                MakeFeatureLayer_management(accDataPointsNKPath, inMemoryTempLayer)
                
                selectWhereClause = """ Status <> 'U' """
                
                SelectLayerByAttribute_management(inMemoryTempLayer, 'NEW_SELECTION', selectWhereClause)
                
                tempResult = GetCount_management(inMemoryTempLayer)
                
                geocodedAccidentsNK = int(tempResult.getOutput(0))
                
            else:
                pass
            
            if accDataOffsetKDOT in accDataFeaturesList:
                
                try:
                    Delete_management(inMemoryTempLayer)
                except:
                    pass
                
                accDataOffsetKDOTPath = os.path.join(extractDataGDBPath, accDataOffsetKDOT)
                
                MakeFeatureLayer_management(accDataOffsetKDOTPath, inMemoryTempLayer)
                
                # Offset layers are counted via the isOffset flag instead.
                selectWhereClause = """ isOffset IS NOT NULL """
                
                SelectLayerByAttribute_management(inMemoryTempLayer, 'NEW_SELECTION', selectWhereClause)
                
                tempResult = GetCount_management(inMemoryTempLayer)
                
                offsetAccidents = int(tempResult.getOutput(0))
                
            else:
                pass
            
            if accDataOffsetNK in accDataFeaturesList:
                
                try:
                    Delete_management(inMemoryTempLayer)
                except:
                    pass
                
                accDataOffsetNKPath = os.path.join(extractDataGDBPath, accDataOffsetNK)
                
                MakeFeatureLayer_management(accDataOffsetNKPath, inMemoryTempLayer)
                
                selectWhereClause = """ isOffset IS NOT NULL """
                
                SelectLayerByAttribute_management(inMemoryTempLayer, 'NEW_SELECTION', selectWhereClause)
                
                tempResult = GetCount_management(inMemoryTempLayer)
                
                offsetAccidentsNK = int(tempResult.getOutput(0))
                
            else:
                pass
            
            # Percentages relative to this county's total; counties with no
            # points at all report None instead of 0.00.
            try:
                gcPercent = "{0:.2f}".format((float(geocodedAccidents) / totalAccidents) * 100)
                gcNKPercent = "{0:.2f}".format((float(geocodedAccidentsNK) / totalAccidents) * 100)
                ofsPercent = "{0:.2f}".format((float(offsetAccidents) / totalAccidents) * 100)
                ofsNKPercent = "{0:.2f}".format((float(offsetAccidentsNK) / totalAccidents) * 100)
            except ZeroDivisionError:
                gcPercent = None
                gcNKPercent = None
                ofsPercent = None
                ofsNKPercent = None
            except:
                pass
            
            gcKTotal += geocodedAccidents
            gcNKTotal += geocodedAccidentsNK
            ofsKTotal += offsetAccidents
            ofsNKTotal += offsetAccidentsNK
            NG911CoAccidents += totalAccidents
            
            print("\n" + countyName + " County has " + str(totalAccidents) + " totalAccidents.")
            print("gcPercent: " + gcPercent + " gcNKPercent: " + gcNKPercent +
                  " ofsPercent: " + ofsPercent + " ofsNKPercent: " + ofsNKPercent)
            
        # To get the withRoadsTotal, sum the number for each county that
        # returned a non-zero result for totalAccidents.
        
        else:
            pass
        
        reportResult = [countyName, totalAccidents, gcPercent, gcNKPercent, ofsPercent, ofsNKPercent]
        reportResultsList.append(reportResult)
    
    try:
        Delete_management(inMemoryTempLayer)
    except:
        pass
    
    # Overall total comes from the full geocode table, not the per-county FCs.
    MakeTableView_management(accDataFullTable, inMemoryTempLayer)
    
    tempResult = GetCount_management(inMemoryTempLayer)
    
    overallTotal = int(tempResult.getOutput(0))
    
    for reportResultItem in reportResultsList:
        print str(reportResultItem[0])
    
    gcNG911Percent = "{0:.2f}".format((float(gcKTotal) / NG911CoAccidents) * 100)
    gcNKNG911Percent = "{0:.2f}".format((float(gcNKTotal) / NG911CoAccidents) * 100)
    ofsNG911Percent = "{0:.2f}".format((float(ofsKTotal) / NG911CoAccidents) * 100)
    ofsNKNG911Percent = "{0:.2f}".format((float(ofsNKTotal) / NG911CoAccidents) * 100)
    
    print "\n" + "The NG911Total is: " + str(NG911CoAccidents)
    print( " with gcPercent: " + gcNG911Percent + " gcNKPercent: " + gcNKNG911Percent + 
           " ofsPercent: " + ofsNG911Percent + " ofsNKPercent: " + ofsNKNG911Percent)
    
    reportResult = ["NG911Total", NG911CoAccidents, gcNG911Percent, gcNKNG911Percent, ofsNG911Percent, ofsNKNG911Percent]
    reportResultsList.append(reportResult)
    
    gcOverallPercent = "{0:.2f}".format((float(gcKTotal) / overallTotal) * 100)
    gcNKOverallPercent = "{0:.2f}".format((float(gcNKTotal) / overallTotal) * 100)
    ofsOverallPercent = "{0:.2f}".format((float(ofsKTotal) / overallTotal) * 100)
    ofsNKOverallPercent = "{0:.2f}".format((float(ofsNKTotal) / overallTotal) * 100)
    
    print "\n" + "The OverallTotal is: " + str(overallTotal)
    print (" with gcPercent: " + gcOverallPercent + " gcNKPercent: " + gcNKOverallPercent +
           " ofsPercent: " + ofsOverallPercent + " ofsNKPercent: " + ofsNKOverallPercent)
    
    reportResult = ["OverallTotal", overallTotal, gcOverallPercent, gcNKOverallPercent, ofsOverallPercent, ofsNKOverallPercent]
    reportResultsList.append(reportResult)
    
    resultsTablePath = recreateResultsTable()
    
    # Delete the previous table information, if any, then create an insert cursor
    # and place all of the report result items in the table.
    
    newICursor = InsertCursor(resultsTablePath, insertCursorFields)
    
    for reportResultItem in reportResultsList:
        insertedRowID = newICursor.insertRow(reportResultItem)
        print "Inserted a new row into the REPORT_INFO table with OID: " + str(insertedRowID)
Example #34
0
def getFastCount(lyr):
    """Return the record count of the given layer/table view as an int."""
    from arcpy import GetCount_management
    return int(GetCount_management(lyr).getOutput(0))
Example #35
0
def ListSplitAndSelect():
    """Select FMIS rows slated for deletion, working around Oracle's
    1000-item IN-list limit (ORA-01795) by chunking the project numbers.

    Reads PROJECT_NUMBER values from the delete-rows table, splits them into
    sub-lists of at most 999, and adds each chunk to the FMIS_TABLE selection
    with an IN (...) clause.  The actual DeleteRows call is commented out.
    NOTE(review): test fragment -- depends on module-level names FMIS_LOAD,
    deltbl, da and mathCeil; will not run properly on its own.
    """
    ## Fragment used for testing, will not run properly on its own.
    
    delcount = GetCount_management(r'Database Connections\CANT_CPMS.sde\CPMS.CPMS_FMIS_GIS_DEL_ROWS')
    print str(delcount)+" records to delete"
    deletelist=list()
    ## Only portion below changed.
    if (not True): # Production version tests for delcount threshold.
        pass

    else: #ORA 01795 - Oracle limited to 1000 statements with select in *
        MakeTableView_management(FMIS_LOAD, "FMIS_TABLE")  # @UndefinedVariable
        MakeTableView_management(deltbl, "deletes")  # @UndefinedVariable
        with da.SearchCursor(deltbl, "PROJECT_NUMBER") as delcur:  # @UndefinedVariable
            for row in delcur:
                DelXID=  ("{0}".format(row[0]))
                #print DelXID + " is being deleted from the FMIS table"
                #AddJoin_management(layer_name,"CROSSINGID", deltbl, "CROSSINGID", "KEEP_ALL")
                #delsel = "PROJECT_NUMBER LIKE '"+DelXID+"'"
                #print delsel
                deletelist.append(DelXID)
                #SelectLayerByAttribute_management("FMIS_TABLE", "ADD_TO_SELECTION", delsel) #delsel not yet defined
                #SelectLayerByAttribute_management("FMIS_TABLE","ADD_TO_SELECTION", delsel)
        #print str(deletelist)
                
        #Take care of > 1000 selection issue here by splitting the long list into a series of lists.
        
        maxListSize = 999
        listStart = 0
        listEnd = maxListSize
        i = 0
        curListSize = len(deletelist)
        # NOTE(review): integer division here truncates; the <= loop bound
        # below still produces enough chunks to cover the whole list.
        loopNumber = mathCeil(curListSize / maxListSize)
        firstContainer = list()
        
        # Creates the list container that holds the lists with the project numbers.
        while i <= loopNumber:
            if listEnd > curListSize:
                listEnd = curListSize
            else:
                pass
            listToHold = deletelist[listStart:listEnd]
            firstContainer.append(listToHold)
            i += 1
            listStart = listEnd
            listEnd = listEnd + maxListSize
        
        # Build one IN (...) clause per chunk and add it to the selection.
        for secondContainer in firstContainer:
            delsel = "PROJECT_NUMBER IN ("
            for projectNum in secondContainer:
                delsel = delsel + """'"""+projectNum+"""', """
            delsel = delsel[:-2] # Slice that removes the last comma and trailing space.
            delsel = delsel + ")" # Adds the closing parenthesis.
            SelectLayerByAttribute_management("FMIS_TABLE", "ADD_TO_SELECTION", delsel) # ADD_TO_SELECTION works like NEW_SELECTION when no current selection.
            print delsel
        
        countResult = GetCount_management("FMIS_TABLE")
        countNum = int(countResult.getOutput(0))
        print countNum
        
        #DeleteRows_management("FMIS_TABLE")
    print "Delete function completed"
Example #36
0
def checkRequiredFieldValues(gdb, folder, esb):
    # Checks that every required field of each table/feature class in the
    # geodatabase has a value, reporting each null via userMessage and
    # recording all issues with RecordResults under "fieldValues".
    #
    # gdb:    path to the geodatabase to walk
    # folder: folder passed to getRequiredFields to look up required fields
    # esb:    ESB layer list passed to getKeyword
    #
    # Fixes vs. the previous revision:
    #  - the per-field null scan used "while k < 0", which never executed,
    #    and never reset k per row, so no nulls were ever reported
    #  - "values != ''" compared a list to a string (always true); now the
    #    results are only recorded when the list is non-empty
    #  - a keyword missing from rfDict no longer silently reuses the
    #    previous file's required-field list
    userMessage("Checking that required fields have all values...")
    from os import path
    from arcpy.da import Walk, SearchCursor
    from arcpy import MakeTableView_management, Delete_management, GetCount_management, ListFields
    from time import strftime

    #get today's date
    today = strftime("%m/%d/%y")

    #get required fields
    rfDict = getRequiredFields(folder)

    oidField = "OBJECTID"
    values = []

    #walk through the tables/feature classes
    for dirpath, dirnames, filenames in Walk(gdb, True, '', False, ["Table","FeatureClass"]):
        for filename in filenames:
            fullPath = path.join(gdb, filename)

            #get the keyword to acquire required field names
            keyword = getKeyword(filename, esb)

            #get the appropriate required field list; default to an empty
            #list so a missing keyword doesn't reuse the previous file's list
            requiredFieldList = rfDict.get(keyword, [])
            rfl = [rf.upper() for rf in requiredFieldList]

            #get list of field names in the feature class
            fields = [aF.name.upper() for aF in ListFields(fullPath)]

            #get the set of required fields actually present in this file
            matchingFields = list(set(rfl) & set(fields))

            #nothing to validate for this file
            if not matchingFields:
                userMessage("No required fields to check for " + filename)
                continue

            #create where clause to select any records where required values aren't populated
            wc = ""
            for field in matchingFields:
                wc = wc + " " + field + " is null or "
            wc = wc[0:-4]

            #make table view using where clause
            lyr = "lyr"
            MakeTableView_management(fullPath, lyr, wc)

            #get count of the results
            result = GetCount_management(lyr)
            count = int(result.getOutput(0))

            #if count is greater than 0, it means a required value somewhere isn't filled in
            if count > 0:
                #make sure the objectID gets included in the search for reporting
                if oidField not in matchingFields:
                    matchingFields.append(oidField)

                #run a search cursor to get any/all records where a required field value is null
                with SearchCursor(fullPath, (matchingFields), wc) as rows:
                    for row in rows:
                        #get object ID of the record
                        oid = str(row[matchingFields.index(oidField)])

                        #check every matching field in the row for nulls
                        for k in range(len(matchingFields)):
                            if row[k] is None:
                                #report the value if it is indeed null
                                report = matchingFields[k] + " is null for ObjectID " + oid
                                userMessage(report)
                                val = (today, report, filename, matchingFields[k], oid)
                                values.append(val)
            else:
                userMessage( "All required values present for " + filename)

            Delete_management(lyr)

    if values != []:
        RecordResults("fieldValues", values, gdb)

    userMessage("Completed check for required field values")
Example #37
0
def geocodeAddressPoints(pathsInfoObject):
    # Geocodes the AddressPoints feature class against a locator built from
    # the RoadCenterline and RoadAlias tables, then reports any records that
    # did not geocode via RecordResults/userMessage.
    #
    # Fix: the HIDDEN branch of the field-visibility loop previously hung off
    # the for-loop (for/else) instead of the if, so it ran once after the loop
    # rather than once per non-LABEL/ZIP field.
    gdb = pathsInfoObject.gdbPath

    env.workspace = gdb
    addressPointPath = "AddressPoints"
    streetPath = "RoadCenterline"
    roadAliasPath = "RoadAlias"

    userMessage("Geocoding address points...")

    gc_table = "GeocodeTable"
    sl_field = "SingleLineInput"
    Locator = "Locator"
    addyview = "addy_view"
    output = "gc_test"

    # Get the fields from the input
    fields = ListFields(addressPointPath)

    # Create a fieldinfo object
    fieldinfo = FieldInfo()

    # Iterate through the fields and set them to fieldinfo:
    # only LABEL and ZIP stay visible, everything else is hidden.
    for field in fields:
        if field.name in ("LABEL", "ZIP"):
            fieldinfo.addField(field.name, field.name, "VISIBLE", "")
        else:
            fieldinfo.addField(field.name, field.name, "HIDDEN", "")

    userMessage("Preparing addresses...")
    # The created addyview layer will have fields as set in fieldinfo object
    MakeTableView_management(addressPointPath, addyview, "", "", fieldinfo)

    # To persist the layer on disk make a copy of the view
    if Exists(gc_table):
        try:
            Delete_management(gc_table)
        except:
            userMessage("Please manually delete the table called gc_table and then run the geocoding again")

    if not Exists(gc_table):
        CopyRows_management(addyview, gc_table)

        #add single line input field for geocoding
        AddField_management(gc_table, sl_field, "TEXT", "", "", 250)

        #calculate field
        exp = '[LABEL] & " " & [ZIP]'
        CalculateField_management(gc_table, sl_field, exp, "VB")

        #generate locator
        fieldMap = """'Primary Table:Feature ID' <None> VISIBLE NONE;'*Primary Table:From Left' RoadCenterline:L_F_ADD VISIBLE NONE;
        '*Primary Table:To Left' RoadCenterline:L_T_ADD VISIBLE NONE;'*Primary Table:From Right' RoadCenterline:R_F_ADD VISIBLE NONE;
        '*Primary Table:To Right' RoadCenterline:R_T_ADD VISIBLE NONE;'Primary Table:Prefix Direction' RoadCenterline:PRD VISIBLE NONE;
        'Primary Table:Prefix Type' RoadCenterline:STP VISIBLE NONE;'*Primary Table:Street Name' RoadCenterline:RD VISIBLE NONE;
        'Primary Table:Suffix Type' RoadCenterline:STS VISIBLE NONE;'Primary Table:Suffix Direction' RoadCenterline:POD VISIBLE NONE;
        'Primary Table:Left City or Place' RoadCenterline:MUNI_L VISIBLE NONE;
        'Primary Table:Right City or Place' RoadCenterline:MUNI_R VISIBLE NONE;
        'Primary Table:Left ZIP Code' RoadCenterline:ZIP_L VISIBLE NONE;'Primary Table:Right ZIP Code' RoadCenterline:ZIP_R VISIBLE NONE;
        'Primary Table:Left State' RoadCenterline:STATE_L VISIBLE NONE;'Primary Table:Right State' RoadCenterline:STATE_R VISIBLE NONE;
        'Primary Table:Left Street ID' <None> VISIBLE NONE;'Primary Table:Right Street ID' <None> VISIBLE NONE;
        'Primary Table:Min X value for extent' <None> VISIBLE NONE;'Primary Table:Max X value for extent' <None> VISIBLE NONE;
        'Primary Table:Min Y value for extent' <None> VISIBLE NONE;'Primary Table:Max Y value for extent' <None> VISIBLE NONE;
        'Primary Table:Left Additional Field' <None> VISIBLE NONE;'Primary Table:Right Additional Field' <None> VISIBLE NONE;
        'Primary Table:Altname JoinID' RoadCenterline:SEGID VISIBLE NONE;'*Alternate Name Table:JoinID' RoadAlias:SEGID VISIBLE NONE;
        'Alternate Name Table:Prefix Direction' RoadAlias:A_PRD VISIBLE NONE;'Alternate Name Table:Prefix Type' <None> VISIBLE NONE;
        'Alternate Name Table:Street Name' RoadAlias:A_RD VISIBLE NONE;'Alternate Name Table:Suffix Type' RoadAlias:A_STS VISIBLE NONE;
        'Alternate Name Table:Suffix Direction' RoadAlias:A_POD VISIBLE NONE"""

        userMessage("Creating address locator...")
        # Process: Create Address Locator
        if Exists(Locator):
            RebuildAddressLocator_geocoding(Locator)
        else:
            try:
                CreateAddressLocator_geocoding("US Address - Dual Ranges", streetPath + " 'Primary Table';" + roadAliasPath + " 'Alternate Name Table'", fieldMap, Locator, "")
            except:
                # Retry with a field map that also carries Display X/Y, and
                # with the ARC/NODE snapping disabled.
                try:
                    fieldMap = """'Primary Table:Feature ID' <None> VISIBLE NONE;'*Primary Table:From Left' RoadCenterline:L_F_ADD VISIBLE NONE;
                    '*Primary Table:To Left' RoadCenterline:L_T_ADD VISIBLE NONE;'*Primary Table:From Right' RoadCenterline:R_F_ADD VISIBLE NONE;
                    '*Primary Table:To Right' RoadCenterline:R_T_ADD VISIBLE NONE;'Primary Table:Prefix Direction' RoadCenterline:PRD VISIBLE NONE;
                    'Primary Table:Prefix Type' RoadCenterline:STP VISIBLE NONE;'*Primary Table:Street Name' RoadCenterline:RD VISIBLE NONE;
                    'Primary Table:Suffix Type' RoadCenterline:STS VISIBLE NONE;'Primary Table:Suffix Direction' RoadCenterline:POD VISIBLE NONE;
                    'Primary Table:Left City or Place' RoadCenterline:MUNI_L VISIBLE NONE;
                    'Primary Table:Right City or Place' RoadCenterline:MUNI_R VISIBLE NONE;
                    'Primary Table:Left ZIP Code' RoadCenterline:ZIP_L VISIBLE NONE;'Primary Table:Right ZIP Code' RoadCenterline:ZIP_R VISIBLE NONE;
                    'Primary Table:Left State' RoadCenterline:STATE_L VISIBLE NONE;'Primary Table:Right State' RoadCenterline:STATE_R VISIBLE NONE;
                    'Primary Table:Left Street ID' <None> VISIBLE NONE;'Primary Table:Right Street ID' <None> VISIBLE NONE;
                    'Primary Table:Display X' <None> VISIBLE NONE;'Primary Table:Display Y' <None> VISIBLE NONE;
                    'Primary Table:Min X value for extent' <None> VISIBLE NONE;'Primary Table:Max X value for extent' <None> VISIBLE NONE;
                    'Primary Table:Min Y value for extent' <None> VISIBLE NONE;'Primary Table:Max Y value for extent' <None> VISIBLE NONE;
                    'Primary Table:Left Additional Field' <None> VISIBLE NONE;'Primary Table:Right Additional Field' <None> VISIBLE NONE;
                    'Primary Table:Altname JoinID' RoadCenterline:SEGID VISIBLE NONE;'*Alternate Name Table:JoinID' RoadAlias:SEGID VISIBLE NONE;
                    'Alternate Name Table:Prefix Direction' RoadAlias:A_PRD VISIBLE NONE;'Alternate Name Table:Prefix Type' <None> VISIBLE NONE;
                    'Alternate Name Table:Street Name' RoadAlias:A_RD VISIBLE NONE;'Alternate Name Table:Suffix Type' RoadAlias:A_STS VISIBLE NONE;
                    'Alternate Name Table:Suffix Direction' RoadAlias:A_POD VISIBLE NONE"""
                    CreateAddressLocator_geocoding("US Address - Dual Ranges", streetPath + " 'Primary Table';" + roadAliasPath + " 'Alternate Name Table'", fieldMap, Locator, "", "DISABLED")
                except Exception as E:
                    userMessage(Locator)
                    userMessage("Cannot create address locator. Please email [email protected] this error message: " + str(E))


        if Exists(Locator):
            userMessage("Geocoding addresses...")

            #geocode table address
            if Exists(output):
                Delete_management(output)

            # i flags whether one of the geocoding attempts succeeded
            i = 0

            #set up geocoding
            gc_fieldMap = "Street LABEL VISIBLE NONE;City MUNI VISIBLE NONE;State State VISIBLE NONE;ZIP ZIP VISIBLE NONE"

            #geocode addresses; retry without the ZIP mapping on failure
            try:
                GeocodeAddresses_geocoding(gc_table, Locator, gc_fieldMap, output, "STATIC")
                i = 1
            except:
                gc_fieldMap = "Street LABEL VISIBLE NONE;City MUNI VISIBLE NONE;State State VISIBLE NONE"

                try:
                    GeocodeAddresses_geocoding(gc_table, Locator, gc_fieldMap, output, "STATIC")
                    i = 1
                except:
                    userMessage("Could not geocode address points")

            #report records that didn't geocode
            if i == 1:
                wc = "Status <> 'M'"
                lyr = "lyr"

                MakeFeatureLayer_management(output, lyr, wc)

                rStatus = GetCount_management(lyr)
                rCount = int(rStatus.getOutput(0))

                if rCount > 0:
                    #set up parameters to report records that didn't geocode
                    values = []
                    recordType = "fieldValues"
                    today = strftime("%m/%d/%y")
                    filename = "AddressPoints"

                    rfields = ("ADDID")
                    with SearchCursor(output, rfields, wc) as rRows:
                        for rRow in rRows:
                            fID = rRow[0]
                            report = str(fID) + " did not geocode against centerline"
                            val = (today, report, filename, "", fID)
                            values.append(val)

                    #report records
                    if values != []:
                        RecordResults(recordType, values, gdb)

                    userMessage("Completed geocoding with " + str(rCount) + " errors.")

                else:
                    #this means all the records geocoded
                    userMessage("All records geocoded successfully.")
                    try:
                        Delete_management(output)
                    except:
                        userMessage("Geocoding table could not be deleted")
        else:
            userMessage("Could not geocode addresses")
def recalculateKeyValues():
    # Recalculates KDOT_LRS_KEY for each row from its component fields.
    # The action taken depends on the LRS_ROUTE_PREFIX value:
    #   - Null prefix: do nothing.
    #   - I, U, K (State Highway System): key is the SHS component parts
    #     plus a cardinal direction suffix (-EB/-WB/-NB/-SB) derived from
    #     route number parity and KDOT_DIRECTION_CALC.
    #   - All other prefixes: key is the component parts plus LRS_ADMO and
    #     the direction flag (see _recalcAdmoKeys below).
    # Operates on the module-level fcAsFeatureLayer via daUpdateCursor.
    #
    # Prior to doing any of this, a field was added to cache the current
    # KDOT_LRS_KEY to check for mistakes and recover from them if found.

    def _trimUniqueIdent(lrsUniqueIdent):
        # Keep only the right-most character. e.g. 47 => 7, 52 => 2
        if len(lrsUniqueIdent) > 1:
            return lrsUniqueIdent[-1]
        return lrsUniqueIdent

    def _recalcAdmoKeys(selectionQuery, prefixField, requireSuffix):
        # Shared rebuild logic for the non-SHS prefixes. The new key is:
        #   <prefixField> + routePrefix + routeNum + routeSuffix
        #   + uniqueIdent(last char) + LRS_ADMO + directionCalc
        # with a null direction flag defaulting to '0'. When requireSuffix
        # is False (ramps), LRS_ROUTE_SUFFIX is excluded from the non-null
        # selection query and a null suffix becomes '0'; otherwise the
        # query guarantees the suffix is populated.
        necessaryFields = [
            prefixField, "LRS_ROUTE_PREFIX", "LRS_ROUTE_NUM",
            "LRS_UNIQUE_IDENT", "LRS_ADMO"
        ]
        if requireSuffix:
            necessaryFields.insert(3, "LRS_ROUTE_SUFFIX")
        dynNonNullSelectionQuery = GenerateNonNullSelectionQuery(necessaryFields)
        fullSelectionQuery = selectionQuery + """ AND """ + dynNonNullSelectionQuery

        fieldsToUseForUpdating = [
            prefixField, "LRS_ROUTE_PREFIX", "LRS_ROUTE_NUM",
            "LRS_ROUTE_SUFFIX", "LRS_UNIQUE_IDENT", "LRS_ADMO",
            "KDOT_DIRECTION_CALC", "KDOT_LRS_KEY"
        ]

        newCursor = daUpdateCursor(fcAsFeatureLayer, fieldsToUseForUpdating,
                                   fullSelectionQuery)
        for cursorRowItem in newCursor:
            cursorListItem = list(cursorRowItem)
            prefixPart = cursorListItem[0]
            routePre = cursorListItem[1]
            routeNum = cursorListItem[2]
            routeSuf = cursorListItem[3]
            if routeSuf is None:
                # Only reachable when requireSuffix is False; use '0' in
                # place of a missing suffix. Otherwise keep whatever
                # character is in the Route Suffix.
                routeSuf = '0'
            lrsUniqueIdent = _trimUniqueIdent(cursorListItem[4])
            lrsAdmo = cursorListItem[5]
            directionCalc = cursorListItem[6]
            if directionCalc is None:
                directionCalc = '0'
            try:
                newKey = str(prefixPart) + str(routePre) + str(routeNum) + str(
                    routeSuf) + str(lrsUniqueIdent) + str(lrsAdmo) + str(
                        directionCalc)
                cursorListItem[7] = newKey
                newCursor.updateRow(cursorListItem)
            except:
                try:
                    print(traceback.format_exc())
                    print("Could not calculate a new LRS_KEY for the given row.")
                    print("The row looks like this: " + str(cursorListItem) + ".")
                except:
                    pass
                # NOTE(review): next() advances the cursor, so the row after
                # a failed one is skipped as well -- confirm this is intended.
                newCursor.next()
        try:
            del newCursor
        except:
            pass

    # If the prefix is I, U, K, create the KDOTRouteId value from the SHS
    # component parts.
    selectionQuery = """ "LRS_ROUTE_PREFIX" IN ('I', 'U', 'K') """
    necessaryFields = [
        "LRS_COUNTY_PRE", "LRS_ROUTE_PREFIX", "LRS_ROUTE_NUM",
        "LRS_ROUTE_SUFFIX", "LRS_UNIQUE_IDENT", "KDOT_DIRECTION_CALC"
    ]
    dynNonNullSelectionQuery = GenerateNonNullSelectionQuery(necessaryFields)
    fullSelectionQuery = selectionQuery + """ AND """ + dynNonNullSelectionQuery

    fieldsToUseForUpdating = [
        "LRS_COUNTY_PRE", "LRS_ROUTE_PREFIX", "LRS_ROUTE_NUM",
        "LRS_ROUTE_SUFFIX", "LRS_UNIQUE_IDENT", "KDOT_DIRECTION_CALC",
        "KDOT_LRS_KEY"
    ]

    newCursor = daUpdateCursor(fcAsFeatureLayer, fieldsToUseForUpdating,
                               fullSelectionQuery)

    for cursorRowItem in newCursor:
        cursorListItem = list(cursorRowItem)
        countyPre = cursorListItem[0]
        routePre = cursorListItem[1]
        routeNum = cursorListItem[2]
        routeSuf = cursorListItem[3]
        lrsUniqueIdent = _trimUniqueIdent(cursorListItem[4])
        directionCalc = cursorListItem[5]
        directionText = ''

        # Modified in 2017 to fix the issue of non-primary sides, esp. on
        # odd-numbered routes, receiving '-EB'.
        try:
            if int(routeNum) % 2 == 0:
                if directionCalc is not None and int(directionCalc) == 1:
                    directionText = '-WB'
                else:
                    # Default; if the non-primary side is receiving this, make
                    # sure that it has a 1 in the directionCalc.
                    directionText = '-EB'

            if int(routeNum) % 2 == 1:
                if directionCalc is not None and int(directionCalc) == 1:
                    directionText = '-SB'
                else:
                    # Default; if the non-primary side is receiving this, make
                    # sure that it has a 1 in the directionCalc.
                    directionText = '-NB'
            newKey = str(countyPre) + str(routePre) + str(routeNum) + str(
                routeSuf) + str(lrsUniqueIdent) + directionText
            cursorListItem[6] = newKey
            # For Debugging
            ##print("Updating the lrs key to: " + str(newKey) + ".")
            newCursor.updateRow(cursorListItem)
        except:
            try:
                print(traceback.format_exc())
                print("Could not calculate a new LRS_KEY for the given row.")
                print("The row looks like this: " + str(cursorListItem) + ".")
            except:
                pass
            newCursor.next()

    try:
        del newCursor
    except:
        pass

    ###------------------------------------------------------------------------------------------------------------###
    ### If the prefix is not I, U, K and not X, create the KDOTRouteID from the Non-SHS, Non-Ramp component parts. ###
    ###------------------------------------------------------------------------------------------------------------###

    # For prefix R & M.
    _recalcAdmoKeys(""" "LRS_ROUTE_PREFIX" IN ('R', 'M') """,
                    "LRS_COUNTY_PRE", True)

    # For prefix C, Urban Classified, which uses LRS_URBAN_PRE.
    # Uses LRS_ADMO: LRS_ROUTE_NUM, LRS_ROUTE_SUFFIX, LRS_UNIQUE_IDENT, then
    # LRS_ADMO, then 0 for inventory direction.
    _recalcAdmoKeys(""" "LRS_ROUTE_PREFIX" IN ('C') """,
                    "LRS_URBAN_PRE", True)

    # If the prefix is X, create the KDOTRouteID from the Ramp route
    # component parts. It doesn't make sense to require *_SUFFIX on ramps -
    # just use '0' if it is null. Only 12 ramps have non-null
    # LRS_ROUTE_SUFFIX values; for those, it is all '0' or 'No Suffix'.
    # If people set LRS_ROUTE_SUFFIX to 'G' or 'Z' for ramps though, that
    # needs to be handled correctly.
    _recalcAdmoKeys(""" "LRS_ROUTE_PREFIX" = 'X' """,
                    "LRS_COUNTY_PRE", False)

    # For all other prefixes.
    _recalcAdmoKeys(
        """ "LRS_ROUTE_PREFIX" NOT IN ('I', 'U', 'K', 'X', 'R', 'M', 'C') """,
        "LRS_COUNTY_PRE", True)

    # Something's not right. The calculates should fail every time because
    # the n1 fields don't exist in this layer yet. :(
    selectionQuery = """ "KDOT_LRS_KEY" IS NOT NULL """
    SelectLayerByAttribute_management(fcAsFeatureLayer, "NEW_SELECTION",
                                      selectionQuery)
    # SourceRouteId = KDOT_LRS_KEY
    CalculateField_management(fcAsFeatureLayer, n1RouteId,
                              "!" + str(KDOTRouteId) + "!", "PYTHON_9.3")
    # SourceFromMeasure = county_log_begin
    CalculateField_management(fcAsFeatureLayer, n1FromMeas,
                              "!" + str(KDOTMeasBeg) + "!", "PYTHON_9.3")
    # SourceToMeasure = county_log_end
    CalculateField_management(fcAsFeatureLayer, n1ToMeas,
                              "!" + str(KDOTMeasEnd) + "!", "PYTHON_9.3")
    selectionQuery = """ KDOT_LRS_KEY IS NOT NULL AND county_log_begin IS NULL AND county_log_end IS NULL AND (COUNTY_BEGIN_MP IS NOT NULL OR COUNTY_END_MP IS NOT NULL) """
    SelectLayerByAttribute_management(fcAsFeatureLayer, "NEW_SELECTION",
                                      selectionQuery)
    countResult = GetCount_management(fcAsFeatureLayer)
    intCount = int(countResult.getOutput(0))
    print(
        "After the new selection query to deal with the fact that some State routes did not have their begin and end measure populated correctly, "
        + str(intCount) + " segments were selected.")
    # SourceFromMeasure = COUNTY_BEGIN_MP
    CalculateField_management(fcAsFeatureLayer, n1FromMeas,
                              "!COUNTY_BEGIN_MP!", "PYTHON_9.3")
    # SourceToMeasure = COUNTY_END_MP
    CalculateField_management(fcAsFeatureLayer, n1ToMeas, "!COUNTY_END_MP!",
                              "PYTHON_9.3")
            # Get the name of each city
            nameCity = row[0]

            # 
            queryString = '"' + nameField + '" = ' + "'" + nameCity + "'"

            # Make a feature layer of just the current city polygon
            # The queryString is reponsible for select only the feature of city
            MakeFeatureLayer_management(cityBoundaries, "CurrentCityLayer", queryString) 
            MakeFeatureLayer_management(parkAndRide, "ParkAndRide_lry")

            # Selecty by location all feature of park and ride that contain in current city
            SelectLayerByLocation_management("ParkAndRide_lry", "CONTAINED_BY", "CurrentCityLayer")

            # Get the total value of park and ride for each city
            countPark = GetCount_management("ParkAndRide_lry")
            totalPark = int(countPark.getOutput(0))

            # Count of city in cityBoundaries
            totalCity += 1

            # If the total park and rise is bigger then 1
            if totalPark > 1:

                # The row in field HasTwoParkAndRides update to "True"
                row[1] = "True"
            
                # Count each city has than more that two park and ride in your limits
                CityWithTwoParkAndRides += 1

            else:
Example #40
0
def checkRequiredFieldValues(pathsInfoObject):
    # Checks that every required field of each submitted record in the
    # geodatabase has a value, reporting nulls via userMessage and recording
    # all issues with RecordResults under "fieldValues".
    #
    # Fixes vs. the previous revision:
    #  - a keyword missing from rfDict no longer silently reuses the
    #    previous file's required-field list (stale-variable bug)
    #  - a file with no matching required fields is now skipped instead of
    #    building an empty where clause and empty cursor field list
    gdb = pathsInfoObject.gdbPath
    folder = pathsInfoObject.domainsFolderPath
    esb = pathsInfoObject.esbList
    version = pathsInfoObject.gdbVersion

    userMessage("Checking that required fields have all values...")

    #get today's date
    today = strftime("%m/%d/%y")

    #get required fields
    rfDict = getRequiredFields(folder, version)

    if rfDict != {}:

        values = []

        #walk through the tables/feature classes
        for dirpath, dirnames, filenames in Walk(gdb, True, '', False, ["Table","FeatureClass"]):
            for filename in filenames:
                #skip the tool's own results tables
                if filename.upper() not in ("FIELDVALUESCHECKRESULTS", "TEMPLATECHECKRESULTS"):
                    fullPath = path.join(gdb, filename)
                    if filename.upper() in esb:
                        layer = "ESB"
                    else:
                        layer = filename.upper()
                    id1 = getUniqueIDField(layer)
                    if id1 != "":

                        #get the keyword to acquire required field names
                        keyword = getKeyword(filename, esb)

                        #get the appropriate required field list; default to
                        #an empty list so a missing keyword doesn't reuse the
                        #previous file's list
                        requiredFieldList = rfDict.get(keyword, [])
                        rfl = [rf.upper() for rf in requiredFieldList]

                        #get list of field names in the feature class
                        fields = [aF.name.upper() for aF in ListFields(fullPath)]

                        #get the set of required fields actually present
                        matchingFields = list(set(rfl) & set(fields))

                        #nothing to validate for this file
                        if not matchingFields:
                            continue

                        #only work with records that are for submission
                        lyr2 = "lyr2"
                        if version == "10":
                            MakeTableView_management(fullPath, lyr2)
                        else:
                            wc2 = "SUBMIT = 'Y'"
                            MakeTableView_management(fullPath, lyr2, wc2)

                        #create where clause to select any records where required values aren't populated
                        wc = ""

                        for field in matchingFields:
                            wc = wc + " " + field + " is null or "

                        wc = wc[0:-4]

                        #make table view using where clause
                        lyr = "lyr"
                        MakeTableView_management(lyr2, lyr, wc)

                        #get count of the results
                        result = GetCount_management(lyr)
                        count = int(result.getOutput(0))


                        #if count is greater than 0, it means a required value somewhere isn't filled in
                        if count > 0:
                            #make sure the unique ID gets included in the search for reporting
                            if id1 not in matchingFields:
                                matchingFields.append(id1)

                            #run a search cursor to get any/all records where a required field value is null
                            with SearchCursor(fullPath, (matchingFields), wc) as rows:
                                for row in rows:
                                    #get unique ID of the record
                                    oid = str(row[matchingFields.index(id1)])

                                    #check every matching field in the row for nulls
                                    for k in range(len(matchingFields)):
                                        if row[k] is None:
                                            #report the value if it is indeed null
                                            report = matchingFields[k] + " is null for Feature ID " + oid
                                            userMessage(report)
                                            val = (today, report, filename, matchingFields[k], oid)
                                            values.append(val)
                        else:
                            userMessage( "All required values present for " + filename)

                        Delete_management(lyr)
                        Delete_management(lyr2)

        if values != []:
            RecordResults("fieldValues", values, gdb)

        userMessage("Completed check for required field values: " + str(len(values)) + " issues found")

    else:
        userMessage("Could not check required field values")
Example #41
0
def checkUniqueIDFrequency(currentPathSettings):
    """Check that every feature's unique ID occurs exactly once.

    For ESB layers, all IDs are first consolidated into a temp table
    (ESB_IDS) so duplicates ACROSS the ESB layers are caught as well.
    Duplicate IDs are reported via userMessage and written to the
    FieldValuesCheckResults table through RecordResults.

    currentPathSettings: object exposing gdbPath, esbList and fcList.
    """
    gdb = currentPathSettings.gdbPath
    esbList = currentPathSettings.esbList
    fcList = currentPathSettings.fcList

    layerList = []

    env.workspace = gdb
    table = "ESB_IDS"

    # create temp table of esbID's
    if esbList != []:  # fixed: '<>' is removed in Python 3
        layerList = ["ESB_IDS"]

        if Exists(table):
            Delete_management(table)

        CreateTable_management(gdb, table)

        AddField_management(table, "ESBID", "TEXT", "", "", 38)
        AddField_management(table, "ESB_LYR", "TEXT", "", "", 15)

        esbFields = ("ESBID",)  # fixed: was a bare string, not a tuple

        # copy ID's & esb layer type into the table; one InsertCursor for
        # the whole copy (was re-created per row, leaking cursor objects)
        cursor = InsertCursor(table, ('ESBID', 'ESB_LYR'))
        for esb in esbList:
            with SearchCursor(esb, esbFields) as rows:
                for row in rows:
                    cursor.insertRow((row[0], esb))

        try:
            # clean up
            del rows, row, cursor
        except NameError:
            print("objects cannot be deleted, they don't exist")

    else:
        for fc in fcList:
            fc = basename(fc)
            layerList.append(fc)

    values = []
    recordType = "fieldValues"
    today = strftime("%m/%d/%y")

    for layer in layerList:
        if layer != "ESB_IDS":
            # for each layer, get the unique ID field
            uniqueID = getUniqueIDField(layer.upper())
        else:
            # for esb layers, get the unique ID field
            uniqueID = "ESBID"

        # frequency table: one row per distinct ID value with its COUNT
        Statistics_analysis(layer, layer + "_freq", [[uniqueID, "COUNT"]], uniqueID)

        # set parameters for the search cursor
        where_clause = "FREQUENCY > 1"
        fields = (uniqueID, "FREQUENCY")

        fl = "fl"

        MakeTableView_management(layer + "_freq", fl, where_clause)

        result = GetCount_management(fl)
        count = int(result.getOutput(0))

        if count > 0:
            # set a search cursor with just the unique ID field
            with SearchCursor(layer + "_freq", fields, where_clause) as rows2:
                stringESBReport = ""
                for row2 in rows2:
                    # fixed: lyr was only assigned in the non-ESB branch,
                    # causing a NameError when reporting ESB_IDS duplicates
                    lyr = layer
                    if layer == "ESB_IDS":
                        stringEsbInfo = []
                        # fixed: ESBID is a TEXT field, so the SQL literal
                        # must be quoted or the query fails
                        wc2 = "ESBID = '" + str(row2[0]) + "'"
                        with SearchCursor("ESB_IDS", ("ESB_LYR"), wc2) as esbRows:
                            for esbRow in esbRows:
                                stringEsbInfo.append(esbRow[0])

                        stringESBReport = " and ".join(stringEsbInfo)

                    # report duplicate IDs
                    report = str(row2[0]) + " is a duplicate ID"
                    if stringESBReport != "":
                        report = report + " in " + stringESBReport
                    val = (today, report, lyr, uniqueID, row2[0])
                    values.append(val)

        Delete_management(layer + "_freq")
        Delete_management(fl)

    # report duplicate records
    if values != []:
        RecordResults(recordType, values, gdb)
        userMessage("Checked unique ID frequency. Results are in table FieldValuesCheckResults.")
    else:
        userMessage("All ID's are unique.")

    # if it exists, clean up table
    if Exists(table):
        Delete_management(table)
Example #42
0
def compute_adjacency_list(input_points, input_network, id_attribute,
                           impedance_attribute, accumulator_attributes,
                           search_radius, output_location, adj_dbf_name):
    """Build a network adjacency list dbf for a set of input points.

  |input_points|: point shape file marking entity (e.g. building) locations
  |input_network|: street network in which |input_points| is located
  |id_attribute|: the name of attribute that distinguishes between input points
  |impedance_attribute|: distance between neighboring nodes will be based on
      this attribute
  |accumulator_attributes|: distance between neighboring nodes will also be
      recorded for these attributes
  |search_radius|: the maximum extent for centrality computation
  |output_location|: adjacency list dbf will be saved here
  |adj_dbf_name|: the name of the adjacency list dbf
  """

    # Number of points in |input_points|
    input_point_count = int(GetCount_management(input_points).getOutput(0))

    # Make a directory to store all auxiliary files
    auxiliary_dir = join(output_location, AUXILIARY_DIR_NAME)
    if not Exists(auxiliary_dir):
        mkdir(auxiliary_dir)

    # Record the edge and junction source names of |input_network|
    junction_feature, edge_feature = network_features(input_network)

    # Calculate network locations if not already calculated
    # NOTE(review): .next() is the Python 2 iterator protocol; this consumes
    # the first row of the cursor purely to inspect its fields
    test_input_point = UpdateCursor(input_points).next()
    locations_calculated = all(
        row_has_field(test_input_point, field)
        for field in NETWORK_LOCATION_FIELDS)
    if not locations_calculated:
        calculate_network_locations(input_points, input_network)

    # Calculate barrier cost per input point if not already calculated
    barrier_costs_calculated = row_has_field(test_input_point,
                                             trim(BARRIER_COST_FIELD))
    if not barrier_costs_calculated:
        AddMessage(BARRIER_COST_COMPUTATION_STARTED)
        # Add |BARRIER_COST_FIELD| column in |input_points|
        AddField_management(in_table=input_points,
                            field_name=trim(BARRIER_COST_FIELD),
                            field_type="DOUBLE",
                            field_is_nullable="NON_NULLABLE")

        # Initialize a dictionary to store the frequencies of (SnapX, SnapY) values
        xy_count = {}
        # A method to retrieve a (SnapX, SnapY) pair for a row in |input_points|
        get_xy = lambda row: (row.getValue(trim("SnapX")),
                              row.getValue(trim("SnapY")))

        # First pass: count how many points snapped to each network location
        barrier_pre_progress = Progress_Bar(input_point_count, 1,
                                            BARRIER_COST_PRE_PROCESSING)
        rows = UpdateCursor(input_points)
        for row in rows:
            snap_xy = get_xy(row)
            if snap_xy in xy_count:
                xy_count[snap_xy] += 1
            else:
                xy_count[snap_xy] = 1
            barrier_pre_progress.step()

        # Populate |BARRIER_COST_FIELD|, this will be used in OD matrix computation
        # (points sharing a snap location split the barrier cost between them)
        barrier_progress = Progress_Bar(input_point_count, 1,
                                        BARRIER_COST_COMPUTATION)
        rows = UpdateCursor(input_points)
        for row in rows:
            barrier_cost = BARRIER_COST / xy_count[get_xy(row)]
            row.setValue(trim(BARRIER_COST_FIELD), barrier_cost)
            rows.updateRow(row)
            barrier_progress.step()
        AddMessage(BARRIER_COST_COMPUTATION_FINISHED)

    # Necessary files
    od_cost_matrix_layer = join(auxiliary_dir, OD_COST_MATRIX_LAYER_NAME)
    od_cost_matrix_lines = join(od_cost_matrix_layer, OD_COST_MATRIX_LINES)
    temp_adj_dbf_name = TEMP_ADJACENCY_DBF_NAME(adj_dbf_name)
    temp_adj_dbf = join(output_location, temp_adj_dbf_name)
    adj_dbf = join(output_location, adj_dbf_name)
    partial_adj_dbf = join(auxiliary_dir, PARTIAL_ADJACENCY_LIST_NAME)
    polygons = join(auxiliary_dir, POLYGONS_SHAPEFILE_NAME)
    raster = join(auxiliary_dir, RASTER_NAME)
    polygons_layer = join(auxiliary_dir, POLYGONS_LAYER_NAME)
    input_points_layer = join(auxiliary_dir, INPUT_POINTS_LAYER_NAME)

    # Make sure none of these files already exists
    # NOTE(review): |delete| appears to be a project helper, presumably a
    # safe delete-if-exists wrapper — confirm in its definition
    for path in [
            od_cost_matrix_layer, temp_adj_dbf, adj_dbf, partial_adj_dbf,
            polygons, raster, polygons_layer, input_points_layer,
            od_cost_matrix_lines
    ]:
        delete(path)

    # Cutoff radius for OD matrix computation
    # (inflated by 2 * BARRIER_COST so that routes passing two point
    # barriers still fall within the cutoff; compensated for later)
    cutoff_radius = 2 * BARRIER_COST + min(search_radius, BARRIER_COST / 2)

    # Compute OD matrix
    MakeODCostMatrixLayer_na(in_network_dataset=input_network,
                             out_network_analysis_layer=od_cost_matrix_layer,
                             impedance_attribute=impedance_attribute,
                             default_cutoff=str(cutoff_radius),
                             accumulate_attribute_name=accumulator_attributes,
                             UTurn_policy="ALLOW_UTURNS",
                             hierarchy="NO_HIERARCHY",
                             output_path_shape="NO_LINES")

    # Determine raster cell size
    # NOTE(review): under Python 2 these '/' are integer divisions when both
    # operands are ints — likely intentional here (whole cell counts)
    points_per_raster_cell = OD_MATRIX_ENTRIES / input_point_count
    raster_cell_count = max(1, input_point_count / points_per_raster_cell)
    input_points_extent = Describe(input_points).Extent
    raster_cell_area = (input_points_extent.width *
                        input_points_extent.height / raster_cell_count)
    raster_cell_size = int(sqrt(raster_cell_area))

    # Construct |raster| from |input_points|
    PointToRaster_conversion(in_features=input_points,
                             value_field=id_attribute,
                             out_rasterdataset=raster,
                             cell_assignment="MOST_FREQUENT",
                             priority_field="NONE",
                             cellsize=str(raster_cell_size))

    # Construct |polygons| from |raster|; these polygons partition the
    # input points into batches for piecewise OD matrix solves
    RasterToPolygon_conversion(in_raster=raster,
                               out_polygon_features=polygons,
                               simplify="NO_SIMPLIFY",
                               raster_field="VALUE")

    # Export empty |od_cost_matrix_lines| to |temp_dbf| to start adjacency list
    TableToTable_conversion(in_rows=od_cost_matrix_lines,
                            out_path=output_location,
                            out_name=temp_adj_dbf_name)

    # Construct |polygons_layer| and |input_points_layer|
    for (feature, layer) in [(polygons, polygons_layer),
                             (input_points, input_points_layer)]:
        MakeFeatureLayer_management(in_features=feature, out_layer=layer)

    def add_locations(sub_layer, field_mappings=""):
        """Load the currently-selected input points into |sub_layer| of the
    OD cost matrix layer.

    |sub_layer|: one of "Origins", "Destinations", "Barrier Points"
    |field_mappings|: field mappings in addition to those for "Name" and
        "CurbApproach"
    """
        AddLocations_na(in_network_analysis_layer=od_cost_matrix_layer,
                        sub_layer=sub_layer,
                        in_table=input_points_layer,
                        field_mappings=("Name %s #; CurbApproach # 0; %s" %
                                        (id_attribute, field_mappings)),
                        search_tolerance=SEARCH_TOLERANCE,
                        search_criteria=("%s SHAPE; %s SHAPE;" %
                                         (junction_feature, edge_feature)),
                        append="CLEAR",
                        snap_to_position_along_network="SNAP",
                        snap_offset=SNAP_OFFSET)

    # OD cost matrix destinations (all input points; the no-argument
    # SelectLayerByLocation call presumably resets the selection — verify)
    AddMessage(ADDING_DESTINATIONS_STARTED)
    SelectLayerByLocation_management(in_layer=input_points_layer)
    add_locations("Destinations")
    AddMessage(ADDING_DESTINATIONS_FINISHED)

    # OD cost matrix point barriers: every input point adds its barrier
    # cost to routes passing through it
    AddMessage(ADDING_BARRIERS_STARTED)
    add_locations("Point Barriers",
                  ("FullEdge # 0; BarrierType # 2;"
                   "Attr_%s %s #;" %
                   (impedance_attribute, trim(BARRIER_COST_FIELD))))
    AddMessage(ADDING_BARRIERS_FINISHED)

    # Compute adjacency list, one raster cell at a time
    progress = Progress_Bar(raster_cell_count, 1, STEP_1)
    rows = UpdateCursor(polygons)
    for row in rows:
        # Select the current polygon
        SelectLayerByAttribute_management(in_layer_or_view=polygons_layer,
                                          selection_type="NEW_SELECTION",
                                          where_clause="FID = %s" %
                                          str(row.FID))

        # Origins: only the input points inside the current polygon
        SelectLayerByLocation_management(in_layer=input_points_layer,
                                         select_features=polygons_layer)
        add_locations("Origins")

        # Solve OD Cost matrix
        Solve_na(in_network_analysis_layer=od_cost_matrix_layer,
                 ignore_invalids="SKIP")

        # Add origin and destination fields to the adjacency list dbf
        # (the solver encodes them as "origin - destination" in Name)
        for (index, field) in [(0, ORIGIN_ID_FIELD_NAME),
                               (1, DESTINATION_ID_FIELD_NAME)]:
            CalculateField_management(in_table=od_cost_matrix_lines,
                                      field=field,
                                      expression="!Name!.split(' - ')[%d]" %
                                      index,
                                      expression_type="PYTHON")

        # Record actual distance between neighboring nodes
        # (subtract the 2 * BARRIER_COST inflation added via the barriers)
        distance_field = "Total_%s" % impedance_attribute
        CalculateField_management(in_table=od_cost_matrix_lines,
                                  field=distance_field,
                                  expression="!%s! - 2 * %d" %
                                  (distance_field, BARRIER_COST),
                                  expression_type="PYTHON")

        # Append result to |temp_adj_dbf|
        TableToTable_conversion(in_rows=od_cost_matrix_lines,
                                out_path=auxiliary_dir,
                                out_name=PARTIAL_ADJACENCY_LIST_NAME)
        Append_management(inputs=partial_adj_dbf,
                          target=temp_adj_dbf,
                          schema_type="TEST")

        progress.step()

    # Copy data from |temp_adj_dbf| to |adj_dbf|
    Rename_management(in_data=temp_adj_dbf, out_data=adj_dbf)

    # Clean up
    for path in [
            od_cost_matrix_layer, partial_adj_dbf, polygons, raster,
            polygons_layer, input_points_layer, auxiliary_dir
    ]:
        delete(path)
Example #43
0
def checkFeatureLocations(pathsInfoObject):
    """Report features that fall outside the authoritative boundary.

    Each feature class in the object's fcList (RoadAlias excluded, since
    it has no geometry) is checked against the AuthoritativeBoundary
    polygon; offending feature IDs are recorded via RecordResults.
    """
    gdb = pathsInfoObject.gdbPath
    fcList = pathsInfoObject.fcList
    esb = pathsInfoObject.esbList
    version = pathsInfoObject.gdbVersion

    # the RoadAlias table is non-spatial, so drop it from the check list
    RoadAlias = join(gdb, "RoadAlias")
    if RoadAlias in fcList:
        fcList.remove(RoadAlias)

    userMessage("Checking feature locations...")

    runDate = strftime("%m/%d/%y")
    issues = []

    # build a layer for the authoritative boundary polygon
    authBound = path.join(gdb, "NG911", "AuthoritativeBoundary")
    boundaryLayer = "ab"
    MakeFeatureLayer_management(authBound, boundaryLayer)

    for fullPath in fcList:
        featureLayer = "fl"
        if version == "10":
            MakeFeatureLayer_management(fullPath, featureLayer)
        else:
            # newer schemas: check only submitted records, and allow
            # documented exceptions on road centerlines
            if "RoadCenterline" in fullPath:
                wc = "SUBMIT = 'Y' AND EXCEPTION not in ('EXCEPTION INSIDE', 'EXCEPTION BOTH')"
            else:
                wc = "SUBMIT = 'Y'"
            MakeFeatureLayer_management(fullPath, featureLayer, wc)

        try:
            # select everything inside the boundary, then invert so the
            # selection holds only the features OUTSIDE it
            SelectLayerByLocation_management(featureLayer, "WITHIN", boundaryLayer)
            SelectLayerByAttribute_management(featureLayer, "SWITCH_SELECTION", "")

            outsideCount = int(GetCount_management(featureLayer).getOutput(0))

            if outsideCount > 0:
                layer = basename(fullPath)
                layerName = "ESB" if layer in esb else layer
                id1 = getUniqueIDField(layerName.upper())
                report = "Feature not inside authoritative boundary"
                if id1 != '':
                    with SearchCursor(featureLayer, (id1)) as rows:
                        for row in rows:
                            issues.append((runDate, report, layer, " ", row[0]))
                else:
                    userMessage("Could not process features in " + fullPath)
            else:
                userMessage(fullPath + ": all records inside authoritative boundary")
        except:
            # best-effort: a failure on one feature class must not stop the rest
            userMessage("Could not check locations of " + fullPath)
        finally:
            # always remove the per-feature-class layer
            Delete_management(featureLayer)

    if issues != []:
        RecordResults("fieldValues", issues, gdb)

    userMessage("Completed check on feature locations: " + str(len(issues)) + " issues found")