Example #1
def GetStartingLine(glfID, rhID):
    with da.SearchCursor(pointFC, ['SHAPE@', 'NodeID', 'NodeType'], 'NodeType = {0} OR NodeType = {1}'.format(glfID, rhID)) as cursorOrigin:
        for pointOrigin in cursorOrigin:
            # Boiler-house node: iterate the supply (outflow) lines
            if pointOrigin[2] == glfID:
                hltype = 1
                with da.UpdateCursor(LineFC, ['SHAPE@', 'LineID', 'S_NodeID', 'E_NodeID'], 'HLType = {0}'.format(hltype)) as cursorL:
                    for lineRL in cursorL:
                        if lineRL[2] is None and lineRL[3] is None:
                            if lineRL[0].touches(pointOrigin[0]):
                                # start node ID
                                lineRL[2] = pointOrigin[1]
                                # end node ID
                                with da.SearchCursor(pointFC, ['SHAPE@', 'NodeID'], 'NodeType <> {0}'.format(pointOrigin[2])) as cursorP:
                                    for point in cursorP:
                                        if lineRL[0].touches(point[0]):
                                            lineRL[3] = point[1]
                                            cursorL.updateRow(lineRL)
                                            print(u'Supply line <{0}>: start <{1}>, end <{2}>.'.format(lineRL[1], lineRL[2], lineRL[3]))
            # Service-entry node: iterate the return lines
            elif pointOrigin[2] == rhID:
                hltype = 0
                with da.UpdateCursor(LineFC, ['SHAPE@', 'LineID', 'S_NodeID', 'E_NodeID'], 'HLType = {0}'.format(hltype)) as cursorL:
                    for lineRL in cursorL:
                        if lineRL[2] is None and lineRL[3] is None:
                            if lineRL[0].touches(pointOrigin[0]):
                                # end node ID
                                lineRL[3] = pointOrigin[1]
                                # start node ID
                                with da.SearchCursor(pointFC, ['SHAPE@', 'NodeID'], 'NodeType <> {0}'.format(pointOrigin[2])) as cursorP:
                                    for point in cursorP:
                                        if lineRL[0].touches(point[0]):
                                            lineRL[2] = point[1]
                                            cursorL.updateRow(lineRL)
                                            print(u'Return line <{0}>: start <{1}>, end <{2}>.'.format(lineRL[1], lineRL[2], lineRL[3]))
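Example #1 relies on module-level pointFC and LineFC datasets and on the two node-type codes passed in as glfID and rhID. A minimal driver sketch, with hypothetical paths and codes (1 = boiler-house node, 2 = service-entry node) used purely for illustration:

import arcpy
from arcpy import da

# Hypothetical workspace and dataset names, for illustration only.
arcpy.env.workspace = r'C:\data\heating.gdb'
pointFC = 'HeatNodes'   # points with NodeID and NodeType fields
LineFC = 'HeatLines'    # lines with LineID, S_NodeID, E_NodeID and HLType fields

GetStartingLine(glfID=1, rhID=2)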
Example #2
def UpdateLineAttribute():
    # lineRL2: iterate lines whose start and end node IDs are both non-null
    fields = ['SHAPE@', 'LineID', 'S_NodeID', 'E_NodeID', 'HLType']
    with da.SearchCursor(LineFC, fields, 'S_NodeID IS NOT NULL AND E_NodeID IS NOT NULL') as cursorL2:
        for lineRL2 in cursorL2:
            # lineRL1: iterate same-type lines whose start and end node IDs are both null
            with da.UpdateCursor(LineFC, fields, 'HLType = {0} AND S_NodeID IS NULL AND E_NodeID IS NULL'.format(lineRL2[4])) as cursorL1:
                for lineRL1 in cursorL1:
                    # if the two lines touch
                    if lineRL1[0].touches(lineRL2[0]):
                        if lineRL1[4] == 1:         # supply line: start node
                            lineRL1[2] = lineRL2[3]
                        elif lineRL1[4] == 0:       # return line: end node
                            lineRL1[3] = lineRL2[3]
                        with da.SearchCursor(pointFC, ['SHAPE@', 'NodeID']) as cursorP2:
                            # iterate all pipe nodes
                            for point2 in cursorP2:
                                # if the node touches the line and differs from the node already assigned, set the remaining endpoint
                                if point2[0].touches(lineRL1[0]) and lineRL1[2] != point2[1]:
                                    if lineRL1[4] == 1:        # 出水管
                                        lineRL1[3] = point2[1]
                                    elif lineRL1[4] == 0:      # 回水管
                                        lineRL1[2] = point2[1]
                                    cursorL1.updateRow(lineRL1)
                                    print(u'## <{0}>: start <{1}>, end <{2}>.'.format(lineRL1[1], lineRL1[2], lineRL1[3]))
            del lineRL1
            del cursorL1
Example #3
def unique_values(table, field):
    """gets a list of unique values from a table's column
    table: path to a table
    field: string of a field name

    output:
       list
    """
    try:
        if has_pandas:
            uvalues = None
            chunk_size = calc_chunk_size()
            with da.SearchCursor(table, [field]) as cursor:
                for group in grouper_it(chunk_size, cursor):
                    df = pd.DataFrame.from_records(group,
                                                   columns=cursor.fields)
                    column = df[field].unique()
                    if uvalues is None:
                        uvalues = column
                    else:
                        uvalues = np.concatenate([column, uvalues])
                    del group
                    del df
                    del column
                del cursor
            if uvalues is None:
                return []
            return list(set(uvalues.tolist()))
        else:
            desc = arcpy.Describe(table)
            if desc.hasOID:
                oidFieldname = desc.OIDFieldName
            else:
                raise Exception("Table must have an object id table")
            template = da.FeatureClassToNumPyArray(
                table, [field],
                where_clause="{ofield} < 1".format(ofield=oidFieldname))
            uvalues = None
            chunk_size = calc_chunk_size()
            with da.SearchCursor(table, [field]) as cursor:
                for group in grouper_it(chunk_size, cursor):
                    df = np.fromiter(group, template.dtype, -1)
                    column = np.unique(df[field])
                    if uvalues is None:
                        uvalues = column
                    else:
                        uvalues = np.unique(np.concatenate([column, uvalues]))
            if uvalues is None:
                return []
            return list(set(uvalues.tolist()))
    except:
        line, filename, synerror = trace()
        raise FunctionError({
            "function": "unique_values",
            "line": line,
            "filename": __file__,
            "synerror": synerror,
            "arc": str(arcpy.GetMessages(2))
        })
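Example #3 (like the replace_null_values and replace_values examples further down) calls grouper_it and calc_chunk_size, helpers that are not shown on this page. A minimal sketch of what they might look like, assuming grouper_it(n, iterable) yields lists of at most n rows and calc_chunk_size simply returns a fixed batch size:

import itertools

def calc_chunk_size(default=50000):
    # Hypothetical: a fixed batch size; the original may derive this
    # from available memory instead.
    return default

def grouper_it(n, iterable):
    # Yield successive chunks of at most n items from any iterable,
    # such as an arcpy.da.SearchCursor.
    it = iter(iterable)
    while True:
        chunk = list(itertools.islice(it, n))
        if not chunk:
            return
        yield chunk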
Example #4
def score(oids, grid_features, line_features):
    line_layer = 'line_layer'
    arcpy.MakeFeatureLayer_management(line_features, line_layer)

    results = {}
    query = '{0} IN {1}'.format(
        arcpy.Describe(grid_features).oidFieldName, str(tuple(oids)))
    with da.SearchCursor(grid_features, ('OID@', 'SHAPE@'), query) as cursor:
        for row in cursor:
            oid = row[0]
            grid_geometry = row[1]

            # select intersecting lines
            arcpy.SelectLayerByLocation_management(line_layer, 'INTERSECT',
                                                   grid_geometry)

            # build stats
            stats = []
            has_match = False
            with da.SearchCursor(line_layer, ('OFFSET_METERS')) as line_cursor:
                for line_row in line_cursor:
                    has_match = True
                    if line_row[0] > 0:
                        stats.append(line_row[0])

            # calc mean
            if len(stats) > 0:
                mean = np.mean(stats)
            elif has_match:
                mean = -2
            else:
                mean = -1

            # assign score
            if mean > 0:
                if mean >= 0 and mean <= 5:
                    value = 5
                elif mean > 5 and mean <= 10:
                    value = 4
                elif mean > 10 and mean <= 15:
                    value = 3
                elif mean > 15 and mean <= 20:
                    value = 2
                else:
                    value = 1
            elif mean == -1:
                # no samples
                value = 0
            elif mean == -2:
                # no parallel matches
                value = 6

            results[oid] = [mean, value]
    return results
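Example #4 expects a list of grid OIDs plus the grid and line feature classes, with the lines carrying an OFFSET_METERS field. A hypothetical call (paths and fields are illustrative assumptions, not part of the original):

import arcpy
from arcpy import da

grid_features = r'C:\data\match.gdb\grid_cells'      # hypothetical path
line_features = r'C:\data\match.gdb\matched_lines'   # hypothetical path

oids = [row[0] for row in da.SearchCursor(grid_features, ['OID@'])]
results = score(oids, grid_features, line_features)
for oid, (mean, value) in results.items():
    print('{0}: mean offset {1}, score {2}'.format(oid, mean, value))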
Example #5
def ProjDelete():
    # delete rows from the FMIS load table that are about to be processed
    delcount = int(GetCount_management(
        r'Database Connections\CANT_CPMS.sde\CPMS.CPMS_FMIS_GIS_DEL_ROWS').getOutput(0))
    print str(delcount) + " records to delete"
    deletelist = list()
    if delcount == 0:
        print "no records to delete, ending"
        pass
    else:
        MakeTableView_management(FMIS_LOAD, "FMIS_TABLE")
        MakeTableView_management(deltbl, "deletes")
        with da.SearchCursor(deltbl,
                             "PROJECT_NUMBER") as delcur:  # @UndefinedVariable
            for row in delcur:
                DelXID = ("{0}".format(row[0]))
                #print DelXID + " is being deleted from the FMIS table"
                #AddJoin_management(layer_name,"CROSSINGID", deltbl, "CROSSINGID", "KEEP_ALL")
                #delsel = "PROJECT_NUMBER LIKE '"+DelXID+"'"
                #print delsel
                deletelist.append(DelXID)
                #SelectLayerByAttribute_management("FMIS_TABLE","ADD_TO_SELECTION", delsel)
        #print str(deletelist)
        delsel = "PROJECT_NUMBER IN " + str(deletelist)
        #for ProjectNumber in deletelist:
        print delsel

        SelectLayerByAttribute_management("FMIS_TABLE", "NEW_SELECTION",
                                          delsel)
        #DeleteRows_management("FMIS_TABLE")
    print "Delete function completed"
def generateCcCombinedRoutesFc():
    """The city center routes are split into a few feature classes for the various
	modes of transportation, combine them into a unified one"""

    geom_type = 'POLYLINE'
    template = os.path.join(env.workspace, 'frequent_bus_carto')
    oregon_spn = arcpy.SpatialReference(2913)
    combined_routes_cc = os.path.join(cc_shapefiles, 'combined_routes_cc.shp')
    management.CreateFeatureclass(os.path.dirname(combined_routes_cc),
                                  os.path.basename(combined_routes_cc),
                                  geom_type,
                                  template,
                                  spatial_reference=oregon_spn)

    name_field = 'LINE'
    route_fields = ['Shape@', 'routes', 'serv_level', 'route_type']
    i_cursor = da.InsertCursor(combined_routes_cc, route_fields)

    for fc in arcpy.ListFeatureClasses(feature_type='Polyline'):
        if name_field in [f.name for f in arcpy.ListFields(fc)]:
            assignRouteNumbersToRail(fc, name_field, route_fields[1])

        with da.SearchCursor(fc, route_fields) as cursor:
            for row in cursor:
                i_cursor.insertRow(row)

    del i_cursor
def get_image_paths(in_mosaic):
    temp_image_table = path.join("in_memory", "temp_image_table")
    ExportMosaicDatasetPaths(in_mosaic, temp_image_table, '', "ALL",
                             "RASTER;ITEM_CACHE")
    images = set([row[0] for row in da.SearchCursor(temp_image_table, "Path")])
    Delete(temp_image_table)
    return images
Example #8
def DeleteDeletedRows(FMIS_DEL, FMIS_LOAD):
    #delete rows from the FMIS table programmed to be deleted according to the CPMS view
    MakeTableView_management(FMIS_DEL, "RowstoDelete")
    MakeTableView_management(FMIS_LOAD, "DeleteThese")
    delcount = GetCount(FMIS_DEL)
    #delete rows from SDE CIIMS that are removed from CANSYS CIIMS
    #search cursor to match the crossing ID in the delete view
    SetLogHistory(False)
    DeleteList = []
    with da.SearchCursor("RowstoDelete",
                         "PROJECT_NUMBER") as delcur:  # @UndefinedVariable
        for row in delcur:
            DelXID = ("{0}".format(row[0]))
            DeleteList.append(DelXID)
    print "list completed"
    for record in DeleteList:
        #print DelXID + " is being deleted from the FMIS table"
        #add the project number for the row to be deleted to the selection set
        delsel = "PROJECT_NUMBER LIKE '%s'" % record
        #print delsel
        SelectLayerByAttribute_management("DeleteThese", "ADD_TO_SELECTION",
                                          delsel)
    #delete the selected rows
    DeleteRows_management("DeleteThese")
    del FMIS_DEL, FMIS_LOAD, delsel
    print "Delete function completed"
def createCcBusLabelsFc():
    """The offset routes for the city center have only one set of geometries for
	each service level, but there needs to be labels for each line so generate a 
	unique geometry for each of the routes the line segments represent"""

    geom_type = 'POLYLINE'
    template = os.path.join(sm_shapefiles, 'distinct_routes.shp')
    oregon_spn = arcpy.SpatialReference(2913)
    bus_labels_cc = os.path.join(cc_shapefiles, 'bus_labels_cc.shp')
    management.CreateFeatureclass(os.path.dirname(bus_labels_cc),
                                  os.path.basename(bus_labels_cc),
                                  geom_type,
                                  template,
                                  spatial_reference=oregon_spn)

    i_fields = ['Shape@', 'route_id', 'serv_level', 'route_type']
    i_cursor = da.InsertCursor(bus_labels_cc, i_fields)

    s_fields = i_fields[:]
    s_fields[1] = 'routes'
    for fc in arcpy.ListFeatureClasses():
        if 'bus' in fc:
            with da.SearchCursor(fc, s_fields) as cursor:
                routes_ix = cursor.fields.index('routes')
                for row in cursor:
                    for route in row[routes_ix].split(','):
                        new_row = list(row)
                        new_row[routes_ix] = route

                        i_cursor.insertRow(new_row)

    del i_cursor
def replace_null_values(fc,
                        fields="*",
                        oid_field=None,
                        null_value=0,
                        where_clause=None):
    """updates a set of rows in chunks

    """
    if fields is None or \
       isinstance(fields, list) == False or \
       fields == "*":
        fields = [field.name for field in arcpy.ListFields(fc) \
                  if field.type not in ('Geometry', 'Blob', 'Raster')]
    if oid_field is None:
        oid_field = arcpy.Describe(fc).OIDFieldName
    chunk_size = calc_chunk_size()
    if oid_field not in fields:
        fields.append(oid_field)
    with da.SearchCursor(fc, fields, where_clause=where_clause) as cursor:
        search_fields = [
            field for field in cursor.fields if field != oid_field
        ]
        for group in grouper_it(chunk_size, cursor):
            df = pd.DataFrame.from_records(group, columns=cursor.fields)
            for field in search_fields:
                df.loc[df[field].isnull(), field] = null_value
                del field
            array = df.to_records(index=False)
            da.ExtendTable(fc, oid_field, array, oid_field, False)
            del array
    return fc
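The replace_null_values example above fills nulls with pandas and writes the values back through da.ExtendTable (append_only=False), matched on the ObjectID field, so no edit session or update cursor is needed. A hypothetical call, with illustrative paths and field names:

from arcpy import da

fc = r'C:\data\survey.gdb\wells'   # hypothetical feature class
replace_null_values(fc,
                    fields=['DEPTH_M', 'YIELD_LPS'],   # hypothetical numeric fields
                    null_value=0,
                    where_clause="STATUS = 'ACTIVE'")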
Example #11
def OffsetDirectionNearTable():
    from arcpy import GenerateNearTable_analysis, da, AddFieldDelimiters
    GeocodedLayer = 'Geocoding Result: Geocoding_Result_9'
    IntersectLayer = 'Geocoding_Result_9_Buffer_In'

    CursorFieldList = [
        'OBJECTID', 'ACCIDENT_KEY', 'X', 'Y', 'AT_ROAD_KDOT_DIRECTION',
        'POINT_X', 'POINT_Y'
    ]
    # cursor to collect the accident IDs and object IDs into a list

    CoordFinder = da.SearchCursor(IntersectLayer,
                                  CursorFieldList)  # @UndefinedVariable
    coordlist = []
    rowDictionary = dict()

    for row in CoordFinder:
        #print('{0}, {1}, {2}, {3}, {4}'.format(row[0], row[1], row[2], row[3], row[4]))
        if str(row[4]) == "E":  # AT_ROAD_KDOT_DIRECTION
            print row[0]
            EastCoord = max(row[2], row[5])  # larger of geocoded X and intersected POINT_X
            coordlist.append(EastCoord)
            rowDictionary[row[0]] = row  # key the row by OBJECTID
    for i in rowDictionary:
        print str(i)
Example #12
def OffsetDirectionMatrix1():
    # select the intersected coordinate that best describes the reported offset location along the on-road from the intersection, based on the CrashOffsetPoints function
    from arcpy import AddXY_management, AddJoin_management, ListFields, da, SelectLayerByAttribute_management, AddFieldDelimiters
    GeocodedLayer = 'Geocoding Result: Geocoding_Result_9'
    IntersectLayer = 'Geocoding_Result_9_Buffer_In'
    AddXY_management(IntersectLayer)
    AddJoin_management(IntersectLayer, "ACCIDENT_KEY", GeocodedLayer,
                       "ACCIDENT_KEY")
    CursorFieldList = [
        'X', 'Y', 'AT_ROAD_KDOT_DIRECTION', 'POINT_X', 'POINT_Y', 'OBJECTID',
        'ACCIDENT_KEY'
    ]
    # cursor to collect the accident IDs and object IDs into a list

    CoordFinder = da.SearchCursor(IntersectLayer,
                                  CursorFieldList)  # @UndefinedVariable
    rowDictionary = dict()
    for row in CoordFinder:
        rowDictionary[row[5]] = row
    try:
        del CoordFinder
    except:
        print "cursor hung"
    coordlist = []
    for keyname in rowDictionary.keys():
        rowOfInterest = rowDictionary[keyname]
        if str(rowOfInterest[2]) == "E":
            print str(rowOfInterest[2])
            OffsetCoord = rowOfInterest[3]
            coordlist.append(OffsetCoord)
            print coordlist
    FinalCoordinate = max(coordlist)
    FinalCoordInt = int(FinalCoordinate)

    print FinalCoordinate
Example #13
def OffsetDirectionMatrix(gdb):
    # select the intersected coordinate that best describes the reported offset location along the on-road from the intersection, based on the CrashOffsetPoints function
    from arcpy import AddXY_management, AddJoin_management, ListFields, da, SelectLayerByAttribute_management, AddFieldDelimiters
    GeocodedLayer = 'Geocoding Result: Geocoding_Result_9'
    IntersectLayer = 'Geocoding_Result_9_Buffer_In'
    AddXY_management(IntersectLayer)
    AddJoin_management(IntersectLayer, "ACCIDENT_KEY", GeocodedLayer,
                       "ACCIDENT_KEY")
    FieldsList = ListFields(IntersectLayer)
    CursorFieldList = [
        'X', 'Y', 'AT_ROAD_KDOT_DIRECTION', 'POINT_X', 'POINT_Y', 'OBJECTID',
        'ACCIDENT_KEY'
    ]
    # cursor to collect the accident IDs and object IDs into a list

    CoordFinder = da.SearchCursor(IntersectLayer,
                                  CursorFieldList)  # @UndefinedVariable
    coordlist = []
    rowDictionary = dict()

    for row in CoordFinder:
        #print('{0}, {1}, {2}, {3}, {4}'.format(row[0], row[1], row[2], row[3], row[4]))
        if str(row[2]) == "E":
            print row[0]
            EastCoord = max(row[0], row[3])  # larger of geocoded X and intersected POINT_X
            coordlist.append(EastCoord)
            rowDictionary[row[5]] = row  # key the row by OBJECTID
    print coordlist
    FinalEastCoordinate = max(coordlist)
    FinalEastCoordInt = int(FinalEastCoordinate)
    print FinalEastCoordinate
    CoordSelExpression = 'POINT_X - ' + str(FinalEastCoordInt) + " < 1"
    SelectLayerByAttribute_management(IntersectLayer, "NEW_SELECTION",
                                      CoordSelExpression)
Example #14
    def get_classes(self):
        """Get the number of classes in the shapefile"""
        row_val = []
        # search on the class field
        with da.SearchCursor(self.get_json_keys()[0], [self.get_json_keys()[4]]) as sc:
            print "Wait... calculating number of classes"
            for row in sc:
                row_val.append(int(row[0]))
        return max(row_val)
Example #15
def getRouteServicePairs():
    """Get all unique line-service level combinations in the routes feature class"""

    route_service_list = []
    s_fields = ['route_id', 'serv_level']
    with da.SearchCursor(distinct_routes, s_fields) as s_cursor:
        for row in s_cursor:
            if row not in route_service_list:
                route_service_list.append(row)

    return route_service_list
def getModeServicePairs():
    """Get all unique line-service level combinations in the routes feature class"""

    mode_service_list = []
    s_fields = ['route_type', 'serv_level']
    with da.SearchCursor(serv_level_routes, s_fields) as s_cursor:
        for row in s_cursor:
            if row not in mode_service_list:
                mode_service_list.append(row)

    return mode_service_list
Example #17
def getRowFromShareId(shareId):
    global shareTable

    fields = ['SHAREID', 'JSON']
    foundRow = None
    where = "SHAREID = '" + shareId + "'"

    with da.SearchCursor(shareTable, fields, where) as cursor:
        for row in cursor:
            foundRow = row

    return foundRow
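Example #17 builds its where clause by concatenating shareId straight into the SQL string. A slightly more defensive variant (a sketch only, reusing the same shareTable global) delimits the field name for the target workspace with arcpy.AddFieldDelimiters and escapes embedded single quotes:

import arcpy
from arcpy import da

def get_row_from_share_id(share_id):
    # Sketch: same behaviour as getRowFromShareId above, with a
    # workspace-aware field delimiter and basic quote escaping.
    field = arcpy.AddFieldDelimiters(shareTable, 'SHAREID')
    where = "{0} = '{1}'".format(field, share_id.replace("'", "''"))
    found = None
    with da.SearchCursor(shareTable, ['SHAREID', 'JSON'], where) as cursor:
        for row in cursor:
            found = row
    return found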
def commonline(linefc, pointfc, linetype):
    fields = ['SHAPE@', 'LineID', 'S_NodeID', 'E_NodeID']
    with da.SearchCursor(
            linefc, fields,
            'HLType = {0} AND S_NodeID IS NOT NULL AND E_NodeID IS NOT NULL'.
            format(linetype)) as cursorL2:
        for lineRL2 in cursorL2:
            with da.UpdateCursor(
                    linefc, fields,
                    'HLType = {0} AND S_NodeID IS NULL AND E_NodeID IS NULL'.
                    format(linetype)) as cursorL1:
                for lineRL1 in cursorL1:
                    if lineRL1[0].touches(lineRL2[0]):
                        with da.SearchCursor(pointfc,
                                             ['SHAPE@', 'NodeID']) as cursorP:
                            for point in cursorP:
                                if point[0].touches(
                                        lineRL1[0]) and point[0].touches(
                                            lineRL2[0]):
                                    lineRL1[2] = point[1]
                                    with da.SearchCursor(
                                            pointfc,
                                        ['SHAPE@', 'NodeID']) as cursorP2:
                                        for point2 in cursorP2:
                                            if point2[0].touches(
                                                    lineRL1[0]
                                            ) and lineRL1[2] != point2[1]:
                                                lineRL1[3] = point2[1]
                                                cursorL1.updateRow(lineRL1)
                                                print(u'  Supply line <{0}>: start <{1}>, end <{2}>.'.format(lineRL1[1], lineRL1[2], lineRL1[3]))
                                    del cursorP2
                        del cursorP
            del cursorL1
    del cursorL2
Example #19
def checkFeatureLocations(gdb):
    userMessage("Checking feature locations...")
    from os import path
    from arcpy import MakeFeatureLayer_management, SelectLayerByAttribute_management, SelectLayerByLocation_management, GetCount_management, Delete_management, da

    values = []
    #make sure feature are all inside authoritative boundary

    #get authoritative boundary
    authBound = path.join(gdb, "NG911", "AuthoritativeBoundary")
    ab = "ab"

    MakeFeatureLayer_management(authBound, ab)

    for dirpath, dirnames, filenames in da.Walk(gdb, True, '', False,
                                                ["FeatureClass"]):
        for filename in filenames:
            if filename != "AuthoritativeBoundary":
                #get full path name & create a feature layer
                fullPath = path.join(gdb, filename)
                fl = "fl"
                MakeFeatureLayer_management(fullPath, fl)

                #select by location to get count of features outside the authoritative boundary
                SelectLayerByLocation_management(fl, "INTERSECT", ab)
                SelectLayerByAttribute_management(fl, "SWITCH_SELECTION", "")
                #get count of selected records
                result = GetCount_management(fl)
                count = int(result.getOutput(0))

                #report results
                if count > 0:
                    fields = ("OBJECTID")
                    with da.SearchCursor(fl, fields) as rows:
                        for row in rows:
                            val = (today,
                                   "Feature not inside authoritative boundary",
                                   filename, "", row[0])
                            values.append(val)
                else:
                    userMessage(filename +
                                ": all records inside authoritative boundary")

                #clean up
                Delete_management(fl)

    userMessage("Completed check on feature locations")

    if values != []:
        RecordResults("fieldValues", values, gdb)
Example #20
def PointCheck(fc, fields, layer_name):
    # CIIMS CHECK will report any situations where the latitude or longitude attributes do not match the point location
    MakeFeatureLayer_management(fc, layer_name)
    with da.SearchCursor(fc, fields) as cursor:
        for row in cursor:
            row0x = str(round(row[0], 8))
            row0y = str(round(row[1], 8))
            rowx, rowy = (row[2])
            rowxr = round(rowx, 8)
            rowyr = round(rowy, 8)
            if row0x == str(rowxr) and row0y == str(rowyr):
                pass
            else:
                print row, row0x, rowxr, row0y, rowyr
    del fc, layer_name, fields
def endingline(linefc, pointfc, nodetype, linetype):
    with da.SearchCursor(pointfc, ['SHAPE@', 'NodeID'],
                         'NodeType = {0}'.format(nodetype)) as cursorG:
        for pointGLF in cursorG:
            with da.UpdateCursor(
                    linefc, ['SHAPE@', 'LineID', 'S_NodeID', 'E_NodeID'],
                    'HLType = {0} AND S_NodeID IS NULL AND E_NodeID IS NULL'.
                    format(linetype)) as cursorL:
                for lineRL in cursorL:
                    if lineRL[0].touches(pointGLF[0]):
                        lineRL[3] = pointGLF[1]
                        with da.SearchCursor(
                                pointfc, ['SHAPE@', 'NodeID'],
                                'NodeType <> {0}'.format(nodetype)) as cursorP:
                            for point in cursorP:
                                if lineRL[0].touches(point[0]):
                                    lineRL[2] = point[1]
                                    cursorL.updateRow(lineRL)
                                    print(u'  Return line <{0}>: start <{1}>, end <{2}>.'.format(lineRL[1], lineRL[2], lineRL[3]))
                        del cursorP
            del cursorL
    del cursorG
Example #22
def PointCheck(fc, fields, layer_name):
    # Use this to check any situations where the latitude or longitude attributes do not match the point location
    MakeFeatureLayer_management(fc, layer_name)
    with da.SearchCursor(fc, fields) as cursor:
        for row in cursor:
            rowx, rowy = (row[2])
            intolX = abs(row[0] - rowx)
            intolY = abs(row[1] - rowy)
            Tolerance = 0.00000001
            if intolX < Tolerance and intolY < Tolerance:
                pass
            else:
                print row, intolX, intolY
    del fc, layer_name, fields
    print "all other points within tolerance"
Example #23
def replace_values(fc,
                   fields="*",
                   oid_field=None,
                   find_value=None,
                   replace_value=0,
                   where_clause=None):
    """updates a set of rows in chunks

    """
    try:
        if fields is None or \
           isinstance(fields, list) == False or \
           fields == "*":
            fields = [field.name for field in arcpy.ListFields(fc) \
                      if field.type not in ('Geometry', 'Blob', 'Raster')]
        if oid_field is None:
            oid_field = arcpy.Describe(fc).OIDFieldName
        chunk_size = calc_chunk_size()
        if oid_field not in fields:
            fields.append(oid_field)
        with da.SearchCursor(fc, fields, where_clause=where_clause) as cursor:
            search_fields = [
                field for field in cursor.fields if field != oid_field
            ]
            for group in grouper_it(chunk_size, cursor):
                df = pd.DataFrame.from_records(group, columns=cursor.fields)
                for field in search_fields:
                    if  find_value is None or \
                        str(find_value).lower() == "none" or \
                        str(find_value).lower().strip() == "":
                        df.loc[df[field].isnull(), field] = replace_value
                    else:
                        df.loc[df[field] == find_value, field] = replace_value
                    del field
                array = df.to_records(index=False)
                da.ExtendTable(fc, oid_field, array, oid_field, False)
                del array
                del df
        return fc
    except:
        line, filename, synerror = trace()
        raise FunctionError({
            "function": "replace_values",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arc": str(arcpy.GetMessages(2))
        })
def process():
    # Detect Unit of Measurement (Feet -vs- Meter)
    cell_factor = getCellFactor(in_mosaic_dataset)

    # Obtain List of Raster Files in Mosaic Dataset
    temp_table = join("memory", "temp_table")
    ExportMosaicDatasetPaths(in_mosaic_dataset, temp_table, "#", "ALL", "RASTER")
    rasters = set(row[0] for row in da.SearchCursor(temp_table, "Path"))
    Delete(temp_table)

    if not exists(out_directory):
        makedirs(out_directory)

    # Process each raster
    for in_raster in rasters:
        root_dir, file = split(in_raster)
        AddMessage("da filename is: {}".format(file))
        out_raster = join(out_directory, file)

        desc = Describe(in_raster)
        cell_size_height = desc.children[0].meanCellHeight  # Cell size in the Y axis and / or
        cell_size_width = desc.children[0].meanCellWidth  # Cell size in the X axis
        cell_size = "{0} {1}".format(cell_size_height*cell_factor, cell_size_width*cell_factor)

        if unitsCalc(in_mosaic_dataset) == "Foot":
            outTimes = Times(in_raster, 0.3048)
            ProjectRaster(in_raster=outTimes,
                          out_raster=out_raster,
                          out_coor_system=out_spatial_reference,
                          resampling_type=resampling_type,
                          cell_size=cell_size,
                          geographic_transform=geographic_transform,
                          in_coor_system=input_spatial_reference)
        else:
            ProjectRaster(in_raster=in_raster,
                          out_raster=out_raster,
                          out_coor_system=out_spatial_reference,
                          resampling_type=resampling_type,
                          cell_size=cell_size,
                          geographic_transform=geographic_transform,
                          in_coor_system=input_spatial_reference)

    # Delete Intermediate Data
    del rasters
    if out_mosaic_dataset:
        root_dir, file = split(out_mosaic_dataset)
        # TODO: Automatically detect Pixel Type from input Mosaic Dataset Rasters and pass below
        createMosaics(root_dir, file, out_directory, out_spatial_reference, "32_BIT_UNSIGNED")
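The process() function above depends on module-level helpers (getCellFactor, unitsCalc, createMosaics) and settings (in_mosaic_dataset, out_directory, projection parameters) defined elsewhere. A rough sketch of the two unit helpers, assuming unitsCalc reports the linear unit of the mosaic's spatial reference and getCellFactor returns the matching metre conversion; both names and behaviour here are assumptions:

import arcpy

def unitsCalc(mosaic_dataset):
    # Hypothetical: report the linear unit name of the dataset's
    # spatial reference, e.g. 'Foot' or 'Meter'.
    return arcpy.Describe(mosaic_dataset).spatialReference.linearUnitName

def getCellFactor(mosaic_dataset):
    # Hypothetical: factor applied to cell sizes when the source units
    # are feet; 1.0 when the dataset is already metric.
    return 0.3048 if unitsCalc(mosaic_dataset) == 'Foot' else 1.0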
Example #25
def PointDelete(fc, layer_name, deltbl, table_name):
    # delete rows from SDE CIIMS that are removed from CANSYS CIIMS
    MakeFeatureLayer_management(fc, layer_name)
    MakeTableView_management(deltbl, table_name)
    with da.SearchCursor(deltbl, "CROSSINGID") as delcur:
        for row in delcur:
            DelXID = ("{0}".format(row[0]))
            print DelXID + " is being deleted from the CIIMS table"
            #AddJoin_management(layer_name,"CROSSINGID", deltbl, "CROSSINGID", "KEEP_ALL")
            delsel = "CROSSINGID LIKE '" + str(row)[3:10] + "'"
            SelectLayerByAttribute_management(layer_name, "ADD_TO_SELECTION",
                                              delsel)
    DeleteRows_management(layer_name)
    del fc, layer_name, deltbl, table_name
    print "Delete function completed"
Example #26
def PointDelete(fc, layer_name, deltbl, table_name):
    #delete rows from SDE CIIMS that are removed from CANSYS CIIMS
    MakeFeatureLayer_management(fc, layer_name)
    MakeTableView_management(deltbl, table_name)
    #search cursor to match the crossing ID in the delete view
    with da.SearchCursor(deltbl, "CROSSINGID") as delcur:  # @UndefinedVariable
        for row in delcur:
            DelXID = ("{0}".format(row[0]))
            print DelXID + " is being deleted from the CIIMS table"
            #add the crossing ID for the row to be deleted to the selection set
            delsel = "CROSSINGID LIKE '" + DelXID + "'"
            SelectLayerByAttribute_management(layer_name, "ADD_TO_SELECTION",
                                              delsel)
    #delete the selected rows
    DeleteRows_management(layer_name)
    del fc, layer_name, deltbl, table_name
    print "Delete function completed"
Example #27
def main():

    inTable = arcpy.GetParameterAsText(0)
    fileLocation = arcpy.GetParameterAsText(1)

    with da.SearchCursor(inTable,
                         ['DATA', 'ATT_NAME', 'ATTACHMENTID']) as cursor:
        for item in cursor:
            attachment = item[0]
            #filenum = "ATT" + str(item[2]) + "_"
            #filename = filenum + str(item[1])
            filename = str(item[1])
            with open(os.path.join(fileLocation, filename), 'wb') as f:
                f.write(attachment.tobytes())
            del item
            #del filenum
            del filename
            del attachment
Example #28
def populateUnifiedFc():
    """Iterate through all of the individual route feature classes and add them
	to common fc"""

    route_fields = ['Shape@', 'route_id', 'serv_level', 'route_type']
    i_cursor = da.InsertCursor(offset_routes, route_fields)

    feat_datasets = ['frequent', 'standard', 'rush_hour', 'rail_tram']
    for fd in feat_datasets:
        for fc in arcpy.ListFeatureClasses(feature_dataset=fd):
            # exclude these as a different more generalized fc is being used
            # to represent the streetcar
            print fc
            fc_path = os.path.join(env.workspace, fd, fc)
            with da.SearchCursor(fc_path, route_fields) as s_cursor:
                for row in s_cursor:
                    i_cursor.insertRow(row)

    del i_cursor
Example #29
    def from_featureclass(cls, filepath):
        """
        Alternate constructor to create a GeoSeries from a feature class
        Inputs:
         filepath: file path to the feature class
        Output:
         GeoSeries
        """
        if arcpy.Exists(filepath) == False:
            raise ValueError("Feature class: %s does not exist" % filepath)
        desc = arcpy.Describe(filepath)
        if hasattr(desc, "shapeFieldName") == False:
            raise ValueError("Input must have a geometry column")
        fields = [desc.shapeFieldName + "@"]
        geoms = [row[0] for row in da.SearchCursor(filepath, fields)]

        g = GeoSeries(geoms)
        g.sr = arcpy.Describe(filepath).spatialReference.factoryCode
        return g
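Example #29 is an alternate constructor on a GeoSeries-like wrapper class defined elsewhere; under that assumption, typical use would look roughly like the sketch below (the path is hypothetical):

# Hypothetical path; GeoSeries is whatever class defines from_featureclass.
parcels = GeoSeries.from_featureclass(r'C:\data\city.gdb\parcels')
print(parcels.sr)   # spatial reference factory code captured by the constructor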
Example #30
def validateGeometry():
    """Check for geometry errors and multipart features that may have been introduced
	in the manual editing process of creating the offsets"""

    # Check for geometry errors, this tool doesn't flag a lot of the aspects I'm
    # interested in, thus the other steps below
    error_table = os.path.join(temp_dir, 'carto_errors.dbf')
    management.CheckGeometry(offset_routes, error_table)

    # Identify any multipart features
    multipart_dump = os.path.join(temp_dir, 'multipart_dump.shp')
    management.MultipartToSinglepart(offset_routes, multipart_dump)

    multipart_dict = {}
    dump_fields = ['OID@', 'ORIG_FID']
    with da.SearchCursor(multipart_dump, dump_fields) as s_cursor:
        for oid, orig_id in s_cursor:
            if orig_id not in multipart_dict:
                multipart_dict[orig_id] = 1
            else:
                multipart_dict[orig_id] += 1

    print "Features with the following fid's are multipart:"
    for orig_id, count in multipart_dict.iteritems():
        if count > 1:
            print orig_id

    # Find other errors like shared geometries and deadends using the merge divided
    # roads tool, I'm not actually interested in the output of this tool but rather the
    # validation output that it generates
    merge_field, field_type = 'merge_id', 'LONG'
    management.AddField(offset_routes, merge_field, field_type)

    # validation output will be logged here (note that the user name portion may be variable):
    # C:\Users\humphrig\AppData\Local\ESRI\Geoprocessing
    merge_distance = 100  # feet
    validation_merge = os.path.join('in_memory', 'validation_merge')
    cartography.MergeDividedRoads(offset_routes, merge_field, merge_distance,
                                  validation_merge)

    # drop the merge field as it is no longer needed
    management.DeleteField(offset_routes, merge_field)