Code example #1
def Uniquity():
    rcl = ng911 + r"/RoadCenterline"
    apt = ng911 + r"/AddressPoints"
    arcpy.FindIdentical_management(rcl, checkfile + r"/RCLDup", "SEGID", "#",
                                   "0", "ONLY_DUPLICATES")
    arcpy.FindIdentical_management(apt, checkfile + r"/AddressPointsDup",
                                   "ADDID", "#", "0", "ONLY_DUPLICATES")
Code example #2
def delete_loops(lines):
    arcpy.Copy_management(lines, "Lines_Copy")
    ends = arcpy.FeatureVerticesToPoints_management(lines, "Ends", "BOTH_ENDS")
    identical = arcpy.FindIdentical_management(ends, "Identical",
                                               "SHAPE;ORIG_FID")
    arcpy.JoinField_management(ends,
                               "OBJECTID",
                               identical,
                               "IN_FID",
                               fields="FEAT_SEQ")
    dissolved_ends = arcpy.Dissolve_management(
        ends,
        "Ends_Dissolve",
        "ORIG_FID;FEAT_SEQ",
        statistics_fields="FEAT_SEQ COUNT")
    # Collect IDs of lines whose two endpoints fall in the same identical group
    loop_ids = set()
    with arcpy.da.SearchCursor(dissolved_ends,
                               ['ORIG_FID', 'COUNT_FEAT_SEQ']) as rows:
        for row in rows:
            if row[1] == 2:
                loop_ids.add(row[0])
    # Delete the loop artefacts in a single pass over the input lines
    with arcpy.da.UpdateCursor(lines, ['OID@']) as del_lines:
        for del_line in del_lines:
            if del_line[0] in loop_ids:
                arcpy.AddMessage("Deleting ID {0}".format(del_line[0]))
                del_lines.deleteRow()
    arcpy.AddMessage("Artefacts removed")
Code example #3
def integrating_network(lines, tolerance="0 Meters"):
    # Split the network by Line_Type: 'ВЛ' = overhead line, 'КЛ' = cable line
    overhead_lines = arcpy.FeatureClassToFeatureClass_conversion(
        lines, "Network", "Lines_over_p", where_clause="Line_Type = 'ВЛ'")
    cable_lines = arcpy.FeatureClassToFeatureClass_conversion(
        lines, "Network", "Lines_cable_p", where_clause="Line_Type = 'КЛ'")
    # Snap vertices together: user tolerance for overhead lines, 0.1 m for cables
    arcpy.Integrate_management(overhead_lines, tolerance)
    arcpy.Integrate_management(cable_lines, "0.1 Meters")
    lines = arcpy.Merge_management([overhead_lines, cable_lines],
                                   "Lines_merge")
    split = arcpy.SplitLine_management(lines, "SplitLine")
    find = arcpy.FindIdentical_management(
        split,
        "in_memory/Find_Ident", ["Shape", "Name", "Voltage"],
        xy_tolerance=tolerance,
        output_record_option="ONLY_DUPLICATES")
    joined_split = arcpy.JoinField_management(split, "OBJECTID", find,
                                              "IN_FID")
    arcpy.DeleteIdentical_management(joined_split,
                                     ["Shape", "Name", "Voltage"],
                                     "0.1 Meters")
    unsplit = arcpy.Dissolve_management(
        joined_split,
        "Unsplited_Lines", [
            "Name", "Voltage", "Line_Type", "Start", "End", "Circuit",
            "Operate_Name", "Trace_Version", "Status"
        ],
        multi_part="MULTI_PART")
    return unsplit
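
A usage sketch under stated assumptions: the workspace contains a "Network" feature dataset (the second argument to FeatureClassToFeatureClass above) and the input lines carry the Line_Type values 'ВЛ' and 'КЛ'; the paths are hypothetical:

import arcpy

arcpy.env.workspace = r"C:/data/Energy.gdb"  # assumed gdb with a "Network" dataset
arcpy.env.overwriteOutput = True
unsplit = integrating_network("Network/Lines", tolerance="0.5 Meters")
print(arcpy.GetCount_management(unsplit)[0])  # features in Unsplited_Lines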
Code example #4
def findDupPts(inFds,outFds):
    addMsgAndPrint('Looking for duplicate points')
    duplicatePoints = []
    arcpy.env.workspace = os.path.dirname(inFds)
    ptFcs1 = arcpy.ListFeatureClasses('','POINT',os.path.basename(inFds))
    ptFcs2 = []
    for fc in ptFcs1:
        notEdit = True
        for pfx in editPrefixes:
            if fc.find(pfx) > -1:
                notEdit = False
        if notEdit: ptFcs2.append(fc)
    for fc in ptFcs2:
        addMsgAndPrint('  finding duplicate records in '+fc)
        newTb = os.path.dirname(outFds)+'/dups_'+fc
        testAndDelete(newTb)
        dupFields = ['Shape']
        allFields1 = arcpy.ListFields(fc)
        allFields = []
        for fld in allFields1:
            allFields.append(fld.name)
        for aF in ('Type','Azimuth','Inclination'):
            if aF in allFields:
                dupFields.append(aF)
        addMsgAndPrint('    fields to be compared: '+str(dupFields))
        arcpy.FindIdentical_management(inFds+'/'+fc,newTb,dupFields,'','','ONLY_DUPLICATES')
        addMsgAndPrint('    dups_'+fc+': '+str(numberOfRows(newTb))+' rows')
        if numberOfRows(newTb) == 0:
            testAndDelete(newTb)
        else:
            duplicatePoints.append('   '+str(numberOfRows(newTb))+' rows in '+os.path.basename(newTb))

    return duplicatePoints
Code example #5
File: dao_util.py Project: UGAROY/py-util
def delete_identical_only_keep_min_oid(data, fields, xy_tolerance="1 meters"):
    """
    Similar to the DeleteIdentical function in arcpy. This tool goes one more step to only keep the records with the
    smaller id
    :param data:
    :param fields:
    :param xy_tolerance:
    """
    identical_table = "in_memory\\identical_table"
    arcpy.FindIdentical_management(data, identical_table, fields, xy_tolerance,
                                   "", "ONLY_DUPLICATES")
    fseq_list = []
    delete_oid_list = []
    sCursor = arcpy.SearchCursor(identical_table, "", "", "FEAT_SEQ;IN_FID",
                                 "IN_FID A")
    for sRow in sCursor:
        feat_seq, in_fid = sRow.getValue("FEAT_SEQ"), sRow.getValue("IN_FID")
        if feat_seq not in fseq_list:
            fseq_list.append(feat_seq)
        else:
            delete_oid_list.append(in_fid)
    del sCursor
    oid_field = arcpy.Describe(data).OIDFieldName
    if len(delete_oid_list) != 0:
        where_clause = build_numeric_in_sql_expression(oid_field,
                                                       delete_oid_list)
        delete_subset_data(data, where_clause)
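
Because the cursor is sorted by IN_FID ascending, the first record seen in each FEAT_SEQ group is the one with the smallest ObjectID, and only the later group members end up in delete_oid_list. A minimal call sketch, assuming build_numeric_in_sql_expression and delete_subset_data are the helpers defined elsewhere in dao_util (the data path is hypothetical):

parcels = r"C:/data/Cadastre.gdb/Parcels"  # hypothetical feature class
# Keep only the lowest-OID record in each group of identical geometries
delete_identical_only_keep_min_oid(parcels, ["Shape"], xy_tolerance="0.5 meters")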
Code example #6
def main(line, seg_length):

    arcpy.AddMessage("Plotting segment endpoints...")
    arcpy.MakeFeatureLayer_management(line, "in_line_lyr")
    fields = ["SHAPE@", "LineOID"]

    # Plot endpoints for all segments
    endPnt_all = plot_end(line, fields)
    arcpy.MakeFeatureLayer_management(endPnt_all, "endPnt_all_lyr")

    # Find duplicate endpoints
    arcpy.FindIdentical_management("endPnt_all_lyr", "dup_table", ["Shape"],
                                   0.5, "#", "ONLY_DUPLICATES")
    arcpy.MakeTableView_management(r"dup_table", "dup_tblview")
    arcpy.JoinField_management("endPnt_all_lyr", "LineOID", "dup_tblview",
                               "IN_FID", "#")
    arcpy.SelectLayerByAttribute_management("endPnt_all_lyr", "NEW_SELECTION",
                                            """"IN_FID" IS NOT NULL""")
    arcpy.FeatureClassToFeatureClass_conversion("endPnt_all_lyr", "in_memory",
                                                "endPnt_dup")

    # Find segments with duplicate endpoints
    arcpy.JoinField_management("in_line_lyr", "OBJECTID", "dup_tblview",
                               "IN_FID", "#")
    arcpy.SelectLayerByAttribute_management("in_line_lyr", "NEW_SELECTION",
                                            """"IN_FID" IS NOT NULL""")
    arcpy.FeatureClassToFeatureClass_conversion("in_line_lyr", "in_memory",
                                                "line_dup")
    arcpy.SelectLayerByAttribute_management("in_line_lyr", "SWITCH_SELECTION")
    arcpy.FeatureClassToFeatureClass_conversion("in_line_lyr", "in_memory",
                                                "line_nodup")

    # Re-plot endpoints for segments with duplicate endpoints
    endPnt_dup_final = plot_end(r"in_memory\line_dup", fields)
    arcpy.FeatureClassToFeatureClass_conversion(endPnt_dup_final, "in_memory",
                                                "endPnt_dup_final")
    endPnt_nodup_final = plot_end(r"in_memory\line_nodup", fields)
    arcpy.FeatureClassToFeatureClass_conversion(endPnt_nodup_final,
                                                "in_memory",
                                                "endPnt_nodup_final")
    finalEndpnt = arcpy.Merge_management(
        ["in_memory\endPnt_nodup_final", "in_memory\endPnt_dup_final"],
        r"in_memory\finalEndPnt")

    # clean up temp files
    arcpy.Delete_management("in_line_lyr")
    arcpy.Delete_management(r"dup_table")
    arcpy.Delete_management("dup_tblview")
    arcpy.Delete_management(endPnt_all)
    arcpy.Delete_management("endPnt_all_lyr")
    arcpy.Delete_management(r"in_memory\endPnt_dup")
    arcpy.Delete_management(r"in_memory\line_dup")
    arcpy.Delete_management(r"in_memory\line_nodup")
    arcpy.Delete_management(r"in_memory\endPnt_nodup_final")
    arcpy.Delete_management(r"in_memory\endPnt_dup_final")

    return finalEndpnt
Code example #7
def selectIdentical(dataset):
    dups = os.path.join("in_memory", "tempDups")
    dups1 = arcpy.FindIdentical_management(
        dataset, dups, 'EO_ID', output_record_option="ONLY_DUPLICATES")

    with arcpy.da.SearchCursor(dups1, 'IN_FID') as cursor:
        duplicates = sorted({row[0] for row in cursor})

    query = '"OBJECTID" IN ' + str(tuple(duplicates))

    arcpy.SelectLayerByAttribute_management(dataset, "ADD_TO_SELECTION", query)
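
Select Layer By Attribute needs a layer or table view rather than a feature class path, so a sketch of calling selectIdentical would make the layer first (names hypothetical):

import arcpy

arcpy.MakeFeatureLayer_management(r"C:/data/Bio.gdb/EO_points", "eo_lyr")
selectIdentical("eo_lyr")  # selects every record whose EO_ID occurs more than once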
Code example #8
def onClick(self):
    workspace, PFname = os.path.split(parcelFabric)
    ParcelPoints = "FabricInvestigation\\Points"
    DuplicatePointsTable = os.path.join("in_memory",
                                        PFname + "_IdenticalPoints")
    arcpy.FindIdentical_management(ParcelPoints, DuplicatePointsTable,
                                   "X;Y", "#", "0", "ONLY_DUPLICATES")
    arcpy.AddJoin_management(ParcelPoints, "OBJECTID",
                             DuplicatePointsTable, "IN_FID", "KEEP_COMMON")
    saveDuplicates = pythonaddins.SaveDialog("Save Duplicate Points")
    arcpy.CopyFeatures_management(ParcelPoints, saveDuplicates)
    newPath, newLayer = os.path.split(saveDuplicates)
    arcpy.mapping.MoveLayer(
        df,
        arcpy.mapping.ListLayers(mxd, "FabricInvestigation")[0],
        arcpy.mapping.ListLayers(mxd, newLayer)[0], "BEFORE")
    DupPoints.checked = False
Code example #9
def duplicados(capa, ruta):
    global gdb
    nombre_gdb = "duplicados_%s" % (
        datetime.datetime.now().strftime("%b_%d_%Y_%H_%M_%S"))
    nombre_gdb = nombre_gdb.replace(".", "")
    gdb = arcpy.CreateFileGDB_management(ruta, nombre_gdb)
    ##capa_copia=arcpy.CopyFeatures_management(in_features=capa,out_feature_class="in_memory\\%s"%(arcpy.ValidateTableName(arcpy.Describe(capa).name)))
    capa_copia = arcpy.CopyFeatures_management(
        in_features=capa,
        out_feature_class="%s\\%s" %
        (gdb, arcpy.ValidateTableName(arcpy.Describe(capa).name)))
    arcpy.AddField_management(capa_copia, "dupli", "TEXT", "", "", "", "",
                              "NULLABLE", "NON_REQUIRED", "")
    with arcpy.da.UpdateCursor(
            capa_copia,
        ["SHAPE@Y", "SHAPE@X", "SHAPE@AREA", "dupli"]) as cursor:
        for fila in cursor:
            Cx = float(fila[1])
            Cy = float(fila[0])
            CArea = float(fila[2])
            x = "{0:.2f}".format(Cx)  # keep only two decimal places
            y = "{0:.2f}".format(Cy)  # keep only two decimal places
            Area = "{0:.2f}".format(CArea)
            # Key = rounded X_Y_Area; identical keys flag duplicate geometries
            fila[3] = x + "_" + y + "_" + Area
            cursor.updateRow(fila)

    arcpy.FindIdentical_management(in_dataset=capa_copia,
                                   out_dataset=str(gdb) + "\\duplicados",
                                   fields=["dupli"],
                                   output_record_option="ONLY_DUPLICATES")
    arcpy.Delete_management(
        "%s\\%s" % (gdb, arcpy.ValidateTableName(arcpy.Describe(capa).name)))
    registros = int(arcpy.GetCount_management(str(gdb) + "\\duplicados")[0])
    return str(registros) + ";" + "%s\\duplicados" % (gdb)
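
duplicados packs its result into a single "count;path" string, so a caller splits it back apart. A hedged sketch (the layer and output folder are hypothetical):

resultado = duplicados(r"C:/data/Predios.gdb/Lotes", r"C:/salidas")
registros, tabla = resultado.split(";", 1)
print("{0} duplicate records listed in {1}".format(registros, tabla))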
Code example #10
def find_duplicates(fc, field_list):

    arcpy.env.workspace = "IN_MEMORY"
    arcpy.env.overwriteOutput = True

    arcpy.FindIdentical_management(fc,
                                   "temp_file",
                                   field_list,
                                   output_record_option="ONLY_DUPLICATES")

    select_list = []
    seq_list = []

    with arcpy.da.SearchCursor("temp_file", "*") as cursor:
        for row in cursor:

            seq_list.append(row[2])

            if seq_list.count(row[2]) > 1:
                select_list.append(row[1])

    query = "\"OBJECTID\"IN({0})".format(str(select_list).strip('[]'))

    arcpy.SelectLayerByAttribute_management(fc, "NEW_SELECTION", query)
Code example #11
def findsinglevertex(layer):
	# find single vertices
	# FIXME
	# Integrate fails when path contains spaces
	# Possible way to fix it is to use arcpy.env and call the layer by its name
	try:
		arcpy.Integrate_management(layer, '0.1 Meters')
	except RuntimeError:
		arcpy.AddMessage(u'...Failed to run Integrate tool')

	arcpy.env.addOutputsToMap = False
	vert = u'in_memory\\vert'
	ident = u'in_memory\\ident'
	arcpy.FeatureVerticesToPoints_management(
			in_features = layer,
			out_feature_class = vert,
			point_location = u'ALL')

	# FIXME
	# test way to skip last points in polygons
	# sql-clause doesn't work in in_memory datasets
	oid_field = arcpy.Describe(layer).OIDFieldName
	lastpoints = []
	if arcpy.Describe(layer).shapeType == 'Polygon':
		prev_id = -1
		prev_row = -1
		with arcpy.da.SearchCursor(vert, ['OID@', 'ORIG_FID']) as sc:
			for row in sc:
				if row[1] != prev_id:
					lastpoints.append(prev_row)
					prev_id = row[1]
				prev_row = row[0]
			# append last point of last polygon
			lastpoints.append(row[0])

		with arcpy.da.UpdateCursor(vert, ['OID@']) as uc:
			for row in uc:
				if row[0] in lastpoints:
					uc.deleteRow()

	# points = {PointFID: [LineID, PointNo], ...}
	points = {}
	lines_id = []
	clause = (None, 'ORDER BY {0} DESC'.format(oid_field))
	with arcpy.da.SearchCursor(vert, ['OID@', 'ORIG_FID'], sql_clause = clause) as sc:
		feat_num = -1
		vert_num = -1
		for row in sc:
			if row[1] != feat_num:
				feat_num = row[1]
				vert_num = 0
				lines_id.append(feat_num)
			else:
				vert_num += 1
			points[row[0]] = [feat_num, vert_num]

	arcpy.FindIdentical_management(
			in_dataset = vert,
			out_dataset = ident,
			fields = u'SHAPE',
			xy_tolerance = u'0.2 Meters',
			output_record_option = u'ONLY_DUPLICATES')

	identical_v = [row[0] for row in arcpy.da.SearchCursor(ident, 'IN_FID')]  # ids of identical vertices

	single_pairs = [val for key, val in points.items() if key not in identical_v]

	single_out = {oid: [] for oid in lines_id}

	for p in single_pairs:
		single_out[p[0]].append(p[1])

	arcpy.Delete_management(vert)
	arcpy.Delete_management(ident)

	return single_out
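
findsinglevertex returns a dict keyed by line ObjectID; each value lists the per-feature positions (numbered from the end of the line, because of the descending sort) of vertices that coincide with no other vertex within 0.2 meters. A usage sketch with a hypothetical layer:

singles = findsinglevertex(r"C:/data/Network.gdb/Pipes")
for line_id, positions in singles.items():
    if positions:
        print("Line {0}: unsnapped vertices at {1}".format(line_id, positions))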
Code example #12
            pass
        else:
            # Find duplicate geometry
            flds = arcpy.ListFields(fc)
            fldNames = []
            for fld in flds:
                if fld.type not in ignoreType:
                    if fld.name.lower() not in [x.lower() for x in ignoreFld]:
                        fldNames.append(str(fld.name))

            dupeTable = "in_memory\\tmp"
            arcpy.FindIdentical_management(
                fc,
                dupeTable,
                fldNames,
                xy_tolerance=xyTol,
                z_tolerance=zTol,
                output_record_option="ONLY_DUPLICATES")

            # Get table count and pass the dataset if no duplicates exist
            fidList = []
            with arcpy.da.SearchCursor(dupeTable, ["IN_FID"]) as cursor:
                for row in cursor:
                    fidList.append(row[0])
            #expression = 'OBJECTID IN ({0})'.format(', '.join(map(str, fidList)) or 'NULL')
            tblCount = arcpy.GetCount_management(dupeTable)

            uniqdupCount = len(unique_values(dupeTable, "FEAT_SEQ"))
            dupCount = len(unique_values(dupeTable, "IN_FID"))
Code example #13
    arcpy.CopyFeatures_management("Desig_union", "Desig_union_repair")
    MyFunctions.check_and_repair("Desig_union_repair")

    # Multipart to single part - there are many edge slivers spread over a long length of edge in multiple parts
    print("Converting to single part")
    arcpy.MultipartToSinglepart_management("Desig_union_repair",
                                           "Desig_union_repair_sp")

    # Delete identical shapes, which come either from overlaps in the union or from input dataset errors
    # Ideally you would inspect using Find Identical first, and make sure useful info is not being lost
    # i.e. none of the duplicates have differing attribute info
    # Inspection for Oxon has been carried out and only two polygons with different info were found
    # In both cases the polygon to keep is higher in the table so should be preserved

    arcpy.FindIdentical_management("Desig_union_repair_sp",
                                   "FindIdentical", ["Shape"],
                                   output_record_option="ONLY_DUPLICATES")
    print(
        "Deleting identical features. These are recorded in FindIdentical table - please check that the deletions are OK."
    )
    if check_identical:
        # Over 8000 identical shapes so no time to check them individually.
        print "Exiting so that you can check the identical polygons. When finished, set check_identical and first_part to False and restart."
        exit()

if second_part:
    print "Deleting identical shapes"
    arcpy.CopyFeatures_management("Desig_union_repair_sp",
                                  "Desig_union_repair_sp_delid")
    arcpy.DeleteIdentical_management("Desig_union_repair_sp_delid", ["Shape"])
Code example #14
                            "Access_paths_merge_" + str(ipath - 1),
                            in_file + "_input",
                            "Access_paths_merge_" + str(ipath))
                    except:
                        print(
                            "Erase failed - please try manually in ArcMap and then comment out this section and restart"
                        )
                        exit()
                    print("Appending " + in_file + "_input to merged paths")
                    arcpy.Append_management(
                        ["Access_paths_merge_1" + str(ipath)],
                        in_file + "_input", "NO_TEST")
            else:
                # Check for any duplicate polygons
                arcpy.FindIdentical_management(
                    in_file + "_input",
                    "Identical_" + in_file, ["Shape"],
                    output_record_option="ONLY_DUPLICATES")
                numrows = int(arcpy.GetCount_management("Identical_" + in_file)[0])
                if numrows > 0:
                    print(
                        "Warning - " + str(numrows) +
                        " duplicate polygons found in " + in_file +
                        "_input. All but one of each shape will be deleted.")
                    arcpy.DeleteIdentical_management(in_file + "_input",
                                                     ["Shape"])
                InAreas.append(in_file + "_input")

    print("Merging areas: " + ', '.join(InAreas))
    arcpy.Merge_management(InAreas, "Access_areas_merge")

    # Need to convert merged paths to single part otherwise it crashes
Code example #15
def validate_sgid_addresses_and_voting_precincts(in_directory,
                                                 in_dataset_name):

    #: add xy data to the in_dataset
    arcpy.AddXY_management(in_directory + in_dataset_name)

    #: Add field to mark flagged address records.
    flagged_exists = False
    dup_seqid_exists = False
    fields = arcpy.ListFields(in_directory + in_dataset_name)
    for field in fields:
        if field.name == "FLAGGED":
            flagged_exists = True
        elif field.name == "DUP_SEQID":
            dup_seqid_exists = True
    if flagged_exists:
        #: Delete it before creating it again (clears out any old values)
        arcpy.DeleteField_management(in_directory + in_dataset_name, "FLAGGED")
    arcpy.AddField_management(in_directory + in_dataset_name,
                              "FLAGGED",
                              "TEXT",
                              field_length=255,
                              field_is_nullable="NULLABLE")
    if dup_seqid_exists:
        #: Delete it before creating it again (clears out any old values)
        arcpy.DeleteField_management(in_directory + in_dataset_name,
                                     "DUP_SEQID")
    arcpy.AddField_management(in_directory + in_dataset_name,
                              "DUP_SEQID",
                              "LONG",
                              field_precision=6,
                              field_is_nullable="NULLABLE")

    #: find identical for address-based attributes only
    fields = ["UTAddPtID", "City", "ZipCode"]
    out_table = in_directory + "\\duplicate_addresspnts"
    duplicates = arcpy.FindIdentical_management(
        in_directory + in_dataset_name,
        out_table,
        fields,
        output_record_option="ONLY_DUPLICATES")

    #: Identify addresses that are duplicates by assigning the sequence id to the DUP_SEQID field.
    print("  begin assigning sequence ids...")
    transfer_seqIDs_to_featureclass(in_directory + in_dataset_name, duplicates)

    #: Loop through the dataset and check if any rows do not contain a voting precinct.
    print("  begin check for missing voting precinct...")
    check_for_missing_vp(in_directory + in_dataset_name)

    #: Loop through the duplicate addresses and determine whether each is an address point issue or a voting precinct issue (different x,y values means an address point issue; the same x,y means a voting precinct issue, i.e. an overlap)
    print(
        "  begin checking duplicates to see if address point issue or voting precinct issue..."
    )
    check_duplicate_addresses_for_issue(in_directory + in_dataset_name,
                                        in_directory, duplicates)

    #: Check if there is a discrepancy as to which county the address belongs to (i.e. the address point countyid does not match the voting precinct countyid)
    print("  begin check for countyid discrepancies...")
    check_county_ids_for_discrepancies(in_directory + in_dataset_name)

    #: Clean up and delete the tables no longer needed.
    arcpy.Delete_management(in_directory + "\\duplicate_addresspnts")
    arcpy.Delete_management(in_directory + "\\summary_stats")
Code example #16
                         out_coor_system=cs_ref)

#Create raster of weekly number of buses at the same resolution as bing data
# Convert weekly number of buses to integer
arcpy.AddField_management(NTMproj, 'adjustnum_int', 'SHORT')
with arcpy.da.UpdateCursor(NTMproj,
                           ['adjustnum_SUM', 'adjustnum_int']) as cursor:
    for row in cursor:
        if row[0]:
            row[1] = int(10 * row[0] + 0.5)
            cursor.updateRow(row)

#Split lines at all intersections so that small identical overlapping segments can be dissolved
arcpy.SplitLine_management(NTMproj, NTMproj + '_split')  #Split at intersection
arcpy.FindIdentical_management(
    NTMproj + '_split', "explFindID", "Shape"
)  #Find overlapping segments and make them part of a group (FEAT_SEQ)
arcpy.MakeFeatureLayer_management(NTMproj + '_split', "intlyr")
arcpy.AddJoin_management("intlyr",
                         arcpy.Describe("intlyr").OIDfieldName, "explFindID",
                         "IN_FID", "KEEP_ALL")
arcpy.Dissolve_management("intlyr",
                          NTMsplitdiss,
                          dissolve_field='explFindID.FEAT_SEQ',
                          statistics_fields=[[
                              os.path.split(NTMproj)[1] +
                              '_split.adjustnum_int', 'SUM'
                          ]])  #Dissolve overlapping segments
arcpy.RepairGeometry_management(
    NTMsplitdiss, delete_null='DELETE_NULL')  #sometimes creates empty geom
#Get the length of a half pixel diagonal to create buffers for
Code example #17
#Only keep buses with trips and whose schedule lasts more than 1 day
arcpy.MakeFeatureLayer_management(PStransit, 'PStransit_lyr',
                                  where_clause= '(route_type = 3) AND (MIN_service_len > 1) AND (SUM_adjustnum > 0)')
arcpy.CopyFeatures_management('PStransit_lyr', PStransitbus)
arcpy.Project_management(PStransitbus, PStransitbus_proj, cs_ref)

#Create raster of weekly number of buses at the same resolution as bing data
# Convert weekly number of buses to integer
arcpy.AddField_management(PStransitbus_proj, 'adjustnum_int', 'SHORT')
arcpy.CalculateField_management(PStransitbus_proj, 'adjustnum_int',
                                expression='int(10*!SUM_adjustnum!+0.5)', expression_type='PYTHON')

#Split lines at all intersections so that small identical overlapping segments can be dissolved
arcpy.SplitLine_management(PStransitbus_proj, PStransitbus_proj + '_split') #Split at intersection
arcpy.FindIdentical_management(in_dataset=PStransitbus_proj + '_split', out_dataset=PStransitduplitab, fields="Shape") #Find overlapping segments and make them part of a group (FEAT_SEQ)
arcpy.MakeFeatureLayer_management(PStransitbus_proj + '_split', "intlyr")
arcpy.AddJoin_management("intlyr", arcpy.Describe("intlyr").OIDfieldName, PStransitduplitab, "IN_FID", "KEEP_ALL")
arcpy.Dissolve_management("intlyr", PStransitbus_splitdiss, dissolve_field='explFindID.FEAT_SEQ',
                          statistics_fields=[[os.path.split(PStransitbus_proj)[1] + '_split.adjustnum_int', 'SUM']]) #Dissolve overlapping segments
arcpy.RepairGeometry_management(PStransitbus_splitdiss, delete_null = 'DELETE_NULL') #sometimes creates empty geom

#Get the length of a half pixel diagonal to create buffers for
#guaranteeing that segments potentially falling within the same pixel are rasterized separately
tolerance = (2.0**0.5)*float(restemplate.getOutput(0))/2
arcpy.env.workspace = os.path.dirname(soundtransit)
ExplodeOverlappingLines(PStransitbus_splitdiss, tolerance)

#For each set of non-overlapping lines, create its own raster
tilef = 'expl'
tilelist = list(set([row[0] for row in arcpy.da.SearchCursor(PStransitbus_splitdiss, [tilef])]))
Code example #18
    arcpy.Delete_management(fc)
for tbl in arcpy.ListTables():
    arcpy.Delete_management(tbl)

sr = arcpy.Describe(sourceStreets).spatialReference
arcpy.CreateFeatureclass_management(destGDB,'StreetIntersection','POINT',spatial_reference=sr,out_alias='Steet Intersection')
for fName,fAlias in fldNames.items():
    #print(fName,fAlias)
    arcpy.AddField_management(stIntersection, fName, 'TEXT', field_length=100, field_alias=fAlias)


# In[9]:


arcpy.FeatureVerticesToPoints_management(sourceStreets,stVertices,'BOTH_ENDS')
arcpy.FindIdentical_management(stVertices,identEnds,'SHAPE',output_record_option='ONLY_DUPLICATES')


# In[10]:


# Use max() rather than the last cursor row, since row order isn't guaranteed
lastFeatSeq = max(row[0] for row in arcpy.da.SearchCursor(identEnds, "FEAT_SEQ"))
lastFeatSeq = lastFeatSeq + 1
print(f"number of intersections: {lastFeatSeq}")


# In[14]:

# TODO create list from column Feat_Seq instead of range()
arr = arcpy.da.TableToNumPyArray(identEnds, 'FEAT_SEQ')
df = pd.DataFrame(arr)
Code example #19
        "Zero feature datasets found in " + workpath +
        ". \n Try migrating your feature classes to a feature datasets.")
    sys.exit(0)
else:
    for dataset in FDSs:
        for fc in arcpy.ListFeatureClasses('', '', dataset):
            fcCount = arcpy.GetCount_management(fc)
            if int(fcCount[0]) == 0:
                arcpy.AddMessage("No features in " + fc + " ... skipping!")
                pass
            else:
                # Find duplicate geometry
                dupeTable = "in_memory\\tmp"
                arcpy.FindIdentical_management(
                    fc,
                    dupeTable, ["Shape"],
                    xy_tolerance=xyTolerance,
                    z_tolerance=zTolerance,
                    output_record_option="ONLY_DUPLICATES")

                # Get table count and pass the dataset if no duplicates exist
                tblCount = arcpy.GetCount_management(dupeTable)
                print fc + " duplicate feature count: " + str(tblCount)
                arcpy.AddMessage(fc + " duplicate feature count: " +
                                 str(tblCount))
                if int(tblCount[0]) == 0:
                    arcpy.Delete_management("in_memory\\tmp")
                    continue

                else:
                    # Get Object IDs of duplicate values to build expression for layer file
                    fidList = []
Code example #20
                                                  "0.001 Meters", "ATTRIBUTES")

#: Delete the newly-created FID_TrailsPathways field (this will keep the schema the same and also allow the append below w/o "NO_TEST")
arcpy.DeleteField_management(finished_product, ["FID_TrailsAndPathways"])

#: Append the segments (bridges and tunnels) that did not get split back into the dataset
arcpy.MakeFeatureLayer_management(trails_paths, "trails_paths_bridges_lyr",
                                  "CartoCode = '8 - Bridge, Tunnel'")
print("begin append cartocode 8's")
arcpy.management.Append("trails_paths_bridges_lyr", finished_product)

#: Find duplicate uniqueids - these will need to be recalculated using the arcmap tool
print("begin find duplicate Unique_IDs")
unique_dup_tbl = arcpy.FindIdentical_management(
    finished_product,
    dup_uniqueid_table,
    "Unique_ID",
    output_record_option="ONLY_DUPLICATES")

#: Clear the duplicates (Unique_ID is set to 0 so it can be recalculated)
print("begin clearing duplicate unique_ids")
joined_fc = arcpy.management.JoinField(finished_product, "OBJECTID",
                                       unique_dup_tbl, "IN_FID")

#: Make a feature layer where IN_FID is not null, then reset Unique_ID values to 0 where this is true
arcpy.MakeFeatureLayer_management(joined_fc, "joined_fc_lyr",
                                  "IN_FID is not NULL")
arcpy.management.CalculateField("joined_fc_lyr", "Unique_ID", "0")

#: Remove the join fields
arcpy.DeleteField_management(finished_product, ["IN_FID", "FEAT_SEQ"])
Code example #21
# arcpy_Append_Example.py
# Created on: 2017-02-13 14:18:40.00000
#   (generated by ArcGIS/ModelBuilder)
# Description: 
# ---------------------------------------------------------------------------

# Set the necessary product code
# import arcinfo


# Import arcpy module
import arcpy


# Local variables:
tmp_BufLYR = "tmp_BufLYR"
location_coordinate_buffers__4_ = tmp_BufLYR
location_coordinate_buffers = "location_coordinate_buffers"
location_coordinate_buffers__2_ = location_coordinate_buffers
location_coordinate_buffers_ = "C:\\Users\\chrism\\Documents\\ArcGIS\\Default.gdb\\location_coordinate_buffers_"

# Process: Truncate Table
arcpy.TruncateTable_management(location_coordinate_buffers)

# Process: Append
arcpy.Append_management("tmp_BufLYR", location_coordinate_buffers__2_, "NO_TEST", "LOCCRD_SEQ_NR_FK \"LOCCRD_SEQ_NR_FK\" true false false 4 Long 0 0 ,First,#,tmp_BufLYR,LOCCRD_SEQ_NR_FK,-1,-1;ACCURACY_LVL_CD \"ACCURACY_LVL_CD\" true true false 2 Text 0 0 ,First,#,tmp_BufLYR,ACCURACY_LVL_CD,-1,-1;CREATED_BY \"CREATED_BY\" true true false 20 Text 0 0 ,First,#,tmp_BufLYR,CREATED_BY,-1,-1;CREATED_DATE \"CREATED_DATE\" true true false 8 Date 0 0 ,First,#,tmp_BufLYR,CREATED_DATE,-1,-1;LAST_MODIFIED_BY \"LAST_MODIFIED_BY\" true true false 20 Text 0 0 ,First,#,tmp_BufLYR,LAST_MODIFIED_BY,-1,-1;LAST_MODIFIED_DATE \"LAST_MODIFIED_DATE\" true true false 8 Date 0 0 ,First,#,tmp_BufLYR,LAST_MODIFIED_DATE,-1,-1;LOCCRD_SEQ_NR \"LOCCRD_SEQ_NR\" true true false 4 Long 0 0 ,First,#,tmp_BufLYR,LOCCRD_SEQ_NR,-1,-1;SHAPE_Length \"SHAPE_Length\" false true true 8 Double 0 0 ,First,#,tmp_BufLYR,SHAPE_Length,-1,-1;SHAPE_Area \"SHAPE_Area\" false true true 8 Double 0 0 ,First,#,tmp_BufLYR,SHAPE_Area,-1,-1", "")

# Process: Find Identical
arcpy.FindIdentical_management(location_coordinate_buffers__4_, location_coordinate_buffers_, "LOCCRD_SEQ_NR_FK;LOCCRD_SEQ_NR", "", "0", "ALL")