Example 1
def generateCurves(fc):
    desc = arcpy.Describe(fc)
    fc_name = desc.name
    fc_gdb = desc.path
    Curves = fc_gdb + "\\" + fc_name + "_curves_polygon"
    if arcpy.Exists(Curves):
        arcpy.Delete_management(Curves)
    arcpy.CreateFeatureclass_management(fc_gdb, fc_name + "_curves_polygon",
                                        "POLYGON", "", "", "", fc)
    for row in arcpy.da.SearchCursor(fc, ['SHAPE@']):
        pts = arcpy.Array()
        j = json.loads(row[0].JSON)
        if 'curve' in str(j):
            #print "You have true curves!"
            try:
                exe = [
                    pts.add(arcpy.Point(f[0], f[1])) for i in row[0] if i
                    for f in i if f
                ]
            except Exception:
                # fall back: these parts already yield arcpy.Point objects rather than coordinate pairs
                exe = [pts.add(f) for i in row[0] if i for f in i if f]
        if pts:
            pts.add(pts.getObject(0))
            polygon = arcpy.Polygon(pts,
                                    arcpy.SpatialReference("Israel TM Grid"))
            diff = polygon.symmetricDifference(row[0])
            diff_sp = arcpy.MultipartToSinglepart_management(
                diff, arcpy.Geometry())
            if len(diff_sp) > 0:
                arcpy.Append_management(diff_sp, Curves, "NO_TEST")
    return Curves
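The curve test above works because the Esri JSON representation of a geometry that contains true curves carries a curveRings (polygons) or curvePaths (polylines) key instead of plain coordinate arrays. A minimal standalone sketch of just that check, assuming a hypothetical polygon feature class path:

import arcpy

fc = r"C:\data\work.gdb\parcels"  # hypothetical input feature class
with arcpy.da.SearchCursor(fc, ["OID@", "SHAPE@JSON"]) as cursor:
    for oid, shape_json in cursor:
        # geometries with true curves expose 'curveRings' / 'curvePaths' in their JSON
        if "curve" in shape_json:
            print("Feature {0} contains true curves".format(oid))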
Example 2
def generateHighLow(workspace, name, clip_contours, ref_md):
    cont_poly1 = os.path.join(workspace, 'O12_poly_' + name + '.shp')
    cont_poly2 = os.path.join(workspace, 'O13_poly_' + name + '.shp')
    arcpy.FeatureToPolygon_management(in_features=clip_contours,
                                      out_feature_class=cont_poly1,
                                      cluster_tolerance="",
                                      attributes="ATTRIBUTES",
                                      label_features="")
    arcpy.MultipartToSinglepart_management(in_features=cont_poly1,
                                           out_feature_class=cont_poly2)
    select_set = []
    with arcpy.da.UpdateCursor(
            cont_poly2, ["FID", "SHAPE@"]) as cursor:  # @UndefinedVariable
        for row in cursor:
            parts = row[1].partCount
            boundaries = row[1].boundary().partCount
            if boundaries > parts:
                select_set.append(row[0])

    cont_poly3 = 'O13_poly_' + name + '_layer'
    arcpy.MakeFeatureLayer_management(in_features=cont_poly2,
                                      out_layer=cont_poly3,
                                      where_clause='"FID" IN(' +
                                      ','.join(select_set) + ')',
                                      workspace="",
                                      field_info="")
    arcpy.DeleteFeatures_management(cont_poly3)
    arcpy.AddSurfaceInformation_3d(in_feature_class=cont_poly2,
                                   in_surface=ref_md,
                                   out_property="Z_MEAN",
                                   method="BILINEAR")
Example 3
def Delete_Duplic_Line(fc):

    del_layer    = 'in_memory' + '\\' + 'arc_inter'
    diss_layer   = 'in_memory' + '\\' + 'diss_layer'
    Append_layer = 'in_memory' + '\\' + 'Append_layer'

    # self-intersect the layer to find overlapping (duplicate) line segments
    arcpy.Intersect_analysis([fc], del_layer)

    if int(str(arcpy.GetCount_management(del_layer))) > 0:

        del_layer_temp = 'in_memory' + '\\' + 'Temp'
        arcpy.Dissolve_management(del_layer, del_layer_temp)

        # subtract the dissolved overlap geometry from every feature
        geom_del = [row.shape for row in arcpy.SearchCursor(del_layer_temp)][0]
        Ucursor = arcpy.UpdateCursor(fc)
        for row in Ucursor:
            if geom_del and row.shape:
                geom_up = row.shape
                new_geom = geom_up.difference(geom_del)
                row.shape = new_geom
                Ucursor.updateRow(row)

        # append the overlapping segments back once, as singlepart lines
        arcpy.Dissolve_management(del_layer, diss_layer)
        arcpy.MultipartToSinglepart_management(diss_layer, Append_layer)
        arcpy.Append_management(Append_layer, fc, "NO_TEST")
Example 4
    def process_downloaded_data(self, input_fc_list):
        """
        Defines a field map and uses it to merge the input feature classes (logging road extracts from different
        regions). Dissolves the output to get rid of duplicate features.
        :param input_fc_list:
        :return:
        """

        if len(input_fc_list) == 1:
            single_part_fc = input_fc_list[0]

        else:
            field_max_dict = self.get_max_len_all_fields(input_fc_list)

            fm_dict = {k: {'out_length': v} for k, v in field_max_dict.iteritems()}
            fms = field_map.build_field_map(input_fc_list, fm_dict)

            merged_fc = os.path.join(self.download_workspace, 'merged_output.shp')
            arcpy.Merge_management(input_fc_list, merged_fc, fms)

            dissolved_fc = os.path.join(self.download_workspace, 'dissolved.shp')
            out_fields = ['osm_id', 'access', 'bridge', 'end_date', 'ferry', 'ford', 'highway', 'informal',
                          'maxspeed', 'name', 'oneway', 'opening_ho', 'operator', 'ref', 'route', 'seasonal',
                          'smoothness', 'source', 'start_date', 'surface', 'trail_visi', 'tunnel', 'width']

            arcpy.Dissolve_management(merged_fc, dissolved_fc, ';'.join(out_fields), "", "MULTI_PART", "DISSOLVE_LINES")

            single_part_fc = os.path.join(self.download_workspace, 'single_part_final.shp')
            arcpy.MultipartToSinglepart_management(dissolved_fc, single_part_fc)

        self.data_source = single_part_fc
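field_map.build_field_map above is a project-specific helper; a rough equivalent using plain arcpy.FieldMappings is sketched below, assuming hypothetical input shapefiles and a text field named 'name' whose output length should be widened before the merge:

import arcpy

# hypothetical inputs standing in for input_fc_list / merged_fc in the method above
input_fcs = [r"C:\data\roads_region_a.shp", r"C:\data\roads_region_b.shp"]
merged_fc = r"C:\data\merged_output.shp"

fms = arcpy.FieldMappings()
for fc in input_fcs:
    fms.addTable(fc)

# widen the output length of a hypothetical text field so values are not truncated
idx = fms.findFieldMapIndex("name")
if idx != -1:
    fm = fms.getFieldMap(idx)
    out_field = fm.outputField
    out_field.length = 254
    fm.outputField = out_field
    fms.replaceFieldMap(idx, fm)

arcpy.Merge_management(input_fcs, merged_fc, fms)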
Example 5
def extract_hydro_points(drain, show, folder, gdb):
    gp = arcgisscripting.create()
    gp.CheckOutExtension("Spatial")
    gp.SetProgressor('default', 'starting vertex extraction...')
    arcpy.env.overwriteOutput = True
    arcpy.env.addOutputsToMap = show

    if not os.path.exists(os.path.join(folder, '{}.gdb'.format(gdb))):
        arcpy.CreateFileGDB_management(out_folder_path=folder,
                                       out_name='{}.gdb'.format(gdb))

    gp.AddMessage('Processing Extract Vertex ...')
    arcpy.Intersect_analysis(in_features='{} #'.format(drain),
                             out_feature_class=os.path.join(
                                 folder, 'temp', 'hydro_multi_points.shp'),
                             join_attributes='ALL',
                             cluster_tolerance='-1 Unknown',
                             output_type='POINT')

    arcpy.AddXY_management(
        in_features=os.path.join(folder, 'temp', 'hydro_multi_points.shp'))
    arcpy.DeleteIdentical_management(in_dataset=os.path.join(
        folder, 'temp', 'hydro_multi_points.shp'),
                                     fields="POINT_X;POINT_Y",
                                     xy_tolerance="",
                                     z_tolerance="0")
    arcpy.MultipartToSinglepart_management(
        in_features=os.path.join(folder, 'temp', 'hydro_multi_points.shp'),
        out_feature_class=os.path.join(folder, '{}.gdb'.format(gdb),
                                       'hydro_points'))

    gp.AddMessage('Finish')
Example 6
def CleanFeatures(inFeats, outFeats):
    '''Repairs geometry, then explodes multipart polygons to prepare features for geoprocessing.'''

    # Process: Repair Geometry
    arcpy.RepairGeometry_management(inFeats, "DELETE_NULL")

    # Have to add the while/try/except below b/c polygon explosion sometimes fails inexplicably.
    # This gives it 10 tries to overcome the problem with repeated geometry repairs, then gives up.
    counter = 1
    while counter <= 10:
        try:
            # Process: Multipart To Singlepart
            arcpy.MultipartToSinglepart_management(inFeats, outFeats)

            counter = 11

        except:
            arcpy.AddMessage("Polygon explosion failed.")
            # Process: Repair Geometry
            arcpy.AddMessage("Trying to repair geometry (try # %s)" %
                             str(counter))
            arcpy.RepairGeometry_management(inFeats, "DELETE_NULL")

            counter += 1

            if counter == 11:
                arcpy.AddMessage(
                    "Polygon explosion problem could not be resolved.  Copying features."
                )
                arcpy.CopyFeatures_management(inFeats, outFeats)

    return outFeats
Example 7
def pgon_to_pline(in_fc, out_fc):
    """Polygon to polyline conversion.  Multipart shapes are converted to
    singlepart.  The singlepart geometry is used to produce the polylines."""
    result = check_path(out_fc)
    if result[0] is None:
        print(result[1])
        return result[1]
    gdb, name = result
    SR = getSR(in_fc)
    temp = arcpy.MultipartToSinglepart_management(in_fc, r"memory\in_fc_temp")
    a, IFT, IFT_2 = fc_geometry(temp, SR)
    tweet("\n(1) fc_geometry complete...")
    d = fc_data(temp)
    tweet("\n(2) featureclass data complete...")
    info = "pgon to pline"
    b = Geo(a, IFT=IFT, Kind=1, Info=info)  # create the geo array
    tweet("\n(3) Geo array complete...")
    done = geometry_fc(b, IFT, p_type="POLYLINE", gdb=gdb, fname=name, sr=SR)
    tweet("\n(4) " + done)
    if arcpy.Exists(out_fc):
        import time
        time.sleep(1.0)
    try:
        arcpy.da.ExtendTable(out_fc, 'OBJECTID', d, 'OID_')
        tweet("\n(5) ExtendTable complete...")
    except Exception:
        tweet("\narcpy.da.ExtendTable failed... try a spatial join after.")
    msg = """\n
        ----
        Multipart shapes have been converted to singlepart, so view any data
        carried over during the extendtable join as representing those from
        the original data.  Recalculate values where appropriate.
        ----
        """
    tweet(dedent(msg))
Example 8
def UnsplitLines(inLines, outLines, scratchGDB=arcpy.env.scratchGDB):
    '''Does what it seems the arcpy.UnsplitLine_management function SHOULD do, but doesn't.
   
   Parameters:
   - inLines = input line feature class
   - outLines = output line feature class
   - scratchGDB = geodatabase to hold intermediate products
   '''
    printMsg("Buffering segments...")
    buffLines = scratchGDB + os.sep + "buffLines"
    arcpy.Buffer_analysis(inLines, buffLines, "1 Meters", "FULL", "ROUND",
                          "ALL")

    printMsg("Exploding buffers...")
    explBuff = scratchGDB + os.sep + "explBuff"
    arcpy.MultipartToSinglepart_management(buffLines, explBuff)

    printMsg("Grouping segments...")
    arcpy.AddField_management(explBuff, "grpID", "LONG")
    arcpy.CalculateField_management(explBuff, "grpID", "!OBJECTID!", "PYTHON")

    joinLines = scratchGDB + os.sep + "joinLines"
    fldMap = 'grpID "grpID" true true false 4 Long 0 0, First, #, %s, grpID, -1, -1' % explBuff
    arcpy.SpatialJoin_analysis(inLines, explBuff, joinLines, "JOIN_ONE_TO_ONE",
                               "KEEP_ALL", fldMap, "INTERSECT")

    printMsg("Dissolving segments by group...")
    arcpy.Dissolve_management(joinLines, outLines, "grpID", "", "MULTI_PART",
                              "DISSOLVE_LINES")

    return outLines
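A minimal usage sketch, with hypothetical feature class paths and assuming printMsg is defined elsewhere in the module. Because grouping is driven by a dissolved 1 Meter buffer, separate lines that pass within roughly 2 meters of each other will also be merged into one group; adjust the buffer distance in the function if that is too coarse.

import arcpy

in_lines = r"C:\data\work.gdb\streams_segmented"   # hypothetical input
out_lines = r"C:\data\work.gdb\streams_unsplit"    # hypothetical output
UnsplitLines(in_lines, out_lines, scratchGDB=arcpy.env.scratchGDB)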
Example 9
def get_contour():
    # buffer the queried route by 1 foot
    arcpy.Buffer_analysis(ROUTE_FL, BUFFER_FL, buffer_distance_or_field="1 Feet", dissolve_option="NONE")
    # arcpy.CopyFeatures_management(BUFFER_FL, os.path.join(gdb, "buffer"))  # for testing, comment out when not in use
    arcpy.AddMessage("Buffer Complete")

    # clip the contours with the buffer
    arcpy.AddMessage("Clipping Contours....")
    arcpy.Clip_analysis(contour_5ft, BUFFER_FL, CLIPPED_CONTOUR_FL)
    arcpy.CopyFeatures_management(CLIPPED_CONTOUR_FL, os.path.join(gdb, "CONTOUR"))
    arcpy.AddMessage("Contour Clip Complete")

    # intersect Queried Route with Clipped Contour to generate points for loading
    arcpy.AddMessage("Intersecting Contours with Route....")
    arcpy.Intersect_analysis([ROUTE_FL, CLIPPED_CONTOUR_FL], INTERSECT_POINTS_FL_MP, "ALL", 0, "POINT")
    arcpy.AddMessage("Intersect Complete")

    # explode multipart features
    arcpy.AddMessage("Exploding Multipart Features....")
    arcpy.MultipartToSinglepart_management(INTERSECT_POINTS_FL_MP, INTERSECT_POINTS_FL)
    arcpy.AddMessage("Explode Complete")

    # locate the intersected points along the route and give them a measure value
    arcpy.AddMessage("Locating Features Along Route.....")
    arcpy.LocateFeaturesAlongRoutes_lr(INTERSECT_POINTS_FL, route_copy, rid_field, "50 Feet",
                                       out_table, props, "FIRST")
    arcpy.AddMessage("Locate Complete")
Example 10
def Voronoi_Lines(inFC,Output):

    try:
    
        #Variables
        temp = 'in_memory\\tempdata'
        temp2 = 'in_memory\\tempdata2'

        arcpy.FeatureVerticesToPoints_management(inFC,temp, "ALL")
        arcpy.CreateThiessenPolygons_analysis(temp, temp2, "ONLY_FID")
        arcpy.PolygonToLine_management(temp2, temp)
        arcpy.Intersect_analysis([temp, inFC], temp2, "ALL")
        arcpy.MultipartToSinglepart_management(temp2, Output)

        fieldNames = []
        for field in arcpy.ListFields(Output):
            if not field.required and field.name != 'Id':
                fieldNames.append(field.name)
        arcpy.DeleteField_management(Output,fieldNames)

        Centerline(Output)
        Width(Output,inFC)
        Deviation(Output)
        
    except Exception as e:
        arcpy.AddError('%s'%(e))
Example 11
def Insert_needed_arc(parcel_bankal, ws, gdb):

    Keshet = generateCurves(parcel_bankal)

    print "Insert_needed_arc"
    tazar_c = ws + '\\' + 'PARCELS_inProc_edit_copy'
    arc_bankal = gdb + '\\' + 'PARCEL_ARC_EDIT'
    arc_bankal_single = ws + '\\' + 'PARCEL_ARC_EDIT_single'
    arc_diss = ws + '\\' + 'arc__Diss'
    parce_to_line = ws + '\\' + 'parcel_to_line'
    error_line = ws + '\\' + 'Errors_Line'

    arcpy.MultipartToSinglepart_management(arc_bankal, arc_bankal_single)

    arcpy.MakeFeatureLayer_management(parcel_bankal, 'arc_bankal_single_lyr')
    arcpy.SelectLayerByLocation_management('arc_bankal_single_lyr',
                                           "INTERSECT", tazar_c, '100 Meters')

    polygon_to_line('arc_bankal_single_lyr', parce_to_line)
    arcpy.Dissolve_management(arc_bankal, arc_diss)
    data = [i.shape for i in arcpy.SearchCursor(arc_diss)][0]
    with arcpy.da.UpdateCursor(parce_to_line, ['SHAPE@']) as cursor:
        for row in cursor:
            geom = row[0]
            new_geom = geom.difference(data)
            row[0] = new_geom
            cursor.updateRow(row)

    arcpy.MakeFeatureLayer_management(parce_to_line, 'par_bankal_to_line_lyr')
    arcpy.SelectLayerByLocation_management('par_bankal_to_line_lyr',
                                           "INTERSECT", Keshet, '0.1 Meters')
    arcpy.DeleteFeatures_management('par_bankal_to_line_lyr')

    Calc_field_value_error(parce_to_line, error_line, "7",
                           ErrorDictionary["7"])
Example 12
def generateCurves(fc):
    desc = arcpy.Describe(fc)
    fc_name = desc.name
    fc_gdb = desc.path
    Curves = fc_gdb + "\\" + fc_name + "_curves_polygon"
    #print "generateCurves("+fc_name+")..."
    arcpy.CreateFeatureclass_management(fc_gdb, fc_name + "_curves_polygon",
                                        "POLYGON", "", "", "", fc)
    curveFeatureList = []
    for row in arcpy.SearchCursor(fc):
        pts = []
        geom = row.Shape
        j = json.loads(geom.JSON)
        if 'curve' in str(j):
            #print "You have true curves!"
            coords = geom.__geo_interface__['coordinates']
            for i in coords:
                if i:
                    for f in i:
                        if f:
                            pts.append(arcpy.Point(f[0], f[1]))
        if pts:
            polygon = PtsToPolygon(pts)
            diff = polygon.symmetricDifference(geom)
            diff_sp = arcpy.MultipartToSinglepart_management(
                diff, arcpy.Geometry())
            if len(diff_sp) > 0:
                arcpy.Append_management(diff_sp, Curves, "NO_TEST")
    return Curves
Example 13
def explode_polygons(in_location, out_location, previous_suffix, suffix):
    # Check if out_location was already created
    if not arcpy.Exists(out_location):
        path, gdb_file = os.path.split(out_location)
        create_gdb(path, gdb_file, out_location)
    arcpy.env.workspace = in_location
    fc_list = arcpy.ListFeatureClasses()
    for fc in fc_list:
        print fc
        in_fc = in_location + os.sep + fc
        out_name = fc + suffix
        out_name = out_name.replace(previous_suffix, "")
        out_feature = out_location + os.sep + out_name
        try:
            if not arcpy.Exists(out_feature):
                arcpy.Delete_management("inFeatures")
                arcpy.MakeFeatureLayer_management(in_fc, "inFeatures")
                arcpy.MultipartToSinglepart_management("inFeatures",
                                                       out_feature)
            else:
                continue

        except Exception as error:
            print(error.args[0])
            arcpy.Delete_management(out_feature)
Example 14
def set_edge_weight(KVL_Dissolve):
    arcpy.AddField_management(KVL_Dissolve, "Weight", "FLOAT")
    dictionary = {
        u"35 кВ": 0.07,
        u"110 кВ": 0.02,
        u"150 кВ": 0.01,
        u"220 кВ": 0.005,
        u"330 кВ": 0.0025,
        u"400 кВ": 0.001428571,
        u"500 кВ": 0.001111111,
        u"750 кВ": 0.000454545,
        None: None
    }
    rows = arcpy.da.UpdateCursor(KVL_Dissolve, ["Voltage", "Weight"])
    for row in rows:
        row[1] = dictionary[row[0]]
        rows.updateRow(row)
    del row, rows
    arcpy.DeleteField_management(KVL_Dissolve, [
        'SymbolID', 'AltMode', 'Base', 'Clamped', 'Extruded', 'Snippet',
        'PopupInfo'
    ])
    single_part = arcpy.MultipartToSinglepart_management(
        KVL_Dissolve, "Single_Part")
    lines_p = arcpy.FeatureClassToFeatureClass_conversion(
        single_part,
        "Network",
        "Lines_p",
        where_clause="Status <> 'Строительство'")
    return lines_p
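Note that row[1] = dictionary[row[0]] raises a KeyError for any voltage class that is not listed (only None is mapped explicitly). A slightly safer variant of the same cursor loop, sketched here rather than taken from the original function and reusing the KVL_Dissolve and dictionary names from above, falls back to None via dict.get:

with arcpy.da.UpdateCursor(KVL_Dissolve, ["Voltage", "Weight"]) as rows:
    for row in rows:
        # unknown voltage classes get a NULL weight instead of raising KeyError
        row[1] = dictionary.get(row[0])
        rows.updateRow(row)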
Example 15
    def mustBeSinglePart_Area(self, feature):

        # Current geodatabase
        #geodatabaseTopage = geodatabase()

        # Current geodatabase
        geodatabase_path = geodatabaseTopage.path(feature)

        # Create the exploded (singlepart) feature class used for the check
        if arcpy.Exists(geodatabase_path + "/feature_checking"):
            arcpy.Delete_management(geodatabase_path + "/feature_checking")
        arcpy.MultipartToSinglepart_management(
            feature, geodatabase_path + "/feature_checking")

        # Count the exploded features and the original features
        compte_feature = arcpy.GetCount_management(feature).getOutput(0)
        compte_featureTest = arcpy.GetCount_management(
            geodatabase_path + "/feature_checking").getOutput(0)

        # Delete the temporary feature class created for the check
        arcpy.Delete_management(geodatabase_path + "/feature_checking")

        if int(compte_feature) == int(compte_featureTest):
            return "true"
        else:
            return "false"
Example 16
def multipart2singlepart(inputPath, outputPath):
  dateTime = datetime.now()
  logger.info("Multipart To Singlepart")
  outputFilename = nameModify(outputPath, "Single")
  arcpy.MultipartToSinglepart_management(inputPath, outputFilename)
  deleteField(outputFilename, "ORIG_FID")
  arcpyLogger(arcpyVerboseLevel)
  logger.info("The singlepart file, generated in " + str(duration(dateTime)) + " is : " + outputFilename)
  return outputFilename
Example 17
    def Multi_to_single(self, temp_lyer=''):

        temp_lyer = self.gdb + '\\' + 'Temp'
        save_name = self.layer
        arcpy.MultipartToSinglepart_management(self.layer, temp_lyer)
        arcpy.Delete_management(self.layer)
        arcpy.Rename_management(temp_lyer, save_name)

        return save_name
Example 18
 def freqRio_7(self):
     dissol = arcpy.Dissolve_management(in_features=redhidrica,
                                        out_feature_class=os.path.join(
                                            self.scratchFolder, "temp1"),
                                        dissolve_field="grid_code")
     mp = arcpy.MultipartToSinglepart_management(dissol,
                                                 "in_memory\\multipart")
     NTc = int(arcpy.GetCount_management(mp).getOutput(0))
     fr = NTc / self.area
     return fr
Example 19
def get_default_Snap_border(point_bankal, tazar, Distance_min):
    '''
    [INFO] - Checks the bankal points near the tazar; if two bankal points lie
             close to each other, that distance is returned as the default.
    INPUT-
    1) point_bankal - bankal points layer
    2) tazar        - the surveyor's tazar layer
    3) Distance_min - the minimum fallback, used when the point distances are too large
    OUTPUT-
    1) the smallest distance between two bankal points adjacent to the tazar
    '''

    GDB = os.path.dirname(tazar)

    PntTmp = r'in_memory' + '\\' + 'PntTmp'
    buffer = r'in_memory' + '\\' + 'buffer'
    dissol = r'in_memory' + '\\' + 'dissol'
    multiP = r'in_memory' + '\\' + 'multiP'

    def Getmin(list1, Dis_min=2):
        li = [i[2] for i in list1 if i[2] < 1]
        return min(li) - 0.01 if li else Dis_min

    arcpy.MakeFeatureLayer_management(point_bankal, 'path_lyr')
    arcpy.SelectLayerByLocation_management('path_lyr', 'WITHIN_A_DISTANCE',
                                           tazar, '5 Meters')
    arcpy.Select_analysis('path_lyr', PntTmp)
    arcpy.MakeFeatureLayer_management(PntTmp, 'PntTmp_lyr')
    arcpy.SelectLayerByLocation_management('PntTmp_lyr', "COMPLETELY_WITHIN",
                                           tazar)
    arcpy.SelectLayerByAttribute_management('PntTmp_lyr', "SWITCH_SELECTION")

    arcpy.Buffer_analysis('PntTmp_lyr', buffer, 0.5)
    arcpy.Dissolve_management(buffer, dissol)
    arcpy.MultipartToSinglepart_management(dissol, multiP)

    with arcpy.da.UpdateCursor(multiP, ['SHAPE@AREA']) as cursor:
        for row in cursor:
            if row[0] < 0.8:
                cursor.deleteRow()

    arcpy.MakeFeatureLayer_management(PntTmp, 'path2_lyr')
    arcpy.SelectLayerByLocation_management('path2_lyr', 'INTERSECT', multiP)

    dis_point = [[
        row[0], row[1]
    ] for row in arcpy.da.SearchCursor('path2_lyr', ['OBJECTID', 'SHAPE@'])]
    list_dis = [
        [row[1], n[0], row[0].distanceTo(n[1])] for n in dis_point
        for row in arcpy.da.SearchCursor('path2_lyr', ['SHAPE@', 'OID@'])
        if row[0].distanceTo(n[1]) > 0
    ]

    Min_dist = Getmin(list_dis, Distance_min)
    print_arcpy_message(Min_dist, status=1)
    return Min_dist
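A usage sketch with hypothetical layer paths, assuming print_arcpy_message is defined elsewhere in the module; the returned value is the snapping distance to use downstream:

point_bankal = r"C:\data\edit.gdb\PARCEL_NODE_EDIT"   # hypothetical bankal points
tazar = r"C:\data\edit.gdb\PARCELS_inProc_edit"       # hypothetical tazar polygons
snap_dist = get_default_Snap_border(point_bankal, tazar, Distance_min=2)
print("Snap distance: {0}".format(snap_dist))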
Example 20
def explode_m(shp_p, new_shp):
    """
    :param shp_p: path of the shapefile whose multipart features should be split
    :param new_shp: output path
    :return:
    """
    arcpy.env.overwriteOutput = True
    base = "base.shp"
    arcpy.MakeFeatureLayer_management(shp_p, base)
    print "Splitting multipart features..."
    arcpy.MultipartToSinglepart_management(base, new_shp)
    print "Multipart split finished!"
Example 21
def singlePart(fc):
    #-------------------------------
    if eco:
        imprimir("SINGLE PART ==> " + fc)
    if arcpy.Exists(fc + "1"):
        arcpy.Delete_management(fc + "1")
    arcpy.MultipartToSinglepart_management(fc, fc + "1")
    if arcpy.Exists(fc):
        arcpy.Delete_management(fc)
    arcpy.Rename_management(fc + "1", fc)
Example 22
def flatten_poly_fc(in_layer_path, out_gdb_path, query=None):
    '''Check for overlaps and flatten, super region poly knockoff, POLYID joins back to original data'''
    try:
        log("Flattening {} due to overlaps".format(in_layer_path))
        in_layer_nm = os.path.splitext(os.path.basename(in_layer_path))[0]
        shattered_fc = os.path.join(out_gdb_path, in_layer_nm + "_shattered")
        if query:
            log("We have a query: {}".format(query))
            f_lyr = "f_lyr"
            arcpy.MakeFeatureLayer_management(in_layer_path, f_lyr, where_clause=query)
            arcpy.Union_analysis(f_lyr, shattered_fc, "ALL", "", "GAPS")
            log(arcpy.GetMessages())
        else:
            arcpy.Union_analysis(in_layer_path, shattered_fc, "ALL", "", "GAPS"); log(arcpy.GetMessages())
        shattered_singlepart_fc = os.path.join(out_gdb_path, in_layer_nm + "_shattered_singlepart") #this
        arcpy.MultipartToSinglepart_management(shattered_fc, shattered_singlepart_fc); log(arcpy.GetMessages())
        polyid_field_nm = "POLYID"
        arcpy.AddField_management(shattered_singlepart_fc, polyid_field_nm, "LONG"); log(arcpy.GetMessages())
        polyid_dict = {}
        polyid_value = 1
        decimal_tolerance = 2
        field_list = ["OID@","SHAPE@XY","SHAPE@AREA", polyid_field_nm]
        update_rows = arcpy.da.UpdateCursor(shattered_singlepart_fc, field_list)
        for row in update_rows:
            axyvalue = (round(row[1][0], decimal_tolerance), round(row[1][1], decimal_tolerance), round(row[2], decimal_tolerance))
            if axyvalue not in polyid_dict:
                polyid_dict[axyvalue] = polyid_value
                polyid_value = polyid_value + 1
            row[3] = polyid_dict[axyvalue]
            update_rows.updateRow(row)
        del row, update_rows
        del polyid_dict

        final_fc = os.path.join(out_gdb_path, in_layer_nm + "_flattened")
        try:
            arcpy.Dissolve_management(shattered_singlepart_fc, final_fc, polyid_field_nm, "", "SINGLE_PART"); log(arcpy.GetMessages())
        except:
            log("Failed initial Dissolve, repairing geometry and trying again")
            arcpy.RepairGeometry_management(shattered_singlepart_fc); log(arcpy.GetMessages())
            arcpy.Dissolve_management(shattered_singlepart_fc, final_fc, polyid_field_nm, "", "SINGLE_PART"); log(arcpy.GetMessages())

        log("Creating POLYID lookup table")
        polyid_fc = os.path.join(out_gdb_path, in_layer_nm + "_polyid")
        fid_field = next(i.name for i in arcpy.ListFields(shattered_singlepart_fc) if "FID" in i.name)
        arcpy.Frequency_analysis(shattered_singlepart_fc, polyid_fc,"POLYID;{}".format(fid_field), "");log(arcpy.GetMessages())
        arcpy.AddField_management(polyid_fc, "flattened_POLYID", "LONG");log(arcpy.GetMessages())
        arcpy.CalculateField_management(polyid_fc,"flattened_POLYID", "!POLYID!", "PYTHON");log(arcpy.GetMessages())
        arcpy.DeleteField_management(polyid_fc, "FREQUENCY;POLYID"); log(arcpy.GetMessages())
        log("Successful finish to flattening routine")
        return [final_fc, polyid_fc]

    except Exception as e:
        log("EXCEPTION hit: {}".format(e))
Example 23
 def explode(self, outputFeatureClass):
     if not arcpy.Exists(self.catalogPath):
         raise ExistsError
     if arcpy.Exists(outputFeatureClass):
         delete_feature_class(outputFeatureClass)
     try:
         arcpy.MultipartToSinglepart_management(self.catalogPath,
                                                outputFeatureClass)
     except:
         config.run_error_message(self.catalogPath,
                                  "Exploding multiparts failure")
     finally:
         gc.collect()
Example 24
def get_aspect (feature, bin_mask, bins, workspace, raster_scaling = 1000):
    """Calculate aspect information from the given feature centerline based
    on a bin mask. A bin mask is one of the standard outputs from calculating 
    hypsometry. Aspect is 0 -360 degrees clockwise from north and starts from 
    the lowest elevation toward the highest."""
    try:
        aspect_list = [str(0.0)] * len(bins) # string list of 0.0 to return
        bin_list = bins # List of bin values
        centerline_list = [] # List to hold current features length and slope values
        
        rows = ARCPY.SearchCursor (bin_mask)
        for row in rows: # For each bin within the bin mask
            elevation_bin = int(row.GRIDCODE / raster_scaling) # Get bin value
            
            # Clip centerline to current bin and count the features generated
            clipped_line = ARCPY.Clip_analysis (feature, row.shape, 'in_memory\\clipped_line')
            feature_count = int(ARCPY.GetCount_management(clipped_line).getOutput(0))
            
            if feature_count > 0: # If there is 1 or more features (not empty)
                # Lines with multi-part features sometimes have reversed directions due to where
                # points are placed for the beginning and end of line segments within the multi-part line.
                m_to_s = ARCPY.MultipartToSinglepart_management (clipped_line, 'in_memory\\m_to_s')
            
                # Calculate mean direction of lines
                direction = ARCPY.DirectionalMean_stats(m_to_s, 'in_memory\\direction_line', 'DIRECTION')  
                
                bin_aspects = []
                dir_rows = ARCPY.SearchCursor (direction) # Read the direction feature 
                for dir_row in dir_rows: # For each record (there should only be one)
                    bin_aspects = dir_row.CompassA # Get direction
                del dir_row, dir_rows
                    
                # Add the current bin and average aspect to the centerline list
                centerline_list.append([elevation_bin, round(bin_aspects, 1)])
    
                ARCPY.Delete_management(direction) # Clean up temporary clip
                ARCPY.Delete_management(m_to_s) # Clean up temporary clip
            ARCPY.Delete_management(clipped_line) # Clean up temporary clip
        del row, rows
    
        # Look to see if there is an aspect value for the given bin
        for index, entry in enumerate (bin_list): # For each bin (all of them)
            bin_number = int(entry[1:]) # Convert string to int ('B150' to 150)
            for item in centerline_list: # For each item in current feature
                if item[0] == bin_number: # If the item's bin matches this bin
                    aspect_list[index] = str(item[1]) # Place slope value
        
        return aspect_list, False
    except:
        return aspect_list, True
Example 25
def SpatialCluster(inFeats, fldID, searchDist, fldGrpID='grpID'):
    '''Clusters features based on specified search distance. Features within twice the search distance of each other will be assigned to the same group.
   inFeats = The input features to group
   fldID = The field containing unique feature IDs in inFeats
   searchDist = The search distance to use for clustering. This should be half of the max distance allowed to include features in the same cluster. E.g., if you want features within 500 m of each other to cluster, enter "250 METERS"
   fldGrpID = The desired name for the output grouping field. If not specified, it will be "grpID".'''

    # Initialize trash items list
    trashList = []

    # Delete the GrpID field from the input features, if it already exists.
    try:
        arcpy.DeleteField_management(inFeats, fldGrpID)
    except:
        pass

    # Buffer input features
    printMsg('Buffering input features')
    outBuff = scratchGDB + os.sep + 'outBuff'
    arcpy.Buffer_analysis(inFeats, outBuff, searchDist, '', '', 'ALL')
    trashList.append(outBuff)

    # Explode multipart  buffers
    printMsg('Exploding buffers')
    explBuff = scratchGDB + os.sep + 'explBuff'
    arcpy.MultipartToSinglepart_management(outBuff, explBuff)
    trashList.append(explBuff)

    # Add and populate grpID field in buffers
    printMsg('Adding and populating grouping field in buffers')
    arcpy.AddField_management(explBuff, fldGrpID, 'LONG')
    arcpy.CalculateField_management(explBuff, fldGrpID, '!OBJECTID!', 'PYTHON')

    # Spatial join buffers with input features
    printMsg('Performing spatial join between buffers and input features')
    joinFeats = scratchGDB + os.sep + 'joinFeats'
    arcpy.SpatialJoin_analysis(inFeats, explBuff, joinFeats, 'JOIN_ONE_TO_ONE',
                               'KEEP_ALL', '', 'WITHIN')
    trashList.append(joinFeats)

    # Join grpID field to input features
    # This employs a custom function because arcpy is stupid slow at this
    JoinFields(inFeats, fldID, joinFeats, 'TARGET_FID', [fldGrpID])

    # Cleanup: delete buffers, spatial join features
    garbagePickup(trashList)

    printMsg('Processing complete.')

    return inFeats
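A usage sketch with a hypothetical point feature class and ID field, assuming scratchGDB, printMsg, JoinFields and garbagePickup are defined elsewhere in the module; with a 250 m search distance, features within 500 m of each other end up in the same group:

in_feats = r"C:\data\work.gdb\occurrence_points"   # hypothetical input features
SpatialCluster(in_feats, fldID="OBJECTID", searchDist="250 METERS", fldGrpID="grpID")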
Example 26
def Multi_to_single(layer):
    
    multi = False
    len_before = int(str(arcpy.GetCount_management(layer)))
    temp_lyer = layer  + 'Temp'
    save_name = layer
    arcpy.MultipartToSinglepart_management (layer,temp_lyer)
    arcpy.Delete_management                (layer)
    arcpy.Rename_management                (temp_lyer,save_name)
    len_after = int(str(arcpy.GetCount_management(layer)))
    if len_after > len_before:
        multi = True

    return multi
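Usage sketch with a hypothetical feature class path; the function explodes the data in place and the boolean return reports whether the explode actually increased the feature count:

fc = r"C:\data\work.gdb\land_use"   # hypothetical feature class, modified in place
had_multiparts = Multi_to_single(fc)
print("Multipart geometries were present: {0}".format(had_multiparts))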
Example 27
def procesar():
    #---------------------------
    arcpy.env.overwriteOutput = True
    avance = etapa1(cartografia, cws + os.path.sep + actas, cfecha, intersect)

    crearSalidas(avance, NoPlanif, Remanente)
    union = "union"
    funion = scr + os.path.sep + union

    fcarto = cws + os.path.sep + cartografia
    if not arcpy.Exists(fcarto):
        imprimir("PROBLEMAS CARTO=" + cartografia + " no existe")
        return
    factas = cws + os.path.sep + actas
    if not arcpy.Exists(factas):
        imprimir("PROBLEMAS ACTAS=" + actas + " no existe")
        return

    arcpy.Union_analysis([[fcarto, 1], [factas, 2]], funion, "all", eps)
    agregarCampos(funion, cfecha)

    noPlanificados = arcpy.AddFieldDelimiters(funion, "FID_" + actas) + " =-1"
    Remanentes = arcpy.AddFieldDelimiters(funion, "FID_" + cartografia) + "=-1"
    seleccion = "seleccion"
    arcpy.MakeFeatureLayer_management(funion, seleccion)
    arcpy.SelectLayerByAttribute_management(seleccion, "NEW_SELECTION",
                                            noPlanificados)
    arcpy.Append_management(seleccion, scr + os.path.sep + NoPlanif)

    arcpy.SelectLayerByAttribute_management(seleccion, "NEW_SELECTION",
                                            Remanentes)
    arcpy.Append_management(seleccion, scr + os.path.sep + Remanente,
                            "NO_TEST")

    if eliminarSliver:
        if eco:
            imprimir("\nEliminando Slivers en " + NoPlanif + "," + Remanente)

        sliver(scr + os.path.sep + NoPlanif)
        sliver(scr + os.path.sep + Remanente)
    # explode the remainder features
    if eco:
        imprimir("MULTIPART TO SINGLE PART...")
    arcpy.MultipartToSinglepart_management(Remanente, Remanente + "1")
    arcpy.Delete_management(Remanente)
    arcpy.Rename_management(Remanente + "1", Remanente)

    avanceDi = procesarAvance(actas)
    resumen(avanceDi)
Example 28
def explode(inputFeatureClass, outputFeatureClass):
    if not arcpy.Exists(inputFeatureClass):
        raise ExistsError
    if arcpy.Exists(outputFeatureClass):
        delete_feature_class(outputFeatureClass)
    try:
        explodeFlag = "false"
        arcpy.MultipartToSinglepart_management(inputFeatureClass,
                                               outputFeatureClass)
        explodeFlag = "true"
    except:
        config.run_error_message(inputFeatureClass,
                                 "Exploding multiparts failure")
    gc.collect()
    return explodeFlag
Example 29
def Clean_Data(gdbs):
    TOTAL_ALL = 0

    for gdb in gdbs:
        print "Working on: {}".format(gdb)
        tazar = gdb + '\\' + 'PARCELS_inProc_edit'
        bankal_parcel = gdb + '\\' + 'PARCEL_ALL_EDIT'
        line_bankal = gdb + '\\' + 'PARCEL_ARC_EDIT'
        point_bankal = gdb + '\\' + 'PARCEL_NODE_EDIT'

        bankal_diss = gdb + '\\' + 'bankal_diss' + str(uuid.uuid4())[::11]
        bankal_Single = gdb + '\\' + 'bankal_Single' + str(uuid.uuid4())[::11]

        temp_name = os.path.basename(os.path.dirname(gdb)) + str(
            uuid.uuid4())[::11]

        if arcpy.Exists(bankal_diss):
            arcpy.Delete_management(bankal_diss)

        if arcpy.Exists(bankal_Single):
            arcpy.Delete_management(bankal_Single)

        arcpy.Dissolve_management(bankal_parcel, bankal_diss)
        arcpy.MultipartToSinglepart_management(bankal_diss, bankal_Single)
        arcpy.MakeFeatureLayer_management(bankal_Single, temp_name)
        arcpy.SelectLayerByLocation_management(temp_name, "INTERSECT", tazar,
                                               '', "NEW_SELECTION", "INVERT")
        arcpy.DeleteFeatures_management(temp_name)

        list1 = [bankal_parcel, line_bankal, point_bankal]
        for i in list1:
            feat_before = int(str(arcpy.GetCount_management(i)))

            name = str(uuid.uuid4())[::10]
            arcpy.MakeFeatureLayer_management(i, name)
            arcpy.SelectLayerByLocation_management(name, "INTERSECT",
                                                   bankal_Single, '0.1',
                                                   "NEW_SELECTION", "INVERT")
            arcpy.DeleteFeatures_management(name)

            feat_after = int(str(arcpy.GetCount_management(i)))

            deleted = feat_before - feat_after
            TOTAL_ALL += deleted
            print "Total feature deleted from: {} is: {}".format(
                os.path.basename(i).split('.')[0], deleted)

    print "TOTAL ALL FEATURES: {}".format(TOTAL_ALL)
Example 30
def minimal_bounding_poly(in_features, where=''):
    """get a bounding multipart geometry around the given features

    Parameters
    ----------
    in_features : str
        full path to input features
    where : str, optional
        where clause

    Returns
    -------
    polygon : arcpy.Polygon
    """
    ws_tmp = arcpy.env.scratchGDB
    feat_tmp = join(ws_tmp, 'feat_tmp')
    feat_single = join(ws_tmp, 'feat_single')
    feat_minimal = join(ws_tmp, 'feat_minimal')
    out_union = join(ws_tmp, 'out_union')
    out_features = join(ws_tmp, 'out')

    #arcpy.Delete_management(ws_tmp)
    def del_tmp():
        for f in [
                feat_tmp, feat_single, feat_minimal, out_features, out_union
        ]:
            arcpy.Delete_management(f)

    del_tmp()
    arcpy.FeatureClassToFeatureClass_conversion(in_features,
                                                ws_tmp,
                                                split(feat_tmp)[1],
                                                where_clause=where)
    arcpy.MultipartToSinglepart_management(feat_tmp, feat_single)
    arcpy.MinimumBoundingGeometry_management(feat_single, feat_minimal,
                                             "RECTANGLE_BY_AREA", "NONE")
    arcpy.Union_analysis(feat_minimal, out_union, gaps="NO_GAPS")
    arcpy.Dissolve_management(out_union, out_features, "", "", "MULTI_PART",
                              "DISSOLVE_LINES")
    #arcpy.FillGaps_production(out_features)
    cursor = arcpy.da.SearchCursor(out_features, ['SHAPE@'])
    polygon = cursor.next()[0]
    del (cursor)

    del_tmp()
    return polygon
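Usage sketch with hypothetical inputs; because the function returns an arcpy.Polygon rather than writing a feature class, the result can be passed directly to other geoprocessing tools as an in-memory geometry:

import arcpy

bounds = minimal_bounding_poly(r"C:\data\work.gdb\buildings", where="FLOORS > 2")
print(bounds.area)
arcpy.Clip_analysis(r"C:\data\work.gdb\roads", bounds, r"in_memory\roads_clip")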