Example #1
def WVT(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_TreeWV.gdb')
    except:
        print('TreeWV GDB already exists')
    workDir = str(workFld) + '/' + city + '_TreeWV.gdb'
    arcpy.env.workspace = workDir
    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
    """ Split Raster Directory """
    if not os.path.isdir(str(workFld) + '/' + city + '_Split'):
        os.makedirs(str(workFld) + '/' + city + '_Split')
    splitDir = str(workFld) + '/' + city + '_Split'
    """ Set Workspace Environments """
    arcpy.env.workspace = workDir
    arcpy.env.scratchWorkspace = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_TreeWV_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        reportFile = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print('Creating Reuse Log')
        """ Write out first line of report file """
        print('Window Views of Trees Start Time: ' + time.asctime())
        reportFile.write(
            "Begin with EnviroAtlas 1-meter Land Cover for the EnviroAtlas community--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/LC'
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.overwriteOutput = True
        """-------- Reclassify LC into Binary Forest ----------------------------- """
        if not arcpy.Exists(str(freqDir) + '/MForestIO'):
            outReclass = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 0], [21, 0],
                                     [22, 0], [30, 0], [40, 1], [52, 0],
                                     [70, 0], [80, 0], [82, 1], [91, 1],
                                     [92, 0]]))
            outReclass.save(str(freqDir) + '/MForestIO')
            reportFile.write(
                "Reclassify the Land Cover into a Forest binary REPLACE-MFE" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            print("Forest area reclassified to binary raster..." +
                  time.asctime())
            ReuseRF.write("MForestIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')

        else:
            print("Forest binary raster already exists..." + time.asctime())
            reportFile.write(
                "Reclassify the Land Cover into a Forest binary REPLACE-MFE--MForestIO"
                + '--\n')
        """-------- Create 50m Moving Window ------------------------------------- """
        MW = arcpy.sa.FocalStatistics(freqDir + '/MForestIO',
                                      arcpy.sa.NbrCircle(50, 'CELL'), 'SUM',
                                      'NODATA')
        MW.save('MFor_50C')
        print("Moving window complete..." + time.asctime())
        reportFile.write(
            "Run Focal Statistics on the Forest Binary Raster with a circular window of 50 meters and statistics = SUM.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Reclassify Moving Window into Trees/NoTrees ------------------- """
        ReC = arcpy.sa.Reclassify(
            'MFor_50C', 'Value',
            arcpy.sa.RemapRange([[0, 0.99999, 1], [0.99999, 10300, 0]]))
        ReC.save('NoForView')
        print(
            "Moving window completed and reclassified to tree / no trees..." +
            time.asctime())
        reportFile.write(
            "Reclassify the Focal Statistics into Forest (>0 -> 0) or No Forest (0 -> 1).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Split the Raster As Needs, Process Each Piece ----------------- """
        """ Check if the raster should be split """
        columns = arcpy.GetRasterProperties_management(
            'NoForView', 'COLUMNCOUNT').getOutput(0)
        xsplit = int(float(columns) / 20000) + 1
        rows = arcpy.GetRasterProperties_management('NoForView',
                                                    'ROWCOUNT').getOutput(0)
        ysplit = int(float(rows) / 20000) + 1
        """-------- If no split, run the analysis --------------------------------- """
        if xsplit * ysplit == 1:
            """ Convert Raster to Polygon """
            arcpy.RasterToPolygon_conversion('NoForView', 'NFVP_Whole',
                                             'NO_SIMPLIFY')
            """ Extract areas with no forest in 50m """
            arcpy.Select_analysis('NFVP_Whole', 'NFVS_Whole', 'gridcode=1')
            reportFile.write(
                "Convert the raster into a polygon and select the features where gridcode = 1.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Project into Albers for use with Dasymetric """
            arcpy.Project_management('NFVS_Whole', 'NFVA_Whole', prjfile)
            print("Raster small enough, carry on..." + time.asctime())
            reportFile.write("Convert the polygons into Albers projection.--" +
                             time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """-------- If split, run the analysis on each piece and recombine --------- """
        else:
            """ Delete the raster, if necessary """
            xy = (xsplit * ysplit)
            for rast in range(xy):
                try:
                    arcpy.Delete_management(splitDir + '/nfvsp_' + str(rast))
                except:
                    pass
            try:
                arcpy.Delete_management(splitDir + '/noforview')
            except:
                pass
            """ Split the Raster """
            arcpy.RasterToOtherFormat_conversion('NoForView', splitDir, 'GRID')
            print("Raster too big, splitting into " + str(xsplit) +
                  " rows and " + str(ysplit) + " columns..." + time.asctime())
            arcpy.SplitRaster_management(splitDir + '/NoForView', splitDir,
                                         'NFVSp_', 'NUMBER_OF_TILES', 'GRID',
                                         '',
                                         str(xsplit) + ' ' + str(ysplit))
            reportFile.write(
                "Split the raster into pieces for easier processing. The Python script determines the number of pieces based on the number of rows and columns in the raster where no piece can have a side larger than 20,000 cells--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ For each raster: """
            prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
            xy = (xsplit * ysplit)
            for Chunk in range(0, xy):
                try:
                    result = float(
                        arcpy.GetRasterProperties_management(
                            splitDir + '/NFVSp_' + str(Chunk),
                            'MEAN').getOutput(0))
                    """ If the raster piece has data: """
                    if (result > 0):
                        """ Set Environments """
                        arcpy.env.snapRaster = freqDir + '/MForestIO'
                        arcpy.env.extent = freqDir + '/MForestIO'
                        """ Copy the piece back to the Working Directory """
                        arcpy.RasterToOtherFormat_conversion(
                            splitDir + '/NFVSp_' + str(Chunk), workDir)
                        """ Convert Raster to Polygon """
                        arcpy.RasterToPolygon_conversion(
                            'NFVSp_' + str(Chunk), 'NFVP_' + str(Chunk),
                            'NO_SIMPLIFY')
                        """ Extract areas with no forest in 50m """
                        arcpy.Select_analysis('NFVP_' + str(Chunk),
                                              'NFVS_' + str(Chunk),
                                              'gridcode=1')
                        """ Project into Albers for use with Dasymetric """
                        arcpy.Project_management('NFVS_' + str(Chunk),
                                                 'NFVA_' + str(Chunk), prjfile)
                        print("Chunk " + str(Chunk) + " / " + str(xy) +
                              " processed..." + time.asctime())
                except:
                    pass
            reportFile.write(
                "For each piece, convert the raster into a polygon and select the features where gridcode = 1--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            reportFile.write(
                "For each piece, convert the polygons into Albers projection.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Combine the resulting polygons """
            NFVchunks = arcpy.ListFeatureClasses('NFVA_*')
            arcpy.Merge_management(NFVchunks, workDir + '/NFVA_Whole')
            print("All chunks remerged..." + time.asctime())
            reportFile.write(
                "Merge all of the projected polygons together.--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/Dasy'
        arcpy.env.extent = freqDir + '/Dasy'
        """-------- End of Split Processing ---------------------------------------- """
        """ Extract Dasymetric Pixels where there is no forest in 50m """
        EbM = arcpy.sa.ExtractByMask(freqDir + '/Dasy', 'NFVA_Whole')
        EbM.save('Pop_NoForView')
        reportFile.write(
            "Extract by Mask the EnviroAtlas Dasymetric (2011/October 2015) pixels within the polygons--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate the Extracted Dasy Population with Each CBG """
        arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                        'Pop_NoForView', 'BG_TWV', 'DATA',
                                        'SUM')
        reportFile.write(
            "Calculate Zonal Statistics as Table for the extracted dasymetrics with the zones being the 2010 block groups for the EnviroAtlas community.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Calculate Total Dasy Population, if necessary ------------------ """
        """ Use the existing data """
        fieldNames = [f.name for f in arcpy.ListFields(freqDir + '/BG_Alb')]
        if 'Dasy_Pop' in fieldNames:
            reportFile.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--Dasy_Pop"
                + '--\n')
            """ Create population data """
        else:
            arcpy.AddField_management(freqDir + '/BG_Alb', 'Dasy_Pop', 'LONG')
            arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                            freqDir + '/Dasy',
                                            freqDir + '/Dasy_ZS', '', 'SUM')
            arcpy.JoinField_management(freqDir + '/BG_Alb', 'bgrp',
                                       freqDir + '/Dasy_ZS', 'bgrp', ['SUM'])
            arcpy.CalculateField_management(freqDir + '/BG_Alb', 'Dasy_Pop',
                                            '!SUM!', 'PYTHON_9.3')
            arcpy.DeleteField_management(freqDir + '/BG_Alb', ['SUM'])
            arcpy.JoinField_management(freqDir + '/BG', 'bgrp',
                                       freqDir + '/BG_Alb', 'bgrp',
                                       ['Dasy_Pop'])
            reportFile.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("Dasy_Pop--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        """-------- Create Final Table --------------------------------------------- """
        arcpy.TableToTable_conversion(freqDir + '/BG_Alb', workDir, 'TreeWV',
                                      '', 'bgrp')
        arcpy.DeleteField_management('TreeWV', [
            'PLx2_Pop', 'PLx2_Pct', 'SUM_HOUSIN', 'NonWhite', 'LandA_M',
            'Density', 'under_1', 'under_1pct', 'under_13', 'under_13pc',
            'over_70', 'over_70pct', 'Shape_Length', 'Shape_Leng',
            'NonWhite_Pop', 'NonWt_Pct', 'Area_M', 'Shape_Le_1', 'Shape_Area',
            'ALAND', 'NonWhite_P', 'H_Income_M', 'State'
        ])
        TreeView = 'TreeWV'
        reportFile.write(
            "Create a new table based on the EnviroAtlas community block groups table retaining the BGRP and Dasy_Pop fields--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add fields to new table """
        arcpy.AddField_management(TreeView, 'WVT_Pop', 'LONG')
        arcpy.AddField_management(TreeView, 'WVT_Pct', 'FLOAT', 5, 2)
        reportFile.write(
            "Add fields to the new table for WVT_Pop (long), WVT_Pct (float).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Join Each Table to the final table and WVW_Pop """
        arcpy.JoinField_management(TreeView, 'bgrp', 'BG_TWV', 'bgrp', ['SUM'])
        arcpy.CalculateField_management(TreeView, 'WVT_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.MakeTableView_management(TreeView, 'TreeView_Tbl')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION', 'SUM IS NULL')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop', 0,
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        arcpy.DeleteField_management('TreeView_Tbl', 'SUM')
        reportFile.write(
            "Join the zonal statistics table with the new table to calculate the new fields: WVT_Pop = zonal statistics.SUM; remove join--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate WVT_Pct """
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION', 'Dasy_Pop >0')
        arcpy.CalculateField_management(
            'TreeView_Tbl', 'WVT_Pct',
            '"%.2f" % (float(!WVT_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        reportFile.write(
            "Calculate field WVT_Pct = WVT_Pop / Dasy_Pop * 100 (limited to 2 decimal places).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate NULL values, where applicable """
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION',
                                                'Dasy_Pop = 0')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        arcpy.DeleteField_management('TreeView_Tbl', [
            'SUM_POP10', 'EAID', 'NonWhite', 'LandA_M', 'Density', 'Dasy_Pop',
            'SUM'
        ])
        print("Dasy raster summarized to BGs and stats calculated..." +
              time.asctime())
        reportFile.write(
            "Calculate fields where Dasy_Pop = 0: All Fields = -99999.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if not arcpy.Exists(freqDir + '/LC_Poly'):
            arcpy.env.extent = freqDir + '/LC'
            arcpy.env.snapRaster = freqDir + '/LC'
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21, 1],
                                     [22, 1], [30, 1], [40, 1], [52, 1],
                                     [70, 1], [80, 1], [82, 1], [91, 1],
                                     [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO',
                str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly',
                str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP',
                str(freqDir) + '/LC_Poly')
        """ Buffer the LC Polygon by -500m """
        if not arcpy.Exists(freqDir + '/Bnd_Cty_500m'):
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty',
                str(freqDir) + '/Bnd_Cty_500m', '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')
        """ Identify whether LC is large enough """
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')
        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            """ Identify BGs within 50m of the LC edge """
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus50', '-50 meters',
                'FULL', 'FLAT', 'ALL')
            arcpy.MakeFeatureLayer_management('LC_Poly_Minus50', 'Minus50')
            arcpy.MakeFeatureLayer_management(freqDir + '/BG', 'BG')

            arcpy.SelectLayerByLocation_management('BG', 'COMPLETELY_WITHIN',
                                                   'Minus50', '',
                                                   'NEW_SELECTION', 'INVERT')

            bgValue = float(arcpy.GetCount_management('BG').getOutput(0))
            print("LC extends beyond BG boundary, carry on..." +
                  time.asctime())
            """ For all BGs too close to the LC edge, assign both fields a value of -99998 """
            if bgValue > 0:
                bgrps = []
                cursor = arcpy.SearchCursor('BG')
                for row in cursor:
                    value = row.getValue('bgrp')
                    bgrps.append(value)
                bgrps = list(set(bgrps))
                expression = ''
                for bgrp in bgrps:
                    expression = expression + " OR bgrp = '" + str(bgrp) + "'"
                expression = expression[4:]
                arcpy.SelectLayerByAttribute_management(
                    'TreeView_Tbl', 'NEW_SELECTION', expression)
                arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop',
                                                '-99998', 'PYTHON_9.3')
                arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pct',
                                                '-99998', 'PYTHON_9.3')
                arcpy.SelectLayerByAttribute_management(
                    'TreeView_Tbl', 'CLEAR_SELECTION')
            print(
                "LC doesn't extend beyond BGs, removing border BGs from analysis..."
                + time.asctime())
            reportFile.write(
                "Calculate Field for BGs within 50m of the edge of the land cover, WVT_Pop and WVW_Pct = -99998.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create final table """
        arcpy.CopyRows_management('TreeView_Tbl', 'TreeWV_Fnl')
        try:
            arcpy.Delete_management(finalDir + '/' + str(city) + '_TreeWV')
        except:
            pass
        arcpy.TableToTable_conversion('TreeWV_Fnl', finalDir, city + '_TreeWV')
        allFields = [
            f.name for f in arcpy.ListFields(finalDir + '/' + city + '_TreeWV')
        ]
        for field in allFields:
            if field not in ['bgrp', 'OBJECTID', 'WVT_Pop', 'WVT_Pct']:
                arcpy.DeleteField_management(finalDir + '/' + city + '_TreeWV',
                                             [field])
        print('Window Views of Trees End Time: ' + time.asctime() + '\n')
        reportFile.write(
            "Export the fields to be displayed in the EnviroAtlas to a final gdb table. WVT_Pop, WVT_Pct--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- COMPLETE LOGFILES ---------------------------------------------
        reportFile.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        reportFile.write("\nSomething went wrong.\n\n")
        reportFile.write("Python Traceback Message below:")
        reportFile.write(traceback.format_exc())
        reportFile.write("\nArcMap Error Messages below:")
        reportFile.write(arcpy.GetMessages(2))
        reportFile.write("\nArcMap Warning Messages below:")
        reportFile.write(arcpy.GetMessages(1))

        reportFile.write("\n\nEnded at " + time.asctime() + '\n')
        reportFile.write("\n---End of Log File---\n")

        if reportFile:
            reportFile.close()
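
WVT is only defined above, never invoked. A minimal call might look like the sketch below; the city code and folder paths are hypothetical, and workFld is assumed to already contain <city>_Freq.gdb, <city>_Final.gdb, and a Logs folder, with Prj/ and Scratch.gdb under inDir.

# Hypothetical invocation; all arguments are assumptions for illustration.
WVT('TAC', r'C:/EnviroAtlas/Input', r'C:/EnviroAtlas/Tacoma')
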
Example #2
"""
Created on Fri Oct 19 11:54:14 2018

@author: derekolson
"""

import arcpy
from arcpy import env
import os
import geopandas

env.workspace = r"C:\Users\derekolson\Downloads\FireSevDates\FireSevDates"

fcNames = list()
fcList = list()

for root, dirs, files in os.walk(
        r"C:\Users\derekolson\Downloads\FireSevDates\FireSevDates"):
    for file in files:
        if file.endswith(".shp"):
            print(os.path.join(root, file))
            fileName = os.path.join(root, file)
            fcList.append(fileName)
            fcNames.append(file)

arcpy.Merge_management(fcList, "F:/Fires/firesMerged.shp")

arcpy.Dissolve_management("F:/Fires/firesMerged.shp",
                          "F:/Fires/firesMerged_dissolved.shp",
                          ["Year", "StartMonth", "StartDay"], "", "MULTI_PART",
                          "DISSOLVE_LINES")
Example #3
    edit.startOperation()

    if table in (attachment_table_1, attachment_table_2):
        oid = "REL_OBJECTID"
    else:
        oid = "OID@"
    
    with arcpy.da.UpdateCursor(table, [oid, fields[table]]) as cur:
        for row in cur:
            row[1] = row[0]
            cur.updateRow(row)            
    edit.stopOperation()
    edit.stopEditing(True)

print("Merging {0} and {1}, creating {2}".format(feature_class_1, feature_class_2, output_feature_class))
FILE.write("Merging {0} and {1}, creating {2}\n".format(feature_class_1, feature_class_2, output_feature_class))
arcpy.Merge_management([feature_class_1, feature_class_2], output_feature_class)

print("Enabling attachments on {0}, creating {1}".format(output_feature_class, attachment_table_merge))
FILE.write("Enabling attachments on {0}, creating {1}\n".format(output_feature_class, attachment_table_merge))
arcpy.EnableAttachments_management(output_feature_class)
     
arcpy.MakeFeatureLayer_management(output_feature_class, jointable)

for table in [[attachment_table_1, feature_class_1, firsttable],[attachment_table_2, feature_class_2, secondtable]]:
    tableview = "testmergedattachments"
    arcpy.MakeTableView_management(table[0], tableview)
    arcpy.AddJoin_management(tableview, fields[table[0]], jointable, fields[table[1]], "KEEP_COMMON")

    print("Creating table {0}".format(table[2]))
    FILE.write("Creating table {0}\n".format(table[2]))
    arcpy.TableToTable_conversion(tableview, arcpy.env.workspace, table[2])
        arcpy.AddMessage(str(tileNum) + "     Identity vegetation and intersections...")
        outMobilityTile = os.path.join(scratch,"tile_" + str(tileOID))
        arcpy.CopyFeatures_management(slopeClip,outMobilityTile)
        tileList.append(outMobilityTile)
        deleteme.append(outMobilityTile)
        
        # TODO: Every 10 tiles, APPEND to output 'tileMerge' (rather than MERGE them at the end)
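        # One possible shape for that TODO (a sketch, left commented out because
        # tileMerge is only created after this loop in the current code):
        # if tileNum % 10 == 0 and tileList:
        #     arcpy.Append_management(tileList, tileMerge, "NO_TEST")
        #     tileList = []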
        
        if debug == True: arcpy.AddMessage("Finish tile " + str(tileNum) + " : " + str(time.strftime("%m/%d/%Y  %H:%M:%S", time.localtime())))
        arcpy.AddMessage(str(tileNum) + "     Finished with tile...")
        tileNum += 1

    # Merge tiles into one.
    tileMerge = os.path.join("in_memory","tileMerge")
    arcpy.AddMessage("Merging " + str(numTiles) + " tiles for final SIF (F2) count...")
    arcpy.Merge_management(tileList,tileMerge)
    deleteme.append(tileMerge)

    # SlopeCat with f1
    # tileMerge with f2
    f1_f2 = os.path.join(scratch,"f1_f2")
    arcpy.Identity_analysis(slopePoly,tileMerge,f1_f2)
    deleteme.append(f1_f2)
    
    # vegetation with f3min/f3max OPTIONAL
    # clipSoils with f4wet/f4dry OPTIONAL
    # clipRoughness with f5 OPTIONAL
    if len(intersectionList) == 0:
        arcpy.AddMessage("Identity: F1 & F2 only.")
        arcpy.CopyFeatures_management(f1_f2,outputCCM)
    if len(intersectionList) == 1:
def main():
    ''' main '''
    try:

        arcpy.AddMessage("Getting database qualifier string ...")
        qualifierString = GetQualifierName(inputTDSFeatureDataset)
        if DEBUG is True:
            arcpy.AddMessage("qualifier string: " + qualifierString)

        fqClassesToMerge = [
            str(qualifierString + i) for i in FEATURECLASSES_TO_MERGE
        ]
        if DEBUG is True:
            arcpy.AddMessage("fqClassesToMerge: " + str(fqClassesToMerge))

        workspace = os.path.dirname(inputTDSFeatureDataset)
        tdsFeatureClasses = FeatureClassesFromWorkspace(inputTDSFeatureDataset)
        if DEBUG is True:
            arcpy.AddMessage("tdsFeatureClasses: " + str(tdsFeatureClasses))

        # now go through the list of all of them and see which names match our target list,
        # if so, add them to a new list
        arcpy.AddMessage("Building list of input features ...")
        newList = [str(os.path.join(workspace, os.path.basename(inputTDSFeatureDataset), fc))\
                   for fc in tdsFeatureClasses if fc in fqClassesToMerge]
        if DEBUG is True: arcpy.AddMessage("newList: " + str(newList))

        # output feature class name
        target = os.path.join(inputMAOTWorkspace,
                              "CombinedLinearObstacleFeature")
        if DEBUG is True: arcpy.AddMessage("target: " + str(target))

        # merge all FCs into the target FC
        arcpy.AddMessage(
            "Merging features to output (this may take some time)...")
        arcpy.Merge_management(newList, target)

        # set output
        arcpy.AddMessage("Setting output ...")
        arcpy.SetParameter(2, target)

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + \
                str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python Window
        print(pymsg + "\n")
        print(msgs)
                print "Pass"


            ###

            arcpy.AddField_management(dissolve_sum, "Per", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.CalculateField_management(dissolve_sum, "Per", "\"T1\"", "VB", "")

            arcpy.AddField_management(dissolve_summ, "Per", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.CalculateField_management(dissolve_summ, "Per", "\"T2\"", "VB", "")

        ##
            merge_file = r"%s\%s_T%s_shares.shp" %(output,dirname,y)
        
            if y == 3:
                arcpy.Merge_management([dissolve_sum, dissolve_summ,dissolve_summm], merge_file, "")
            else:
                arcpy.Merge_management([dissolve_sum, dissolve_summ], merge_file, "")

            #arcpy.Merge_management([dissolve_sum, dissolve_summ], merge_file, "")

            SC = arcpy.SearchCursor(merge_file)
            yy = 0
            for x in SC:
                yy = yy + x.getValue('SUM_Pop')

            arcpy.AddField_management(merge_file, "Share", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        
            expression = "!SUM_Pop!"+"/"+"%s" % yy
            arcpy.CalculateField_management(merge_file, "Share", expression, "PYTHON_9.3", "" ) #% yy
    # Run the arcpy buffer tool
    work_buffer_out = arcpy.Buffer_analysis(item_to_buffer,
                                            work_buffer_out_path,
                                            buffer_size_string).getOutput(0)
    # Add our output buffer to the temp output buffer container
    temp_output_list.append(work_buffer_out)

# Step 2

print("merging the output buffers...")
# Convert list of buffer output paths to a semicolon delimited string
bufferList = "; ".join(temp_output_list)

# Execute Merge
mergedBuffers = os.path.join(workspace_gdb, "mergedBuffers")
arcpy.Merge_management(bufferList, mergedBuffers)

# Step 3

print("clipping merged buffer layer...")
# Clip merged layers to country boundary
clippedMerge = os.path.join(workspace_gdb, "clippedMerge")
arcpy.Clip_analysis(mergedBuffers, clipTo, clippedMerge)

# Step 4

print("dissolving...")
# Execute Dissolve
dissolvedBuffers = os.path.join(workspace_gdb, "dissolvedBuffers")
arcpy.Dissolve_management(clippedMerge, dissolvedBuffers)
def MergeTables(DBF_dir,zstat_table):
    arcpy.env.workspace = DBF_dir
    tableList = arcpy.ListTables()      
    arcpy.Merge_management(tableList,zstat_table)
    print "Merged tables. Final zonalstat table {0} created. Located at {1}".format(zstat_table,DBF_dir)
    del tableList
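
A hypothetical call; the DBF folder and output table name below are assumptions for illustration.

# Merge every zonal-statistics DBF in the folder into a single table.
MergeTables(r"C:/temp/zonal_dbfs", r"C:/temp/zstat_all.dbf")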
Example #9
# Process: Add Field nodeID
arcpy.AddField_management(gaul_singleparted_shape_simpl, "nodeID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

# Process: Add Field ISO3final
arcpy.AddField_management(gaul_singleparted_shape_simpl, "ISO3final", "TEXT", "", "", "50", "", "NULLABLE", "NON_REQUIRED", "")

# Process: Calculate Field nodeID
arcpy.CalculateField_management(gaul_singleparted_shape_simpl, "nodeID", "!OBJECTID! +1+100000", "PYTHON_9.3", "")

# Process: Calculate Field ISO3final
arcpy.CalculateField_management(gaul_singleparted_shape_simpl, "ISO3final", "!iso3!", "PYTHON_9.3", "")
print("Additional fields added and calculated")

# Process: Merge
arcpy.Merge_management("Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl;Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final", Merge_WDPA_flat_land, "objectid \"objectid\" true true false 9 Long 0 9 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,objectid,-1,-1;id_object \"id_object\" true true false 9 Long 0 9 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,id_object,-1,-1;id_country \"id_country\" true true false 9 Long 0 9 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,id_country,-1,-1;name_iso31 \"name_iso31\" true true false 254 Text 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,name_iso31,-1,-1;iso3 \"iso3\" true true false 254 Text 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,iso3,-1,-1;orig_fid \"orig_fid\" true true false 9 Long 0 9 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,orig_fid,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,ORIG_FID,-1,-1;AREA_GEO \"AREA_GEO\" true true false 3407925 Double 57 3670065 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,AREA_GEO,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,AREA_GEO,-1,-1;nodeID \"nodeID\" true true false 0 Long 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,nodeID,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,nodeID,-1,-1;ISO3final \"ISO3final\" true true false 50 Text 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,ISO3final,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,ISO3final,-1,-1;Shape_Length \"Shape_Length\" false true true 8 Double 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,Shape_Length,-1,-1;Shape_Area \"Shape_Area\" false true true 8 Double 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,Shape_Area,-1,-1")
print("Gaul and wdpa_flat merged")

# Process: Make Feature Layer
arcpy.MakeFeatureLayer_management(Merge_WDPA_flat_land, Merge_WDPA_flat_land_lyr, "\"nodeID\" >100000", "", "OBJECTID_1 OBJECTID_1 VISIBLE NONE;Shape Shape VISIBLE NONE;objectid objectid VISIBLE NONE;id_object id_object VISIBLE NONE;id_country id_country VISIBLE NONE;name_iso31 name_iso31 VISIBLE NONE;sovereign_ sovereign_ VISIBLE NONE;sovereig_1 sovereig_1 VISIBLE NONE;sovereig_2 sovereig_2 VISIBLE NONE;iso3 iso3 VISIBLE NONE;iso2 iso2 VISIBLE NONE;un_m49 un_m49 VISIBLE NONE;source source VISIBLE NONE;status status VISIBLE NONE;original_d original_d VISIBLE NONE;original_n original_n VISIBLE NONE;source_cod source_cod VISIBLE NONE;sqkm sqkm VISIBLE NONE;orig_fid orig_fid VISIBLE NONE;AREA_GEO AREA_GEO VISIBLE NONE;cte0 cte0 VISIBLE NONE;nodeID nodeID VISIBLE NONE;ISO3final ISO3final VISIBLE NONE;Shape_Length Shape_Length VISIBLE NONE;Shape_Area Shape_Area VISIBLE NONE")

# Process: Calculate Field AREA_GEO=0
arcpy.CalculateField_management(Merge_WDPA_flat_land_lyr, "AREA_GEO", "0", "PYTHON_9.3", "")
print("Area_geo set to 0 for features coming from gaul (nodeid>100000)")

# Process: Repair Geometry (2)
arcpy.RepairGeometry_management(Merge_WDPA_flat_land, "DELETE_NULL")
print("Geometries repaired, once again...")

# Process: Delete Field
arcpy.DeleteField_management(Merge_WDPA_flat_land, "id_object;id_country;name_iso31;sovereign_;sovereig_1;sovereig_2;iso3;iso2;un_m49;source;status;original_d;original_n;source_cod;sqkm;orig_fid;cte0")
Example #10
#clip 7 days
#arcpy.Clip_analysis(gfs_7day,costa_rica,gfs_7day_cr)
#create_time_fields
#arcpy.AddField_management(gfs_7day_cr,'inicio', 'DATE')
#arcpy.AddField_management(gfs_7day_cr,'final', 'DATE')
#Calculate start and end dates
#arcpy.CalculateField_management(gfs_7day_cr, 'inicio','datetime.datetime.now().replace(hour=0, minute=0, second=0)', 'PYTHON3')
#arcpy.CalculateField_management(gfs_7day_cr, 'final','datetime.datetime.now().replace(hour=23, minute=59, second=59)', 'PYTHON3')

#Merge of all clipped layers
print('Merging all layers')
arcpy.Merge_management(
    [
        gfs_24h_cr, gfs_48h_cr, gfs_72h_cr, gfs_96h_cr, gfs_120h_cr,
        gfs_144h_cr, gfs_168h_cr
    ], project_gdb + r'\precipitacion',
    r'Contour "Contour" true true false 4 Long 0 0,First,#,gfs_168h_cr,Contour,-1,-1,gfs_120h_cr,Contour,-1,-1,gfs_144h_cr,Contour,-1,-1,gfs_24h_cr,Contour,-1,-1,gfs_48h_cr,Contour,-1,-1,gfs_72h_cr,Contour,-1,-1,gfs_96h_cr,Contour,-1,-1;inicio "inicio" true true false 8 Date 0 0,First,#,gfs_168h_cr,inicio,-1,-1,gfs_120h_cr,inicio,-1,-1,gfs_144h_cr,inicio,-1,-1,gfs_24h_cr,inicio,-1,-1,gfs_48h_cr,inicio,-1,-1,gfs_72h_cr,inicio,-1,-1,gfs_96h_cr,inicio,-1,-1;final "final" true true false 8 Date 0 0,First,#,gfs_168h_cr,final,-1,-1,gfs_120h_cr,final,-1,-1,gfs_144h_cr,final,-1,-1,gfs_24h_cr,final,-1,-1,gfs_48h_cr,final,-1,-1,gfs_72h_cr,final,-1,-1,gfs_96h_cr,final,-1,-1',
    "NO_SOURCE_INFO")
print('layers merged')

#create label field
arcpy.AddField_management(project_gdb + r'\precipitacion', 'label', 'text')
arcpy.management.CalculateField(
    project_gdb + r'\precipitacion', "label", "stringday(!inicio!, !Contour!)",
    "PYTHON3",
    "def stringday(inicio, mm):\n    weekDays = (\"Lunes\",\"Martes\",\"Miercoles\",\"Jueves\","
    +
    "\"Viernes\",\"Sabado\",\"Domingo\")\n    day = inicio.weekday()\n    daystring = weekDay"
    +
    "s[day]\n    label = \'El \' + daystring + \' lloverá \' + str(mm) + \'mm en esta región\'\n    return(l"
url = r'https://services7.arcgis.com/bRi0AN5rG57dCDE4/arcgis/rest/services/NGD_STREET_Redline/FeatureServer'
file_name = 'NGD_STREET Redline'
o_gdb = r'H:\automate_AGOL_download\AGOL_tests.gdb'
o_name = 'RedLine_test'
NGD_UIDs = False

#--------------------------------------------------------------------------------------------------------------
#Calls
print('Running calls')
results = auto_download_data(url, o_gdb, o_name, NGD_UIDs, '2019-04-10', '2021-04-15')
rename_the_fields(results)
print('Filtering')
filtered = filter_data_remove_duplicates(results, o_gdb, o_name)
print('Running address field QC checks')
checked = address_field_check(filtered, o_gdb, o_name, NGD_UIDs)

if checked[1] is None:
    arcpy.FeatureClassToFeatureClass_conversion(checked[0], o_gdb, o_name)
if checked[1] is not None:
    fix_address_field_errors(checked[1], o_gdb, o_name)
    arcpy.Delete_management(os.path.join(o_gdb, o_name))
    if checked[0] != None:
        print(checked)
        arcpy.Merge_management(checked, os.path.join(o_gdb, o_name))
    else: 
        arcpy.FeatureClassToFeatureClass_conversion(checked[1], o_gdb, o_name)
        arcpy.Delete_management(checked[1])

print('DONE!')
def RotateFeatureClass(inputFC, outputFC, angle=0, pivot_point=None):
    """Rotate Feature Class

    inputFC     Input features
    outputFC    Output feature class
    angle       Angle to rotate, in degrees
    pivot_point X,Y coordinates (as space-separated string)
                Default is lower-left of inputFC

    As the output feature class no longer has "real" xy locations
    after rotation, it has no coordinate system defined.
    """
    def RotateXY(x, y, xc=0, yc=0, angle=0, units="DEGREES"):
        """Rotate an xy cooordinate about a specified origin

        x,y      xy coordinates
        xc,yc   center of rotation
        angle   angle
        units    "DEGREES" (default) or "RADIANS"
        """
        x = x - xc
        y = y - yc
        # make angle clockwise (like Rotate_management)
        angle = angle * -1
        if units == "DEGREES":
            angle = math.radians(angle)
        xr = (x * math.cos(angle)) - (y * math.sin(angle)) + xc
        yr = (x * math.sin(angle)) + (y * math.cos(angle)) + yc
        return xr, yr

    # temp names for cleanup
    env_file = None
    lyrFC, lyrTmp, lyrOut = [None] * 3  # layers
    tmpFC = None  # temp dataset
    Row, Rows, oRow, oRows = [None] * 4  # cursors

    try:
        # process parameters
        try:
            xcen, ycen = [float(xy) for xy in pivot_point.split()]
            pivot_point = xcen, ycen
        except:
            # if pivot point was not specified, get it from
            # the lower-left corner of the feature class
            ext = arcpy.Describe(inputFC).extent
            xcen, ycen = ext.XMin, ext.YMin
            pivot_point = xcen, ycen

        angle = float(angle)

        # set up environment
        env_file = arcpy.CreateScratchName("xxenv", ".xml", "file",
                                           os.environ["TEMP"])
        arcpy.SaveSettings(env_file)

        # Disable any GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")
        WKS = env.workspace
        if not WKS:
            if os.path.dirname(outputFC):
                WKS = os.path.dirname(outputFC)
            else:
                WKS = os.path.dirname(arcpy.Describe(inputFC).catalogPath)
        env.workspace = env.scratchWorkspace = WKS

        # get feature class properties
        lyrFC = 'lyrFC'  #g_ESRI_variable_1
        arcpy.MakeFeatureLayer_management(inputFC, lyrFC)
        dFC = arcpy.Describe(lyrFC)
        shpField = dFC.shapeFieldName
        shpType = dFC.shapeType
        FID = dFC.OIDFieldName

        # create temp feature class
        tmpFC = arcpy.CreateScratchName("xxfc", "", "featureclass")
        arcpy.CreateFeatureclass_management(os.path.dirname(tmpFC),
                                            os.path.basename(tmpFC), shpType)
        lyrTmp = 'lyrTmp'  #g_ESRI_variable_2
        arcpy.MakeFeatureLayer_management(tmpFC, lyrTmp)

        # set up id field (used to join later)
        TFID = "XXXX_FID"
        arcpy.AddField_management(lyrTmp, TFID, "LONG")
        arcpy.DeleteField_management(lyrTmp, 'ID')  # g_ESRI_variable_3 = 'ID'

        # rotate the feature class coordinates
        # only points, polylines, and polygons are supported

        # open read and write cursors
        Rows = arcpy.SearchCursor(lyrFC, "", "", "%s;%s" % (shpField, FID))
        oRows = arcpy.InsertCursor(lyrTmp)
        arcpy.AddMessage("Opened search cursor")
        if shpType == "Point":
            for Row in Rows:
                shp = Row.getValue(shpField)
                pnt = shp.getPart()
                pnt.X, pnt.Y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                oRow = oRows.newRow()
                oRow.setValue(shpField, pnt)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        elif shpType in ["Polyline", "Polygon"]:
            parts = arcpy.Array()
            rings = arcpy.Array()
            ring = arcpy.Array()
            for Row in Rows:
                shp = Row.getValue(shpField)
                p = 0
                for part in shp:
                    for pnt in part:
                        if pnt:
                            x, y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                            ring.add(arcpy.Point(x, y, pnt.ID))
                        else:
                            # if we have a ring, save it
                            if len(ring) > 0:
                                rings.add(ring)
                                ring.removeAll()
                    # we have our last ring, add it
                    rings.add(ring)
                    ring.removeAll()
                    # if only one, remove nesting
                    if len(rings) == 1: rings = rings.getObject(0)
                    parts.add(rings)
                    rings.removeAll()
                    p += 1

                # if only one, remove nesting
                if len(parts) == 1: parts = parts.getObject(0)
                if dFC.shapeType == "Polyline":
                    shp = arcpy.Polyline(parts)
                else:
                    shp = arcpy.Polygon(parts)
                parts.removeAll()
                oRow = oRows.newRow()
                oRow.setValue(shpField, shp)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        else:
            raise Exception("Shape type {0} is not supported".format(shpType))

        del oRow, oRows  # close write cursor (ensure buffer written)
        oRow, oRows = None, None  # restore variables for cleanup

        # join attributes, and copy to output
        arcpy.AddJoin_management(lyrTmp, TFID, lyrFC, FID)
        env.qualifiedFieldNames = False
        arcpy.Merge_management(lyrTmp, outputFC)
        lyrOut = 'lyrOut'  #g_ESRI_variable_4
        arcpy.MakeFeatureLayer_management(outputFC, lyrOut)
        # drop temp fields 2,3 (TFID, FID)
        fnames = [f.name for f in arcpy.ListFields(lyrOut)]
        dropList = ';'.join(fnames[2:4])  #g_ESRI_variable_5 = ';'
        arcpy.DeleteField_management(lyrOut, dropList)

    except MsgError as xmsg:
        arcpy.AddError(str(xmsg))
    except arcpy.ExecuteError:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo.strip())
        arcpy.AddError(arcpy.GetMessages())
        numMsg = arcpy.GetMessageCount()
        for i in range(0, numMsg):
            arcpy.AddReturnMessage(i)
    except Exception as xmsg:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo + str(xmsg))
    finally:
        # reset environment
        if env_file: arcpy.LoadSettings(env_file)
        # Clean up temp files
        for f in [lyrFC, lyrTmp, lyrOut, tmpFC, env_file]:
            try:
                if f: arcpy.Delete_management(f)
            except:
                pass
        # delete cursors
        try:
            for c in [Row, Rows, oRow, oRows]:
                del c
        except:
            pass

        # return pivot point
        try:
            pivot_point = "{0} {1}".format(*pivot_point)
        except:
            pivot_point = None

        return pivot_point
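
A hypothetical call, rotating a feature class 30 degrees clockwise about its lower-left corner; the dataset paths are assumptions for illustration.

# Illustrative only; the input/output paths are made up.
pivot = RotateFeatureClass(r"C:/data/work.gdb/parcels",
                           r"C:/data/work.gdb/parcels_rot30", angle=30)
print("Rotated about pivot point: " + str(pivot))
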
folderlist = []
# NOTE: the original call to os.walk() was missing its argument; input_path is a
# hypothetical variable naming the directory to search.
for root, dirs, files in os.walk(input_path):  # Walk through the input path
    folderlist.append(root)  # Append the directories to folderlist
'''
Iterate through the folderlist, calling os.listdir() on each folder to isolate
each file in the directory.  If that file has a '.shp' extension, append it
to the shapefiles list using os.path.join() to connect the file to its directory
'''

shapefiles = []
for folder in folderlist:  # Loop through the folder list
    for i in os.listdir(folder):  # Call listdir on the folder
        # Verify the file has a '.shp' extension
        if i.endswith('.shp'):
            # Append the file to the shapefiles list, joining it to its directory
            shapefiles.append(os.path.join(folder, i))

# Now lets try to do our merge
OutPath = r"C:/Users/z02sjt/Desktop/PythonWorkspace/ArcPyFun.gdb"
mergedSchoolDistricts = "C:/Users/z02sjt/Desktop/PythonWorkspace/ArcPyFun.gdb/All_SchoolDists"
arcpy.Merge_management(shapefiles, mergedSchoolDistricts)  # Merge all shapefiles into one feature class

# Now lets re-project
env.workspace = OutPath
outCS = "C:/Users/z02sjt/AppData/Roaming/ESRI/Desktop10.5/ArcMap/Coordinate Systems/NAD83 Albers Equal Area, 48 States, Panhandle, US Foot.prj"
input_features = "C:/Users/z02sjt/Desktop/PythonWorkspace/ArcPyFun.gdb/All_SchoolDists"
output_features = os.path.join(OutPath, "All_SchoolDists_Proj")
arcpy.Project_management(input_features, output_features, outCS)
Example #14
featurelist = arcpy.ListFeatureClasses()
print(featurelist)

for feature in featurelist:
    featurepath = gdbpath + '\\' + feature
    featurepath108 = temptpath + '\\' + feature + '108'
    arcpy.CopyFeatures_management(featurepath, featurepath108)
    arcpy.DefineProjection_management(featurepath, newprojection)
    arcpy.DefineProjection_management(featurepath108, firstprojection)
    featurepath108to111 = featurepath108 + 'to111'
    arcpy.Project_management(featurepath108, featurepath108to111,
                             newprojection)

    # Merge features
    mergefile = mergefilefolder + feature + '.shp'
    arcpy.Merge_management(inputs=[featurepath, featurepath108to111],
                           output=mergefile,
                           add_source="NO_SOURCE_INFO")

    # Delete features outside the boundary
    Layer_With_Selection, Output_Layer_Names, Count = arcpy.SelectLayerByLocation_management(
        in_layer=[mergefile],
        overlap_type="INTERSECT",
        select_features=fanwei,
        search_distance="",
        selection_type="NEW_SELECTION",
        invert_spatial_relationship="NOT_INVERT")
    Updated_Input_With_Rows_Removed = arcpy.DeleteRows_management(
        in_rows=Layer_With_Selection)[0]
    print('%s processed' % feature)
Example #15
    arcpy.env.workspace = str_cc + str
    shps = arcpy.ListFeatureClasses()
    for shp_add in shps_add:
        for shp in shps:
            if shp.find(shp_add) >= 0:
                arcpy.Copy_management(shp, str_cc_change + str + "\\" +shp)
                
for str in strs:
    arcpy.env.workspace = str_cc_change + str
    shps = arcpy.ListFeatureClasses()
    print(len(shps))
    
for str in strs:
    arcpy.env.workspace = str_cc_change + str
    shps = arcpy.ListFeatureClasses()
    arcpy.Merge_management(shps, str + "_change.shp")
    
mxd = arcpy.mapping.MapDocument("CURRENT")
df = arcpy.mapping.ListDataFrames(mxd)[0]
lyrs = arcpy.mapping.ListLayers(df)
for i in range(0, 4):
    cursor = arcpy.da.SearchCursor(lyrs[i], "area")
    total = 0
    for row in cursor:
        total = total + row[0]
    print("{0}: {1}".format(lyrs[i].name, total))
    
for i in range(0, 4):
    cursor = arcpy.da.SearchCursor(lyrs[i], "area")
    total = 0
    for row in cursor:
Example #16
    if debug == True: arcpy.AddMessage("utdsFeatureClasses: " + str(utdsFeatureClasses))
    
    # now go through the list of all of them and see which names match our target list, if so, add them to a new list
    arcpy.AddMessage("Building list of input features ...")
    for fc in utdsFeatureClasses:
        if fc in fqClassesToMerge:
            newList.append(str(os.path.join(workspace,featureDatasetName,fc)))
    if debug == True: arcpy.AddMessage("newList: " + str(newList))

    # output feature class name
    target = os.path.join(inputMAOTWorkspace,"CombinedVegetationCoverage")
    if debug == True: arcpy.AddMessage("target: " + str(target))
    
    # merge all FCs into the target FC
    arcpy.AddMessage("Merging features to output (this may take some time)...")
    arcpy.Merge_management(newList,target)
    
    # set output
    if debug == True: arcpy.AddMessage("Setting output ...")
    arcpy.SetParameter(2,target)
    
except arcpy.ExecuteError: 
    # Get the tool error messages 
    msgs = arcpy.GetMessages() 
    arcpy.AddError(msgs) 
    print(msgs)

except:
    # Get the traceback object
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
Example #17
fcssl = [
    fcs[i:i + merge_feature_count]
    for i in xrange(0, len(fcs), merge_feature_count)
]  # result = [urls[i:i+1000] for i in xrange(0, len(urls), 1000)]

#for i in fcssl:
#print i
#fcsslice =# ''.join(i  for i in fcssl)

print(fcssl)
#print "Starting!"
merg_cnt = len(fcs)
nam = 0
for j in fcssl:
    print(j)
    out = fc + "\\{0}{1}".format(
        str(merged_feature_prefix), str(nam)
    )  # Change output GDB and name here (merged_feature_prefix)
    print(out)
    if arcpy.Exists(out):
        print("Avoided merging since it exists")
    else:
        # arcpy.SetProgressor("step", "Merging FCs", 0, len(fcs), 1)
        print("Doing Merge! %s" % str(nam))
        arcpy.Merge_management(j, out)
        nam += 1
        print("Completed Merge %s !" % nam)
print("Done!")
Example #18
OUTFALLS__4_ = "D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS"
OUTFALLS__2_ = "D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS"
Outfalls_All = "D:\\GIS\\Dewberry\\gdb\\MS4Data.gdb\\Outfalls_All"
Outfalls_All_DefQuery = "Outfalls_All_DefQuery"
Outfalls_All_Query = "D:\\GIS\\Dewberry\\gdb\\MS4Data.gdb\\Outfalls_All_Query"
SEWER = "D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\SEWER"
SEWER__2_ = "D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\SEWER"
SEWER__3_ = "D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\SEWER"
Sewer_All = "D:\\GIS\\Dewberry\\gdb\\MS4Data.gdb\\Sewer_All"
Sewers_All_DefQuery = "Sewers_All_DefQuery"
Sewers_All_Query = "D:\\GIS\\Dewberry\\gdb\\MS4Data.gdb\\Sewers_All_Query"

# Process: Merge (2)
arcpy.Merge_management(
    "D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS;D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS;D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS",
    Outfalls_All,
    "ENABLED \"Enabled\" true true false 2 Short 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,ENABLED,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,ENABLED,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,ENABLED,-1,-1;SEWER_TYPE \"SEWER_TYPE\" true true false 5 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,SEWER_TYPE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,SEWER_TYPE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,SEWER_TYPE,-1,-1;UNITID \"UNITID\" true true false 30 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITID,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITID,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITID,-1,-1;DATEBUILT_YEAR \"DATEBUILT_YEAR\" true true false 2 Short 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,DATEBUILT_YEAR,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,DATEBUILT_YEAR,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,DATEBUILT_YEAR,-1,-1;OWNERSHIP \"OWNERSHIP\" true true false 10 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,OWNERSHIP,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,OWNERSHIP,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,OWNERSHIP,-1,-1;SPDES_NUM \"SPDES_NUM\" true true false 10 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,SPDES_NUM,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,SPDES_NUM,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,SPDES_NUM,-1,-1;OUTFALL_SIZE \"SIZE1\" true true false 8 Double 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,OUTFALL_SIZE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,OUTFALL_SIZE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,OUTFALL_SIZE,-1,-1;OUTFALL_SIZE2 \"OUTFALL_SIZE2\" true true false 8 Double 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,OUTFALL_SIZE2,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,OUTFALL_SIZE2,-1,-1;ITEM_ID \"ITEM_ID\" true true false 10 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,ITEM_ID,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,ITEM_ID,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,ITEM_ID,-1,-1;ROTATION \"ROTATION\" true true false 8 Double 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,ROTATION,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,ROTATION,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,ROTATION,-1,-1;GLOBALID \"GlobalID\" false false false 38 GlobalID 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,GLOBALID,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,GLOBALID,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,GLOBALID,-1,-1;SOURCE \"SourceImage\" true true false 256 Text 0 0 
,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,SOURCE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,SOURCE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,SOURCE,-1,-1;LATITUDE \"LATITUDE\" true true false 8 Double 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,LATITUDE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,LATITUDE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,LATITUDE,-1,-1;LONGITUDE \"LONGITUDE\" true true false 8 Double 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,LONGITUDE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,LONGITUDE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,LONGITUDE,-1,-1;UNITTYPE \"UNITTYPE\" true true false 10 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITTYPE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITTYPE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITTYPE,-1,-1;CB \"CommunityBoard\" true true false 5 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,CB,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,CB,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,CB,-1,-1;POINT_X \"POINT_X\" true true false 8 Double 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,POINT_X,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,POINT_X,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,POINT_X,-1,-1;POINT_Y \"POINT_Y\" true true false 8 Double 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,POINT_Y,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,POINT_Y,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,POINT_Y,-1,-1;COMMENTS \"COMMENTS\" true true false 255 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,COMMENTS,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,COMMENTS,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,COMMENTS,-1,-1;BORO \"BORO\" true true false 2 Short 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,BORO,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,BORO,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,BORO,-1,-1;UNITS \"UNITS\" true true false 50 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITS,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITS,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,UNITS,-1,-1;HEADWALL \"HEADWALL\" true true false 2 Short 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,HEADWALL,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,HEADWALL,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,HEADWALL,-1,-1;PipeDia2 \"PIPE_DIAMETER2\" true true false 4 Long 0 0 
,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,PipeDia2,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,PipeDia2,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,PipeDia2,-1,-1;REGULATOR_ID \"REGULATOR_ID\" true true false 50 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,REGULATOR_ID,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,REGULATOR_ID,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,REGULATOR_ID,-1,-1;RESPONSIBILITY \"RESPONSIBILITY\" true true false 10 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,RESPONSIBILITY,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,RESPONSIBILITY,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,RESPONSIBILITY,-1,-1;OUTFALL_TYPE \"OUTFALL_TYPE\" true true false 10 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,OUTFALL_TYPE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,OUTFALL_TYPE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,OUTFALL_TYPE,-1,-1;OF_SIZE \"OF_SIZE\" true true false 30 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,OF_SIZE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,OF_SIZE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,OF_SIZE,-1,-1;RECEIVING_WATERBODY \"RECEIVING_WATER_BODY\" true true false 50 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,RECEIVING_WATERBODY,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,RECEIVING_WATERBODY,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,RECEIVING_WATERBODY,-1,-1;MS4_ASSET \"MS4_ASSET\" true true false 50 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,MS4_ASSET,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,MS4_ASSET,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,MS4_ASSET,-1,-1;DATEBUILT \"DATEBUILT\" true true false 8 Date 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,DATEBUILT,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,DATEBUILT,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,DATEBUILT,-1,-1;CREATEDATE \"CREATEDATE\" true true false 8 Date 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,CREATEDATE,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,CREATEDATE,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,CREATEDATE,-1,-1;MS4_ASSET_REV \"MS4_ASSET_REV\" true true false 50 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\HP\\rcu-32304.gdb\\NYCDEP_Sewer\\OUTFALLS,MS4_ASSET_REV,-1,-1,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,MS4_ASSET_REV,-1,-1,D:\\GIS\\Dewberry\\gdb\\TI\\rcu-32332.gdb\\NYCDEP_Sewer\\OUTFALLS,MS4_ASSET_REV,-1,-1;TREATMENT_PLANT \"TREATMENT_PLANT\" true true false 255 Text 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,TREATMENT_PLANT,-1,-1;TEMPOID \"TEMPOID\" true true false 4 Long 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,TEMPOID,-1,-1;PipeDia \"PIPE_DIAMETER\" true true false 4 Long 0 0 
,First,#,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,PipeDia,-1,-1;No_field \"NUMBER_FIELD\" true true false 2 Short 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,No_field,-1,-1;TIDEFLAP_GATE \"TIDEFLAP_GATE\" true true false 2 Short 0 0 ,First,#,D:\\GIS\\Dewberry\\gdb\\SI\\31936_Sewer.gdb\\NYCDEP_Sewer\\OUTFALLS,TIDEFLAP_GATE,-1,-1"
)

# Process: Make Feature Layer
arcpy.MakeFeatureLayer_management(
    Outfalls_All, Outfalls_All_DefQuery,
    "SEWER_TYPE IN ('STM', NULL, 'UNK') AND NOT OWNERSHIP = 'NYSDOT' AND MS4_ASSET IN ( NULL, 'N') AND TREATMENT_PLANT IN (NULL, 'PR')",
    "",
    "OBJECTID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;ENABLED ENABLED VISIBLE NONE;TREATMENT_PLANT TREATMENT_PLANT VISIBLE NONE;SEWER_TYPE SEWER_TYPE VISIBLE NONE;UNITID UNITID VISIBLE NONE;DATEBUILT_YEAR DATEBUILT_YEAR VISIBLE NONE;OWNERSHIP OWNERSHIP VISIBLE NONE;SPDES_NUM SPDES_NUM VISIBLE NONE;OUTFALL_SIZE OUTFALL_SIZE VISIBLE NONE;ITEM_ID ITEM_ID VISIBLE NONE;ROTATION ROTATION VISIBLE NONE;GLOBALID GLOBALID VISIBLE NONE;TEMPOID TEMPOID VISIBLE NONE;PipeDia PipeDia VISIBLE NONE;SOURCE SOURCE VISIBLE NONE;LATITUDE LATITUDE VISIBLE NONE;LONGITUDE LONGITUDE VISIBLE NONE;UNITTYPE UNITTYPE VISIBLE NONE;CB CB VISIBLE NONE;POINT_X POINT_X VISIBLE NONE;POINT_Y POINT_Y VISIBLE NONE;COMMENTS COMMENTS VISIBLE NONE;BORO BORO VISIBLE NONE;UNITS UNITS VISIBLE NONE;No_field No_field VISIBLE NONE;TIDEFLAP_GATE TIDEFLAP_GATE VISIBLE NONE;HEADWALL HEADWALL VISIBLE NONE;PipeDia2 PipeDia2 VISIBLE NONE;REGULATOR_ID REGULATOR_ID VISIBLE NONE;RESPONSIBILITY RESPONSIBILITY VISIBLE NONE;OUTFALL_TYPE OUTFALL_TYPE VISIBLE NONE;OF_SIZE OF_SIZE VISIBLE NONE;RECEIVING_WATERBODY RECEIVING_WATERBODY VISIBLE NONE;MS4_ASSET MS4_ASSET VISIBLE NONE;DATEBUILT DATEBUILT VISIBLE NONE;CREATEDATE CREATEDATE VISIBLE NONE;MS4_ASSET_REV MS4_ASSET_REV VISIBLE NONE;OUTFALL_SIZE2 OUTFALL_SIZE2 VISIBLE NONE"
)

# Process: Copy Features
arcpy.CopyFeatures_management(Outfalls_All_DefQuery, Outfalls_All_Query, "",
                              "0", "0", "0")
import arcpy, os
import geopandas as gpd


def buff_water_area(exports, buff_size, riv_line_fold, scratch_gdb,
                    OrdSurv_Grid, epsg_code):
    ordsurv_gp = gpd.read_file(OrdSurv_Grid, driver="ESRI Shapefile")
    ordsurv_gp.crs = {'init': 'epsg:' + epsg_code}

    direc_list = next(os.walk(exports))[1]

    # print(direc_list)

    # iterate over top folder containing OS regions
    print("start looping folders")
    for folder in direc_list:
        # if folder == 'sx' or folder == 'sy': # Just for testing

        wa_path = os.path.join(exports, folder, folder + "_WaterArea.shp")
        if arcpy.Exists(wa_path):
            print("buffering water area for OS Grid {0}".format(folder))
            wa_buff_tmp = r"in_memory/wa_buff"
            arcpy.Buffer_analysis(wa_path,
                                  wa_buff_tmp,
                                  buffer_distance_or_field=buff_size,
                                  line_side="FULL")

            line_buff = buff_lines(riv_line_fold, folder, buff_size,
                                   OrdSurv_Grid)

            buffer_out = os.path.join(scratch_gdb, folder + "water_buffer")

            if line_buff is None:
                print(
                    "no line features for {0}, using polys only".format(folder))
                arcpy.CopyFeatures_management(wa_buff_tmp, buffer_out)
            else:
                print(
                    "merging line and polygons for OS Grid {0}".format(folder))
                merge_list = [wa_buff_tmp, line_buff]
                arcpy.Merge_management(merge_list, buffer_out)

            water_gpd = gpd.read_file(scratch_gdb, driver='FileGDB',
                                      layer=folder + "water_buffer")
            water_gpd['Wat_True'] = 1
            os_Area_gpd = ordsurv_gp[ordsurv_gp.GRIDSQ == folder.upper()]

            rasterise_that(water_gpd, os_Area_gpd, exports, epsg_code,
                           folder.upper())
            arcpy.Delete_management(buffer_out)

        else:
            line_buff = buff_lines(riv_line_fold, folder, buff_size,
                                   OrdSurv_Grid)
            if line_buff is None:
                print("no rivers for OS GRID {0}".format(folder))
            else:
                buffer_out = os.path.join(scratch_gdb, folder + "water_buffer")
                arcpy.CopyFeatures_management(line_buff, buffer_out)
                water_gpd = gpd.read_file(scratch_gdb, driver='FileGDB',
                                          layer=folder + "water_buffer")
                water_gpd['Wat_True'] = 1
                os_Area_gpd = ordsurv_gp[ordsurv_gp.GRIDSQ == folder.upper()]

                rasterise_that(water_gpd, os_Area_gpd, exports, epsg_code,
                               folder.upper())
                arcpy.Delete_management(buffer_out)

        arcpy.Delete_management(r"in_memory")
Ejemplo n.º 20
0
    def execute(self, params, messages):
        arcpy.AddMessage(
            """Welcome to the Source Feature and EO Assigner! This tool is designed to prepare a feature class or shapefile for bulk load into Biotics by assigning an existing or new SFID and EOID grouping variable to observations based on eparation distance. This used to be done manually, so sit back and enjoy all the other work you can be doing instead of this!"""
        )

        in_points = params[0].valueAsText
        in_lines = params[1].valueAsText
        in_poly = params[2].valueAsText
        species_code = params[3].valueAsText
        lu_separation = params[4].valueAsText
        eo_reps = params[5].valueAsText
        eo_id_field = params[6].valueAsText
        eo_sourcept = params[7].valueAsText
        eo_sourceln = params[8].valueAsText
        eo_sourcepy = params[9].valueAsText
        sf_id_field = params[10].valueAsText
        species_code_field = params[11].valueAsText

        arcpy.env.workspace = "in_memory"

        arcpy.AddMessage("Preparing input data")
        #prepare single fc from biotics sf fcs
        sfs_in = [eo_sourcept, eo_sourceln, eo_sourcepy]
        sfs_out = ["eo_sourcept", "eo_sourceln", "eo_sourcepy"]
        for sf_in, sf_out in zip(sfs_in, sfs_out):
            arcpy.Buffer_analysis(sf_in, sf_out, 1)
        sf_merge = arcpy.Merge_management(sfs_out, "sf_merge")
        sf_lyr = arcpy.MakeFeatureLayer_management(sf_merge, "sf_lyr")

        data_in = []
        data_out = []
        if in_points:
            data_in.append(in_points)
            data_out.append("pts")
        if in_lines:
            data_in.append(in_lines)
            data_out.append("lines")
        if in_poly:
            data_in.append(in_poly)
            data_out.append("polys")

        join_id = 1
        for i, o in zip(data_in, data_out):
            arcpy.AddField_management(i, "temp_join_id", "TEXT")
            with arcpy.da.UpdateCursor(i, "temp_join_id") as cursor:
                for row in cursor:
                    row[0] = str(join_id)
                    cursor.updateRow(row)
                    join_id += 1
            arcpy.Buffer_analysis(i, o, 1)
        data_merge = arcpy.Merge_management(data_out, "data_merge")
        data_lyr = arcpy.MakeFeatureLayer_management(data_merge, "data_lyr")

        if arcpy.ListFields(data_lyr, species_code)[0].type == 'Integer':
            species_query = "{}={}"
        else:
            species_query = "{}='{}'"
        if arcpy.ListFields(data_lyr, species_code_field)[0].type == 'Integer':
            eo_species_query = "{}={}"
        else:
            eo_species_query = "{}='{}'"

        #get name of true OID field
        objectid_field = arcpy.Describe(data_lyr).OIDFieldName

        #create feature layers to allow for selections
        eo_reps = arcpy.MakeFeatureLayer_management(eo_reps, "eo_reps")

        #add EO/SF ID fields if they do not already exist
        add_fields_text = ["SF_ID", "SF_NEW", "EO_ID", "EO_NEW"]
        for field in add_fields_text:
            if len(arcpy.ListFields(data_lyr, field)) == 0:
                arcpy.AddField_management(data_lyr, field, "TEXT", "", "", 50)
        add_fields_int = ["UNIQUEID"]
        for field in add_fields_int:
            if len(arcpy.ListFields(data_lyr, field)) == 0:
                arcpy.AddField_management(data_lyr, field, "LONG")

        #set word index to assign words to new EO groups
        word_index = 1
        observation_num = 1

        arcpy.AddMessage("Beginning to assign EO IDs")
        #get total records in data_lyr for progress reporting messages
        total_obs = arcpy.GetCount_management(data_lyr)
        #start assigning loop
        search_fields = [
            objectid_field, "EO_ID", "EO_NEW", species_code, lu_separation
        ]
        with arcpy.da.SearchCursor(data_lyr, search_fields) as cursor:
            for row in cursor:
                objectid = row[0]
                #if one of the EOID fields already has a value, continue on to the next feature
                if row[2] is not None or (row[1] is not None and row[1] != 0):
                    arcpy.AddMessage(
                        "ObjectID " + str(objectid) +
                        " EO Observation number " + str(observation_num) +
                        "/" + str(total_obs) +
                        " has already been assigned to a new or existing EO.")
                    pass
                else:
                    sname = row[3]
                    distance = str(row[4] * 1000) + " METERS"

                    #select feature and assign sname and separation distance variables
                    arcpy.SelectLayerByAttribute_management(
                        data_lyr, "NEW_SELECTION",
                        "{}={}".format(objectid_field, objectid))
                    #check for existing EO reps within separation distance of feature
                    arcpy.SelectLayerByAttribute_management(
                        eo_reps, 'NEW_SELECTION',
                        eo_species_query.format(species_code_field, sname))
                    arcpy.SelectLayerByLocation_management(
                        eo_reps, "WITHIN_A_DISTANCE", data_lyr, distance,
                        "SUBSET_SELECTION")
                    #check for selection on eo_reps layer - if there is a selection, get eoid, select all observations within the separation distance, and assign existing eoid to selected features
                    selection_num = arcpy.Describe(eo_reps).fidset
                    if selection_num != u'':
                        with arcpy.da.SearchCursor(eo_reps,
                                                   eo_id_field) as cursor:
                            #eoid = sorted({row[0] for row in cursor}, reverse=True)[0] #use this if keeping newest EO
                            eoid = ",".join(
                                sorted({str(row[0])
                                        for row in cursor})
                            )  #use this if filling with EOIDs of all EOs within separation distance
                        #set arbitrary unequal counts to start while loop
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                data_lyr, "WITHIN_A_DISTANCE", data_lyr,
                                distance, "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                data_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))
                            countAfter = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(data_lyr,
                                                   "EO_ID") as cursor:
                            for row in cursor:
                                row[0] = str(eoid)
                                cursor.updateRow(row)
                        arcpy.AddMessage(
                            "ObjectID " + str(objectid) + ", along with " +
                            str(countAfter - 1) +
                            " observations were assigned an existing EO: " +
                            str(eoid) + ". " + str(observation_num) + "/" +
                            str(total_obs) + " completed.")
                    #if no existing EOs selected within separation distance, select all observations within the separation distance and assign new random word
                    else:
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                data_lyr, "WITHIN_A_DISTANCE", data_lyr,
                                distance, "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                data_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))
                            countAfter = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(data_lyr,
                                                   "EO_NEW") as cursor:
                            for row in cursor:
                                row[0] = str(
                                    word_index)  #word_list[word_index]
                                cursor.updateRow(row)
                        arcpy.AddMessage(
                            "ObjectID " + str(objectid) + ", along with " +
                            str(countAfter - 1) +
                            " observations were assigned a new EO: " +
                            str(word_index) + ". " + str(observation_num) +
                            "/" + str(total_obs) + " completed.")
                        word_index += 1
                observation_num += 1
                arcpy.SelectLayerByAttribute_management(
                    data_lyr, "CLEAR_SELECTION")

        arcpy.AddMessage("Beginning to assign SF IDs")
        observation_num = 1
        search_fields = [objectid_field, "SF_ID", "SF_NEW", species_code]
        with arcpy.da.SearchCursor(data_lyr, search_fields) as cursor:
            for row in cursor:
                objectid = row[0]
                if row[2] is not None or (row[1] is not None and row[1] != 0):
                    arcpy.AddMessage(
                        "ObjectID " + str(objectid) +
                        " SF Observation number " + str(observation_num) +
                        "/" + str(total_obs) +
                        " has already been assigned to a new or existing SF.")
                else:
                    sname = row[3]

                    #check for existing SFs within 8m of feature (7m search because of 1m buffer on SF layers)
                    arcpy.SelectLayerByAttribute_management(
                        data_lyr, "NEW_SELECTION",
                        "{}={}".format(objectid_field, objectid))
                    arcpy.SelectLayerByAttribute_management(
                        sf_lyr, 'NEW_SELECTION',
                        eo_species_query.format(species_code_field, sname))
                    arcpy.SelectLayerByLocation_management(
                        sf_lyr, "WITHIN_A_DISTANCE", data_lyr, "7 METERS",
                        "SUBSET_SELECTION")
                    #check for selection on sf_merge layer - if there is a selection, get sfid, select all observations within the separation distance, and assign existing eoid to selected features
                    if arcpy.Describe('sf_lyr').fidset != u'':
                        with arcpy.da.SearchCursor('sf_lyr',
                                                   sf_id_field) as cursor:
                            #sfid = sorted({row[0] for row in cursor}, reverse=True)[0] #use this line if you want to use the newest SF ID within separation distance
                            sfid = ",".join(
                                sorted({str(row[0])
                                        for row in cursor})
                            )  # use this line if you want to list all SF IDs within separation distance
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                data_lyr, "WITHIN_A_DISTANCE", data_lyr,
                                "7 METERS", "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                data_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))

                            countAfter = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(data_lyr,
                                                   "SF_ID") as cursor:
                            for row in cursor:
                                row[0] = sfid
                                cursor.updateRow(row)
                        arcpy.AddMessage(
                            "ObjectID " + str(objectid) + ", along with " +
                            str(countAfter - 1) +
                            " observations were assigned an existing SF: " +
                            str(sfid) + ". " + str(observation_num) + "/" +
                            str(total_obs) + " completed.")
                    #if no existing SFs selected within separation distance, select all observations within the separation distance and assign new random word
                    else:
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                data_lyr, "WITHIN_A_DISTANCE", data_lyr,
                                "7 METERS", "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                data_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))
                            countAfter = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(
                                data_lyr, ["SF_NEW", "EO_NEW"]) as cursor:
                            for row in cursor:
                                if row[1] != None:
                                    sf_id = row[1] + "_" + str(
                                        word_index)  #word_list[word_index]
                                    row[0] = sf_id
                                else:
                                    sf_id = str(
                                        word_index)  #word_list[word_index]
                                    row[0] = sf_id
                                cursor.updateRow(row)
                        arcpy.AddMessage(
                            "ObjectID " + str(objectid) + ", along with " +
                            str(countAfter - 1) +
                            " observations were assigned a new SF: " + sf_id +
                            ". " + str(observation_num) + "/" +
                            str(total_obs) + " completed.")
                        word_index += 1
                observation_num += 1
                arcpy.SelectLayerByAttribute_management(
                    data_lyr, "CLEAR_SELECTION")

        #create unique id value for each unique source feature
        i = 1
        with arcpy.da.SearchCursor(data_lyr,
                                   ["SF_ID", "SF_NEW", "UNIQUEID"]) as cursor:
            sfid1 = sorted({row[0] for row in cursor})
        with arcpy.da.SearchCursor(data_lyr,
                                   ["SF_ID", "SF_NEW", "UNIQUEID"]) as cursor:
            sfid2 = sorted({row[1] for row in cursor})
        sfid = sfid1 + sfid2
        sfid = [x for x in sfid if x is not None]
        sfid = [x.encode('UTF8') for x in sfid]
        for sf in sfid:
            with arcpy.da.UpdateCursor(
                    data_lyr, ["SF_ID", "SF_NEW", "UNIQUEID"]) as cursor:
                for row in cursor:
                    if row[0] == sf:
                        row[2] = i
                        cursor.updateRow(row)
                    elif row[1] == sf:
                        row[2] = i
                        cursor.updateRow(row)
                    else:
                        pass
            i += 1

        add_fields = add_fields_int + add_fields_text
        for data in data_in:
            arcpy.JoinField_management(data, "temp_join_id", data_lyr,
                                       "temp_join_id", add_fields)
            arcpy.DeleteField_management(data, "temp_join_id")

        arcpy.Delete_management("in_memory")
        return
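
The paired countBefore/countAfter loops above are the core trick: a fixed-point iteration that keeps adding same-species features within the separation distance until the selection stops growing. Distilled into a standalone helper (the name and signature are mine, not the tool's):

import arcpy

def grow_selection(layer, species_where, distance):
    # Expand the current selection to everything within `distance` of it,
    # restricted to `species_where`, until no new features are picked up
    count_before, count_after = 0, 1
    while count_before != count_after:
        count_before = int(arcpy.GetCount_management(layer).getOutput(0))
        arcpy.SelectLayerByLocation_management(
            layer, "WITHIN_A_DISTANCE", layer, distance, "ADD_TO_SELECTION")
        arcpy.SelectLayerByAttribute_management(
            layer, "SUBSET_SELECTION", species_where)
        count_after = int(arcpy.GetCount_management(layer).getOutput(0))
    return count_after
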
Ejemplo n.º 21
0
            if str(simulate) == "true":
                preserve("PatientScr")

        # delete all interim calculated fields
        arcpy.DeleteField_management(UnpreservedParcels_Output, [
            'LocalValue', 'PatientWgt', 'AvgNeighSz', 'COUNT_NEAR',
            'NeighbArea', 'SUM_POLY_A', 'GreedyWght', 'CombndAcre',
            'SUM_Weight', 'POLY_AREA'
        ])

        # create a placeholder shapefile in memory and re-merge the preserved and unpreserved parcels
        UnpreservedPlaceholder = arcpy.CreateFeatureclass_management(
            'in_memory', 'unprsrvdPH', 'POLYGON')
        arcpy.CopyFeatures_management(UnpreservedParcels_Output,
                                      UnpreservedPlaceholder)
        arcpy.Merge_management([UnpreservedPlaceholder, PreservedParcels],
                               UnpreservedParcels_Output)

        # count the number of simulations
        simcount = simcount + 1

        # delete all items created in memory
        arcpy.Delete_management("in_memory")

    # Add a layer for that new shapefile to the active data frame
    currentMap = arcpy.mapping.MapDocument("CURRENT")
    currentDataFrame = currentMap.activeDataFrame
    layerToBeDisplayed = arcpy.mapping.Layer(UnpreservedParcels_Output)
    arcpy.mapping.AddLayer(currentDataFrame, layerToBeDisplayed, "TOP")
    del currentMap

except Exception as e:
Ejemplo n.º 22
0
    def execute(self, params, messages):
        arcpy.AddMessage(
            """Welcome to the Source Feature and EO Assigner! This tool is designed to prepare a feature class or shapefile for bulk load into Biotics by assigning an existing or new SFID and EOID grouping variable to observations based on eparation distance. This used to be done manually, so sit back and enjoy all the other work you can be doing instead of this!"""
        )

        in_points = params[0].valueAsText
        in_lines = params[1].valueAsText
        in_poly = params[2].valueAsText
        species_code = params[3].valueAsText
        lu_separation = params[4].valueAsText
        eo_reps = params[5].valueAsText
        eo_id_field = params[6].valueAsText
        eo_sourcept = params[7].valueAsText
        eo_sourceln = params[8].valueAsText
        eo_sourcepy = params[9].valueAsText
        sf_id_field = params[10].valueAsText
        species_code_field = params[11].valueAsText
        flowlines = params[12].valueAsText
        network = params[13].valueAsText
        dams = params[14].valueAsText
        snap_dist = params[15].valueAsText

        arcpy.AddMessage("Preparing input data for use in EO/SF assignment.")
        data_in = []
        data_out = []
        if in_points:
            data_in.append(in_points)
            data_out.append("pts")
        if in_lines:
            data_in.append(in_lines)
            data_out.append("lines")
        if in_poly:
            data_in.append(in_poly)
            data_out.append("polys")
        join_id = 1
        for i, o in zip(data_in, data_out):
            if len(arcpy.ListFields(i, "temp_join_id")) == 0:
                arcpy.AddField_management(i, "temp_join_id", "TEXT")
            with arcpy.da.UpdateCursor(i, "temp_join_id") as cursor:
                for row in cursor:
                    row[0] = str(join_id)
                    cursor.updateRow(row)
                    join_id += 1
            arcpy.FeatureVerticesToPoints_management(i, o, "ALL")
        species_pt = arcpy.Merge_management(data_out, "data_merge")
        species_pt_copy = arcpy.FeatureClassToFeatureClass_conversion(
            species_pt, "in_memory", "species_pt_copy")

        #prepare single fc from biotics sf fcs
        sfs_in = [eo_sourcept, eo_sourceln, eo_sourcepy]
        sfs_out = ["eo_sourcept", "eo_sourceln", "eo_sourcepy"]
        for sf_in, sf_out in zip(sfs_in, sfs_out):
            arcpy.Buffer_analysis(sf_in, sf_out, 1)
        sf_merge = arcpy.Merge_management(sfs_out, "sf_merge")

        #delete identical points with tolerance to increase speed
        arcpy.DeleteIdentical_management(species_pt, ["temp_join_id", "Shape"],
                                         "100 Meters")

        #add EO/SF ID fields if they do not already exist
        add_fields_text = ["SF_ID", "SF_NEW", "EO_ID", "EO_NEW"]
        for field in add_fields_text:
            if len(arcpy.ListFields(species_pt, field)) == 0:
                arcpy.AddField_management(species_pt, field, "TEXT", "", "",
                                          100)
        add_fields_int = ["UNIQUEID"]
        for field in add_fields_int:
            if len(arcpy.ListFields(species_pt, field)) == 0:
                arcpy.AddField_management(species_pt, field, "LONG")

        #create lookup dictionary of separation distances from lookup table
        lu_sep = {
            f[0]: f[1]
            for f in arcpy.da.SearchCursor(species_pt,
                                           [species_code, lu_separation])
        }
        #create list of species
        with arcpy.da.SearchCursor(species_pt, species_code) as cursor:
            species_list = sorted({row[0] for row in cursor})

        if arcpy.ListFields(species_pt, species_code)[0].type == 'Integer':
            species_query = "{}={}"
        else:
            species_query = "{}='{}'"
        if arcpy.ListFields(species_pt,
                            species_code_field)[0].type == 'Integer':
            eo_species_query = "{}={}"
        else:
            eo_species_query = "{}='{}'"

        #separate buffered flowlines at dams
        if dams:
            #buffer dams by 1.1 meters
            dam_buff = arcpy.Buffer_analysis(dams, "dam_buff", "1.1 Meter",
                                             "FULL", "FLAT")

        total_species = len(species_list)
        species_rep = 1
        group_id = 1
        for species in species_list:
            arcpy.AddMessage("Assigning EO for " + str(species_rep) + "/" +
                             str(total_species) + ": " + species)
            species_rep += 1
            s = arcpy.FeatureClassToFeatureClass_conversion(
                species_pt_copy, "in_memory", "s",
                species_query.format(species_code, species))
            eo = arcpy.MakeFeatureLayer_management(
                eo_reps, "eo",
                eo_species_query.format(species_code_field, species))
            sf_lyr = arcpy.MakeFeatureLayer_management(
                sf_merge, "sf_lyr",
                eo_species_query.format(species_code_field, species))
            for k, v in lu_sep.items():
                if k == species:
                    distance = (v * 1000) - 2

            #arcpy.AddMessage("Creating service area line layer for " +species + " to compare to existing EOs")
            #create service area line layer
            eo_service_area_lyr = arcpy.na.MakeServiceAreaLayer(
                network,
                "eo_service_area_lyr",
                "Length",
                "TRAVEL_FROM",
                distance,
                polygon_type="NO_POLYS",
                line_type="TRUE_LINES",
                overlap="OVERLAP")
            eo_service_area_lyr = eo_service_area_lyr.getOutput(0)
            subLayerNames = arcpy.na.GetNAClassNames(eo_service_area_lyr)
            eo_facilitiesLayerName = subLayerNames["Facilities"]
            eo_serviceLayerName = subLayerNames["SALines"]
            arcpy.na.AddLocations(eo_service_area_lyr, eo_facilitiesLayerName,
                                  s, "", snap_dist)
            arcpy.na.Solve(eo_service_area_lyr)
            eo_lines = arcpy.mapping.ListLayers(eo_service_area_lyr,
                                                eo_serviceLayerName)[0]
            eo_flowline_clip = arcpy.CopyFeatures_management(
                eo_lines, "eo_service_area")
            eo_flowline_buff = arcpy.Buffer_analysis(eo_flowline_clip,
                                                     "eo_flowline_buff",
                                                     "1 Meter", "FULL",
                                                     "ROUND")
            eo_flowline_diss = arcpy.Dissolve_management(
                eo_flowline_buff, "eo_flowline_diss", multi_part="SINGLE_PART")
            eo_join = arcpy.SpatialJoin_analysis(
                target_features=eo_flowline_diss,
                join_features=eo,
                out_feature_class="eo_join",
                join_operation="JOIN_ONE_TO_MANY",
                join_type="KEEP_ALL",
                match_option="INTERSECT")

            for k, v in lu_sep.items():
                if k == species:
                    distance = ((v * 1000 / 2) - 2)

            #arcpy.AddMessage("Creating service area line layer for " +species + " separation distance grouping")
            sp_service_area_lyr = arcpy.na.MakeServiceAreaLayer(
                network,
                "sp_service_area_lyr",
                "Length",
                "TRAVEL_FROM",
                distance,
                polygon_type="NO_POLYS",
                line_type="TRUE_LINES",
                overlap="OVERLAP")
            sp_service_area_lyr = sp_service_area_lyr.getOutput(0)
            subLayerNames = arcpy.na.GetNAClassNames(sp_service_area_lyr)
            sp_facilitiesLayerName = subLayerNames["Facilities"]
            sp_serviceLayerName = subLayerNames["SALines"]
            arcpy.na.AddLocations(sp_service_area_lyr, sp_facilitiesLayerName,
                                  s, "", snap_dist)
            arcpy.na.Solve(sp_service_area_lyr)
            sp_lines = arcpy.mapping.ListLayers(sp_service_area_lyr,
                                                sp_serviceLayerName)[0]
            sp_flowline_clip = arcpy.CopyFeatures_management(
                sp_lines, "sp_service_area")
            sp_flowline_buff = arcpy.Buffer_analysis(sp_flowline_clip,
                                                     "sp_flowline_buff",
                                                     "1 Meter", "FULL",
                                                     "ROUND")
            sp_flowline_diss = arcpy.Dissolve_management(
                sp_flowline_buff, "sp_flowline_diss", multi_part="SINGLE_PART")

            if dams:
                #split flowline buffers at dam buffers by erasing area of dam
                flowline_erase = arcpy.Erase_analysis(sp_flowline_diss,
                                                      dam_buff,
                                                      "flowline_erase")
                multipart_input = flowline_erase
                #multi-part to single part to create unique polygons
                single_part = arcpy.MultipartToSinglepart_management(
                    multipart_input, "single_part")
            else:
                single_part = sp_flowline_diss

            #create unique group id
            arcpy.AddField_management(single_part, "group_id", "LONG")
            with arcpy.da.UpdateCursor(single_part, "group_id") as cursor:
                for row in cursor:
                    row[0] = group_id
                    cursor.updateRow(row)
                    group_id += 1

            sp_join = arcpy.SpatialJoin_analysis(
                target_features=single_part,
                join_features=eo_join,
                out_feature_class="sp_join",
                join_operation="JOIN_ONE_TO_MANY",
                join_type="KEEP_ALL",
                match_option="INTERSECT")
            sp_join1 = arcpy.SpatialJoin_analysis(
                target_features=s,
                join_features=sp_join,
                out_feature_class="join",
                join_operation="JOIN_ONE_TO_MANY",
                join_type="KEEP_ALL",
                match_option="INTERSECT",
                search_radius="200 METERS")

            arcpy.AddField_management(sp_join1, "eoid", "TEXT", "", "", 100)
            #Create empty dictionaries
            Ndi = {}
            #create SearchCursor object
            with arcpy.da.SearchCursor(sp_join1,
                                       ["group_id", eo_id_field]) as cursor:
                for row in cursor:
                    if row[1]:
                        if not row[0] in Ndi:
                            Ndi[row[0]] = [row[1]]
                        else:
                            Ndi[row[0]].append(row[1])

            Ndi = {k: list(set(v)) for k, v in Ndi.items()}

            #write the joined EOID lists back to each group (skip when none were found)
            if Ndi:
                with arcpy.da.UpdateCursor(sp_join1,
                                           ["group_id", "eoid"]) as cursor:
                    for row in cursor:
                        if row[0] in Ndi:
                            row[1] = ",".join(map(str, Ndi[row[0]]))
                            cursor.updateRow(row)

            arcpy.DeleteIdentical_management(
                sp_join1, ["temp_join_id", "eoid", "group_id"])

            id_fill = {
                f[0]: [f[1], f[2]]
                for f in arcpy.da.SearchCursor(
                    sp_join1, ["temp_join_id", "group_id", "eoid"])
            }

            with arcpy.da.UpdateCursor(
                    species_pt, ["temp_join_id", "EO_NEW", "EO_ID"]) as cursor:
                for row in cursor:
                    for k, v in id_fill.items():
                        if k == row[0] and v[1] is not None:
                            row[2] = str(v[1])
                            cursor.updateRow(row)
                        elif k == row[0] and v[1] is None:
                            row[1] = str(v[0])
                            cursor.updateRow(row)
                        else:
                            pass

        arcpy.DeleteIdentical_management(species_pt,
                                         ["temp_join_id", "EO_ID", "EO_NEW"])

        #get name of true OID field
        objectid_field = arcpy.Describe(species_pt).OIDFieldName
        species_lyr = arcpy.MakeFeatureLayer_management(
            species_pt, "species_lyr")

        arcpy.AddMessage("Assigning source feature IDs to all records.")
        search_fields = [objectid_field, "SF_ID", "SF_NEW", species_code]
        with arcpy.da.SearchCursor(species_lyr, search_fields) as cursor:
            for row in cursor:
                objectid = row[0]
                if row[2] is not None or (row[1] is not None and row[1] != 0):
                    pass
                else:
                    sname = row[3]

                    #check for existing SFs within 9m of feature (8m because of 1m buffer on SF layers)
                    arcpy.SelectLayerByAttribute_management(
                        species_lyr, "NEW_SELECTION",
                        "{}={}".format(objectid_field, objectid))
                    arcpy.SelectLayerByAttribute_management(
                        sf_lyr, 'NEW_SELECTION',
                        eo_species_query.format(species_code_field, sname))
                    arcpy.SelectLayerByLocation_management(
                        sf_lyr, "WITHIN_A_DISTANCE", species_lyr, "8 METERS",
                        "SUBSET_SELECTION")
                    #check for selection on sf_merge layer - if there is a selection, get sfid, select all observations within the separation distance, and assign existing eoid to selected features
                    if arcpy.Describe('sf_lyr').fidset != u'':
                        with arcpy.da.SearchCursor('sf_lyr',
                                                   sf_id_field) as cursor:
                            #sfid = sorted({row[0] for row in cursor}, reverse=True)[0] #use this line if you want to use the newest SF ID within separation distance
                            sfid = ",".join(
                                sorted({str(row[0])
                                        for row in cursor})
                            )  # use this line if you want to list all SF IDs within separation distance
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "species_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                species_lyr, "WITHIN_A_DISTANCE", species_lyr,
                                "9 METERS", "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                species_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))
                            countAfter = int(
                                arcpy.GetCount_management(
                                    "species_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(species_lyr,
                                                   "SF_ID") as cursor:
                            for row in cursor:
                                row[0] = sfid
                                cursor.updateRow(row)
                    #if no existing SFs selected within separation distance, select all observations within the separation distance and assign new random word
                    else:
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "species_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                species_lyr, "WITHIN_A_DISTANCE", species_lyr,
                                "9 METERS", "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                species_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))
                            countAfter = int(
                                arcpy.GetCount_management(
                                    "species_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(
                                species_lyr, ["SF_NEW", "EO_NEW"]) as cursor:
                            for row in cursor:
                                if row[1] != None:
                                    sf_id = row[1] + "_" + str(
                                        group_id)  #word_list[word_index]
                                    row[0] = sf_id
                                else:
                                    sf_id = str(
                                        group_id)  #word_list[word_index]
                                    row[0] = sf_id
                                cursor.updateRow(row)
                        group_id += 1
                arcpy.SelectLayerByAttribute_management(
                    species_lyr, "CLEAR_SELECTION")

        #create unique id value for each unique source feature
        i = 1
        with arcpy.da.SearchCursor(species_lyr, ["SF_ID"]) as cursor:
            sfid1 = sorted({row[0] for row in cursor})
        with arcpy.da.SearchCursor(species_lyr, ["SF_NEW"]) as cursor:
            sfid2 = sorted({row[0] for row in cursor})
        sfid = sfid1 + sfid2
        sfid = [x for x in sfid if x is not None]
        for sf in sfid:
            with arcpy.da.UpdateCursor(
                    species_lyr, ["SF_ID", "SF_NEW", "UNIQUEID"]) as cursor:
                for row in cursor:
                    if row[0] == sf:
                        row[2] = i
                        cursor.updateRow(row)
                    elif row[1] == sf:
                        row[2] = i
                        cursor.updateRow(row)
                    else:
                        pass
            i += 1

        add_fields = add_fields_int + add_fields_text
        for data in data_in:
            arcpy.JoinField_management(data, "temp_join_id", species_lyr,
                                       "temp_join_id", add_fields)

        for data in data_in:
            arcpy.DeleteField_management(data, "temp_join_id")
        arcpy.Delete_management("in_memory")

        return
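
The Ndi dictionary above is a group-by: collect the distinct EOIDs that intersect each service-area group, then join them into one string. A sketch of the same idea with collections.defaultdict (the helper name is mine; the default field names follow the script):

import arcpy
from collections import defaultdict

def eoids_by_group(table, group_field="group_id", eoid_field="EO_ID"):
    # Map each service-area group to the distinct EOIDs that intersect it
    groups = defaultdict(set)
    with arcpy.da.SearchCursor(table, [group_field, eoid_field]) as cursor:
        for group, eoid in cursor:
            if eoid:
                groups[group].add(eoid)
    # Join the ids the same way the tool does, e.g. "1234,5678"
    return {g: ",".join(map(str, sorted(ids))) for g, ids in groups.items()}
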
Ejemplo n.º 23
0
no_ls_trn = train_count
no_ls_tst = test_count
arcpy.Erase_analysis(
    "all_poi.shp", hey,
    "erased_poi.shp")  ##erase the landslide pixels from the all-pixels layer
arcpy.SubsetFeatures_ga(
    "erased_poi.shp", "data_trn.shp", "", no_ls_trn, "ABSOLUTE_VALUE"
)  ###select as many random non-landslide pixels as there are training landslide pixels
arcpy.SubsetFeatures_ga(
    "erased_poi.shp", "data_tst.shp", "", no_ls_tst, "ABSOLUTE_VALUE"
)  ###select as many random non-landslide pixels as there are validation landslide pixels

merged_train = "hey_poi.shp", "data_trn.shp"
merged_test = "hey_poi_test.shp", "data_tst.shp"

arcpy.Merge_management(merged_train, "train.shp")  #Train data
arcpy.Merge_management(merged_test, "validation.shp")  #Validation data
#//////////////////CALCULATING FREQUENCY RATIO AND INFORMATION VALUE///////////
analiz_hey = "train_1.shp"
arcpy.AddMessage("Extracting landslide pixels")
for i in rec_list:
    masking = ExtractByMask(i, analiz_hey)
    mask_out = os.path.join(rec, "ext" + i)
    masking.save(mask_out)
    arcpy.AddMessage(i + " is being processed")
hey_list = arcpy.ListDatasets("ext*", "Raster")
for n in hey_list:
    d = []
    fields0 = [c.name for c in arcpy.ListFields(n) if not c.required]

    for k in fields0:
Ejemplo n.º 24
0
    # layer = arcpy.MakeFeatureLayer_management(wgs84_path)
    layer = arcpy.MakeFeatureLayer_management(wetland)

    selection = arcpy.SelectLayerByLocation_management(
        layer,
        overlap_type='WITHIN_A_DISTANCE',
        select_features=mask,
        search_distance="3 Kilometers")

    print('Features selected.')

    arcpy.CopyFeatures_management(selection, layer_name_3km)

    del layer
    del selection

    arcpy.Project_management(in_dataset=layer_name_3km,
                             out_dataset=layer_name_wgs84,
                             out_coor_system=outCS,
                             transform_method=transformation,
                             preserve_shape="PRESERVE_SHAPE")

    print(layer_name_wgs84 + ' projection successful.')

    print(wetland + ' is finished.')

    out_file_list.append(layer_name_wgs84)

arcpy.Merge_management(out_file_list, "Wetlands_GOMcoast.shp", "",
                       "ADD_SOURCE_INFO")
Ejemplo n.º 25
0
            continue
    print "Processing shapefile : ", in_file
    # Process: Project
    try:
        x = arcpy.Project_management(str(in_file), str(out_file), "PROJCS['NAD_1983_UTM_Zone_21N',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-57.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]]", "", "PROJCS['NAD_1983_UTM_Zone_21N',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-57.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]]", "NO_PRESERVE_SHAPE", "", "NO_VERTICAL")
        print x
        print out_file, ' created!'
    except Exception as e:
        print 'Projection failed for', in_file, ':', e

    print 'Coordinates adjusted'


# # Process: Merge
park = r'D:\OneDrive\Mahdiyeh\my course 2\gis\small project\data\Shapefiles\input file\option\out_park.shp'
vacant = r'D:\OneDrive\Mahdiyeh\my course 2\gis\small project\data\Shapefiles\input file\option\out_vacant.shp'
all_option = r'D:\OneDrive\Mahdiyeh\my course 2\gis\small project\data\Shapefiles\input file\option\all_option.shp'
arcpy.Merge_management("'%s';'%s'" % (park, vacant), all_option, ("NO_SEQUENC \"NO_SEQUENC\" true true false 11 Double 0 11 ,First,#,{park},NO_SEQUENC,-1,-1;CODE_USAGE \"CODE_USAGE\" true true false 3 Text 0 0 ,First,#,{park},CODE_USAGE,-1,-1;USAG_DESCR \"USAG_DESCR\" true true false 100 Text 0 0 ,First,#,{park},USAG_DESCR,-1,-1;PERIMETRE \"PERIMETRE\" true true false 19 Double 15 18 ,First,#,{park},PERIMETRE,-1,-1,{vacant},PERIMETRE,-1,-1;SUPERFICIE \"SUPERFICIE\" true true false 19 Double 15 18 ,First,#,{vacant},SUPERFICIE,-1,-1,{vacant},SUPERFICIE,-1,-1;NEAR_FID \"NEAR_FID\" true true false 10 Long 0 10 ,First,#,{park},NEAR_FID,-1,-1;NEAR_DIST \"NEAR_DIST\" true true false 19 Double 0 0 ,First,#,{park},NEAR_DIST,-1,-1;xcenter \"xcenter\" true true false 19 Double 0 0 ,First,#,{park},xcenter,-1,-1;ycenter \"ycenter\" true true false 19 Double 0 0 ,First,#,{park},ycenter,-1,-1;area \"area\" true true false 19 Double 0 0 ,First,#,{park},area,-1,-1;ARROND \"ARROND\" true true false 32 Text 0 0 ,First,#,{vacant},ARROND,-1,-1").format(park=park, vacant=vacant))

print "end merge"


#   Euclidean Distance data
# Import arcpy module
import arcpy
if arcpy.CheckExtension("Spatial") == "Available":  
    arcpy.CheckOutExtension("Spatial")  
    print "Spatial is enabled"
else:  
    print "Spatial Analyst license is unavailable" 


folder = r'D:\OneDrive\Mahdiyeh\my course 2\gis\small project\data\Shapefiles\input file\to distance'
arcpy.env.workspace = folder
subfolder = arcpy.ListFiles()
print(subfolder)

# Collect the distinct shapefile names that appear across the subfolders
shplist = []
for fol in subfolder:
    arcpy.env.workspace = folder + '\\' + fol
    files = arcpy.ListFeatureClasses()
    # print(files)
    for f in files:
        if f not in shplist:
            shplist.append(f)
print(shplist)

# Build a dictionary mapping each shapefile name to its full path in every subfolder that contains it
dictionary = {}
for shp in shplist:
    dictionary[shp] = []
    for fol in subfolder:
        arcpy.env.workspace = folder + '\\' + fol
        files = arcpy.ListFeatureClasses()
        if shp in files:
            dictionary[shp].append(folder + '\\' + fol + '\\' + shp)

# Merge the same-named datasets (shpfolder is the output folder, defined elsewhere)
for key in dictionary.keys():
    li = dictionary[key]
    print('Merging features:', li)
    arcpy.Merge_management(li, shpfolder + '\\' + key + '.shp')
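
The name-grouping pass above can be written more compactly with collections.defaultdict; this sketch swaps arcpy.ListFeatureClasses for a plain os.listdir filter on the .shp extension, which is equivalent for folders of shapefiles:

import os
from collections import defaultdict

# Shapefile name -> full path in every subfolder that contains it
dictionary = defaultdict(list)
for fol in subfolder:
    workspace = os.path.join(folder, fol)
    for f in os.listdir(workspace):
        if f.endswith(".shp"):
            dictionary[f].append(os.path.join(workspace, f))
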
Ejemplo n.º 27
0
import arcpy, os

Annotation_Merge_shp = "D:\\TAUNGOO_PILOT_TEST\\Python_test\\Merge\\Annotation_Merge7.shp"
#ShpDir = "D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\"
ShpDir = "D:\\TAUNGOO_PILOT_TEST\\Python_test\\CadAnnoToShp\\test\\"

file_list = ""

for file in os.listdir(ShpDir):
    if file.endswith(".shp"):
        shp = "'" + ShpDir + file + "'"
        file_list = file_list + shp + ";"
file_list = file_list[:-1]
print(file_list)

#in_shp = "'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_483_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_490_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_491_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_492_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_492_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_496_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_498_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_499_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_502_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_504_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_521_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_523_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_524_amyanmar_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_524_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_525_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_525_b_myanmar_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_527_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_527_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_528_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_528_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_529_A_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_530_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_530_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_532_a_myanmar_no_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_532_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_532_c_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_533_a_myanmar_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_533_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_534_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_535_a_myanmar_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_535_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_542_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_546_b_myanmar_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_547_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_548_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_549_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_550_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_551_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_551_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_551_c_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_552_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_552_b_myanmarno_dxf_Annotation.s
hp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_552_c_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_554_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_555_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_556_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_557_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_558_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_573_myanmar_no_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_574_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_580_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_580_b_plotnomyan_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_584_b_myanmarnbo_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_585_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_586_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_588_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_588_c_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_588_d_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_589_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_589_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_590_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_591_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_591_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_591_c_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_592_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_593_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_596_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_600_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_600_c_myanno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_601_a_myanno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_601_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_601_c_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_601_d_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_602_a_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_602_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_602_c_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_603_a_myanno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_603_b_myanmarno_dxf_Annotation.shp';'D:\\TAUNGOO_PILOT_TEST\\Python_test\\out_shp\\k_606_b_myanmarno_dxf_Annotation.shp'"
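# file_list is not defined in this excerpt. A minimal sketch of how it is
# presumably built (assumption: ShpDir is the out_shp export folder used in
# the hardcoded string above):
file_list = [os.path.join(ShpDir, f)
             for f in os.listdir(ShpDir) if f.endswith(".shp")]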
in_shp = file_list

# Field-mapping string for the merge: one output text field named "Text"
# (alias "Plot Number", length 50); each input shapefile contributes its
# "Text" field under the "First" merge rule.
para_list = "Text \\\"Plot Number\\\" true true false 50 Text 0 0 ,First,#,"

for fname in os.listdir(ShpDir):
    if fname.endswith(".shp"):
        shp = os.path.join(ShpDir, fname)
        para_list = para_list + shp + ",Text,-1,-1" + ","
para_list = '"' + para_list[:-1] + '"'  # drop the trailing comma and quote the whole string
print(para_list)
arcpy.Merge_management(in_shp, Annotation_Merge_shp, para_list)
print("Annotations to shp conversion complete")
        # The start of this example is cut off in the excerpt; the stranded
        # fragment here evidently renamed each field in oldfList to its
        # counterpart in newfList, e.g.:
        for f in oldfList:
            arcpy.AlterField_management(poi, f,
                                        newfList[oldfList.index(f)],
                                        newfList[oldfList.index(f)])

        # Run spatial join
        spOut = os.path.join(scratchGDB, poi + '_spjoin')
        if not arcpy.Exists(spOut):
            arcpy.SpatialJoin_analysis(poi, wardsFC, spOut, 'JOIN_ONE_TO_MANY')
            print('      1 - Created spatial join layer')

        # Collect spatial join outputs in a list
        mergeList.append(spOut)

# ------------------------------------------- Creating Error Outputs ------------------------------------------- #

## ERROR OUTPUT 1 ----- for POIs that fall outside the spatial extent of their specified ward
spatialErrors = os.path.join(errorsGDB, 'poi_spatial_anomalies_' + date)
arcpy.Merge_management(mergeList, spatialErrors)

# Clean up
fList = [
    'poi_wardcode', 'poi_wardname', poi_idField, 'poi_lganame', 'poi_lgacode',
    'poi_statecode', 'poi_statename', 'wardname', 'wardcode', 'lganame',
    'lgacode', 'statename', ward_idField, 'poi_file_name'
]

for f in arcpy.ListFields(spatialErrors):
    if f.name not in fList and not f.required:
        arcpy.DeleteField_management(spatialErrors, f.name)
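# The same cleanup in a single geoprocessing call: DeleteField also accepts
# a semicolon-delimited list of fields, avoiding one tool invocation per
# field (same fList keep-list as above):
drop = [f.name for f in arcpy.ListFields(spatialErrors)
        if f.name not in fList and not f.required]
if drop:
    arcpy.DeleteField_management(spatialErrors, ';'.join(drop))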

# Add error flagging fields
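# A minimal sketch of the flagging step announced above (assumptions: the
# field name and value are hypothetical, and the 'PYTHON3' expression type
# assumes an ArcGIS Pro runtime):
arcpy.AddField_management(spatialErrors, 'error_flag', 'TEXT',
                          field_length=50)
arcpy.CalculateField_management(spatialErrors, 'error_flag',
                                "'OUTSIDE_WARD_EXTENT'", 'PYTHON3')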
        df.panToExtent(newextent)  # pan to the new tile
        kachelx = kachelx + 1  # increment the column counter
    # jump down to the next row of tiles
    newextent.XMin = startextent.XMin
    newextent.XMax = startextent.XMax
    newextent.YMax = df.extent.YMax - hoehe
    newextent.YMin = df.extent.YMin - hoehe
    df.extent = newextent
    df.panToExtent(newextent)  # pan to the new tile
    kachelx = 1  # reset the column counter
    kachely = kachely + 1  # increment the row counter

# run the maximum likelihood classification
from arcpy.sa import *

arcpy.CheckOutExtension("Spatial")
a = 1  # running tile counter used in the output names
polyList = []
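# kacheln (the list of raster tiles) is not defined in this excerpt. A
# minimal sketch of one way to produce such a tile list (assumptions:
# eingaberaster and kachelordner are hypothetical names for the input
# raster and tile folder, and the 4 x 4 grid is an arbitrary example):
arcpy.SplitRaster_management(eingaberaster, kachelordner, "kachel_",
                             "NUMBER_OF_TILES", "TIFF",
                             num_rasters="4 4")
kacheln = [os.path.join(kachelordner, f)
           for f in os.listdir(kachelordner) if f.endswith(".tif")]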
print("MaxLikeClassification...")
for kachel in kacheln:
    # classify the tile against the signature file gsgfile; the four
    # optional arguments (a-priori probabilities/file, reject fraction,
    # confidence raster) are left at their defaults
    mlcOut = MLClassify(kachel, gsgfile, "", "", "", "")
    polyPfad = os.path.join(ausgabepfad, "MLC_%03d.shp" % a)
    polyList.append(polyPfad)
    arcpy.RasterToPolygon_conversion(mlcOut, polyPfad, "NO_SIMPLIFY", "")
    print(a)  # progress
    a = a + 1
print("Merging...")
# the .shp extension is required for shapefile output in a folder workspace
arcpy.Merge_management(polyList, os.path.join(ausgabepfad, "mergedPolygon.shp"))
print("Done.")
warte = input()  # keep the console window open until Enter is pressed
Ejemplo n.º 30
0
    projList.append(projFile)

print "Step 3 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

## ---------------------------------------------------------------------------
## 4. Merge features
## Description: Merge the projected feature classes from the three states

print "\nStep 4 starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# Output
mergeFile = os.path.join(out_gdb, "Dams_us")

arcpy.Merge_management(projList, mergeFile)

print "Step 4 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

## ---------------------------------------------------------------------------
## 5. Clip
## Description: Clip to the boundaries of the study area

print "\nStep 5 Clip the features starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# Define the clip features
folderShapefiles = "C:\\GIS_RGB\\Geodatabase\\rgb_bound\\"
listfc = ["RGB_Basin_na_albers.shp", "RGB_Ses_na_albers.shp"]
xy_tolerance = ""