Example #1
    def export_custom_datasets(self, dest_folder):
        list_datasets = []
        root = ET.Element("CustomData")
        for dataset in self.get_custom_datasets():
            # create the destination folder only when at least one custom dataset exists
            if not path.exists(dest_folder):
                makedirs(dest_folder)
            desc = arcpy.Describe(dataset)
            msg = ""
            if desc.dataType == "FeatureClass":
                if int(arcpy.GetCount_management(dataset).getOutput(0)) > 0:
                    try:
                        arcpy.FeatureClassToFeatureClass_conversion(
                            dataset, dest_folder, desc.baseName)
                        exported = "True"
                        list_datasets.append(dataset)
                    except arcpy.ExecuteError:
                        exported = "False"
                        msg = arcpy.GetMessages()
                else:
                    exported = "False"
                    msg = "No Features found"
            elif desc.dataType == "Table":
                if int(arcpy.GetCount_management(dataset).getOutput(0)) > 0:
                    try:
                        arcpy.TableToTable_conversion(
                            dataset, dest_folder,
                            "{}.{}".format(desc.baseName, "dbf"))
                        exported = "True"
                        list_datasets.append(dataset)
                    except arcpy.ExecuteError:
                        exported = "False"
                        msg = arcpy.GetMessages()
                else:
                    exported = "False"
                    msg = "No Records in Table"
            elif desc.dataType == "RasterDataset":
                try:
                    arcpy.RasterToOtherFormat_conversion(
                        dataset, dest_folder, "TIFF")
                    exported = "True"
                    list_datasets.append(dataset)
                except arcpy.ExecuteError:
                    exported = "False"
                    msg = arcpy.GetMessages()
            else:
                exported = "False"
                msg = "Unsupported dataType for export"

            nodeDataset = ET.SubElement(root, desc.dataType)
            nodeDataset.set("exported", str(exported))
            ET.SubElement(nodeDataset, "Name").text = desc.baseName
            ET.SubElement(nodeDataset, "Source").text = dataset
            if msg:
                ET.SubElement(nodeDataset, "Message").text = str(msg)
        if list_datasets:
            indent(root)
            tree = ET.ElementTree(root)
            tree.write(path.join(dest_folder, "CustomData.xml"), 'utf-8', True)
        return list_datasets
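Note: `indent` is not defined in this snippet; it most likely refers to the well-known ElementTree pretty-printing recipe. A minimal version, under that assumption:

def indent(elem, level=0):
    # Classic ElementTree recipe: inserts newlines and two-space
    # indentation in place so tree.write() emits human-readable XML.
    i = "\n" + level * "  "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + "  "
        for child in elem:
            indent(child, level + 1)
        if not child.tail or not child.tail.strip():
            child.tail = i
    elif level and (not elem.tail or not elem.tail.strip()):
        elem.tail = i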
Example #2
#First run the "Split By Attributes (Analysis)" tool for both the unique points and the geographies,
#then amend the paths and double-check that the parameters are set properly in the function calls below

import arcpy, os

arcpy.env.overwriteOutput = True

#paths
pointF = ""  #"C:/Users/Ignacio/Desktop/finaloutside/prec" -> folder with the geocoded points split into individual shapefiles by unique attribute value
geographyF = ""  #"C:/Users/Ignacio/Desktop/finaloutside/geographies" -> folder with the geographies split into individual geometries by unique value
mainP = ""  #"C:/Users/Ignacio/Desktop/finaloutside/Export.shp" -> a shapefile with the entire point dataset
field = ""  #"Precinct" -> field shared by both the geographies and the points; here both are split by precinct unique values, so we check that the geocoded points fall within the geographic bounds their tabular data specifies

#auto get all unique values from field into var myValues | in my case getting all unique precinct values
with arcpy.da.SearchCursor(mainP, field) as cursor:
    myValues = sorted({row[0] for row in cursor})
#loop through all the values in designated fields and create selections by locations and then invert to get outside points.
for value in myValues:
    #make a feature layer for each unique-value points shapefile
    arcpy.MakeFeatureLayer_management(pointF + "/" + str(value) + ".shp",
                                      "geog")
    #select the points that intersect the matching geography layer within a 150 ft search distance, then invert the selection so only the points outside remain
    arcpy.SelectLayerByLocation_management(
        "geog", "intersect", geographyF + "/" + str(value) + ".shp",
        "150 feet", "", "INVERT")
    #save each iteration of the analysis into the results folder (change the FOLDER PATH to your own results folder)
    arcpy.FeatureClassToFeatureClass_conversion(
        "geog", "C:/Users/Ignacio/Desktop/finaloutside/results",
        str(value) + "_outside")
print "Finished"
Example #3
def SolarMain(workspace, search_query, building_footprints, dsm_surface,
              time_configuration, output_location_parameter, output_root):
    """This function automates the SEEP Solar Model - which yields solar installation estimates for buildings in a feature class"""
    try:
        workspace_location = os.path.dirname(workspace)
        #arcpy.AddMessage(workspace_location)

        fc = building_footprints
        fields = ['Address', 'Area', 'OBJECTID', 'SHAPE@']
        sql_where_clause = search_query
        cell_size = 0.5
        desc = arcpy.Describe(fc)
        sr = desc.spatialReference
        output_location = output_location_parameter + os.sep

        # SCRIPT OUTPUT LIST (for Merge function)
        seep_sol_map_list = []

        # ENVIRONMENT SETTINGS
        arcpy.env.workspace = workspace
        arcpy.env.scratchworkspace = workspace
        arcpy.env.cellSize = cell_size
        arcpy.env.overwriteOutput = True
        arcpy.env.outputCoordinateSystem = sr

        # CHECKOUT EXTENSIONS
        arcpy.CheckOutExtension("Spatial")

        # ===== Main Loop - process each building row selected by the where clause =====
        cursor = arcpy.da.SearchCursor(fc,
                                       fields,
                                       where_clause=(sql_where_clause))
        for row in cursor:
            # -- Initialize function variables
            object_id = str(row[2])
            fc_out = workspace + os.sep + 'SInt_' + object_id
            select_clause = "Address = " + "'" + row[0] + "'"
            out_raster = workspace + os.sep + 'SInt_r' + object_id
            field = fields[2]  # OBJECTID, used as the value field when rasterizing the footprint

            # -- SetExtent - around Study area
            extent = desc.extent
            arcpy.env.extent = extent

            # -- Create individual feature in_feature, using Select_analysis()
            arcpy.Select_analysis(fc, fc_out, select_clause)

            # -- Create in_feature
            in_feature = arcpy.Describe(fc_out)

            # -- SetExtent - around building
            extent = in_feature.extent
            arcpy.env.extent = extent

            # -- Get points to run solar radiation functions on - Feature to Raster
            arcpy.FeatureToRaster_conversion(fc_out, field, out_raster,
                                             cell_size)

            # -- Raster to Point around building
            #      Initialize function variables
            in_raster = out_raster
            out_point_feature = workspace + os.sep + 'SInt_p' + object_id

            arcpy.RasterToPoint_conversion(in_raster, out_point_feature)

            # -- Run Solar Points - on building rooftop
            #      Init solar variables
            in_point_feature = out_point_feature
            out_sol_feature = workspace + os.sep + 'SInt_SolRaw_' + object_id
            diffuse_model_type = ""
            diffuse_proportion = 0.3
            transmittivity = 0.5

            #      Extend Extent for Solar Radiation calculations (250 m)
            in_buffer = fc_out
            out_buffer = workspace + os.sep + 'SInt_BExtent_' + object_id
            distance = '250 Meters'
            arcpy.Buffer_analysis(in_buffer, out_buffer, distance)

            #      Set new Extent to environment parameters
            buffer_obj = arcpy.Describe(out_buffer)
            arcpy.env.extent = buffer_obj.extent

            arcpy.sa.PointsSolarRadiation(dsm_surface, in_point_feature,
                                          out_sol_feature, "", "", "",
                                          time_configuration, "", "", "", "",
                                          "", "", "", "", diffuse_model_type,
                                          diffuse_proportion, transmittivity,
                                          "", "", "")

            # -- Create Solar Map - Feature to Raster

            #      Initialize
            in_sol_map = out_sol_feature
            sol_field = 'T0'
            out_sol_map = workspace + os.sep + 'SO_SM' + object_id

            #      Set Extents around building again
            extent = in_feature.extent
            arcpy.env.extent = extent

            #      Execute Function
            arcpy.FeatureToRaster_conversion(in_sol_map, sol_field,
                                             out_sol_map, cell_size)

            # -- Generate suitable solar panel area total (total potential area)
            #        See Esri Blog - Solar Siting
            #      Initialization
            in_reclass_raster = out_sol_map
            reclass_field = "Value"

            #      Reclassify - ideal in class 3
            out_reclass = arcpy.sa.Reclassify(
                in_reclass_raster, reclass_field,
                arcpy.sa.RemapRange([[0.0, 900000.0, 1],
                                     [900000.01, 1000000.0, 2],
                                     [1000000.01, 1500000.0, 3]]))

            #      Raster to Polygon (simplify) - using out_reclass as an input
            out_rc_feature = workspace + os.sep + 'SInt_RC_' + object_id
            arcpy.RasterToPolygon_conversion(out_reclass, out_rc_feature)

            #      Select from Reclassified polygon - only class 3 for solar panel area
            rc_where_clause = "gridcode = 3"
            out_ideal_sol = workspace + os.sep + 'SOut_Ideal_' + object_id
            arcpy.Select_analysis(out_rc_feature, out_ideal_sol,
                                  rc_where_clause)

            # -- Determine mean solar rad on ideal rooftop location
            #     Initialize

            #     Check if out_ideal_sol has a feature
            in_zone_data = out_ideal_sol

            #     Continue Initialization
            zone_field = "gridcode"
            in_value_raster = out_sol_map
            out_table = workspace + os.sep + 'SInt_IRad_' + object_id

            #     Execute (fall back to the full reclassified polygon if the
            #     ideal class-3 area is empty)
            try:
                arcpy.sa.ZonalStatisticsAsTable(in_zone_data, zone_field,
                                                in_value_raster, out_table)
            except Exception:
                arcpy.sa.ZonalStatisticsAsTable(out_rc_feature, zone_field,
                                                in_value_raster, out_table)

            actual_rad_cursor = arcpy.da.SearchCursor(out_table, ['MEAN'])
            actual_rad = 0.0

            for out_table_row in actual_rad_cursor:
                actual_rad = float(out_table_row[0])

            # -- Determine Ideal Rooftop Area - limited to 85% of ideal area (for irregular shapes)
            #       uses Statistics_analysis
            #      Initialize
            in_stats = out_ideal_sol
            out_stats = workspace + os.sep + 'SInt_StatA_' + object_id
            statistics_field = [["Shape_Area", "SUM"]]

            #      Execute
            arcpy.Statistics_analysis(in_stats, out_stats, statistics_field)
            ideal_rooftop_area = arcpy.da.SearchCursor(out_stats,
                                                       ['Sum_Shape_Area'])
            rooftop_area = 0.0

            for rooftop_row in ideal_rooftop_area:
                rooftop_area = float(rooftop_row[0]) * 0.85

            # -- Calculate System Estimates using SEEP Estimation Model (a text file)
            #     Calculation Constants:
            lifetime = 33.0
            average_sun_hr = 6.7
            cdn_rate = 0.76
            dc_ac_ratio = 1.1
            reference_rad = 1000.0
            temp_co = -0.0047
            temp_ref = 25
            temp_cell = 17
            cost_initial = 0.0
            cost_maint = 0.0
            system_loss = 0.86
            inverter_loss = 0.96
            area_rating_ratio = 168.3

            #     Variable Calculations
            actual_rad_hr = actual_rad / 365.0 / average_sun_hr
            np_rating = rooftop_area * area_rating_ratio
            #arcpy.AddMessage('System Rating: ' + str(np_rating) + ' W')

            dc_power = ((actual_rad_hr / reference_rad) * np_rating *
                        (1 + temp_co * (temp_cell - temp_ref)))
            ac_power = (dc_power / dc_ac_ratio) * (system_loss * inverter_loss)

            #     Defining Costs (rates depend on system size class)
            if np_rating < 10000.0:
                cost_initial = 3000.0 * (np_rating / 1000.0)
                cost_maint = 21.0 * (np_rating / 1000.0)
            elif np_rating < 100000.0:
                cost_initial = 2900.0 * (np_rating / 1000.0)
                cost_maint = 19.0 * (np_rating / 1000.0)

            total_system_cost = (cost_initial + cost_maint) / cdn_rate

            power_cost = 0.0
            if ac_power > 0:  # Prevents divide by zero errors when no AC power is projected
                power_cost = (
                    total_system_cost /
                    (ac_power / 1000)) / lifetime / 365 / average_sun_hr

            #arcpy.AddMessage('AC output: ' + str(ac_power) + ' W')
            #arcpy.AddMessage('System cost: $' + str(total_system_cost))
            #arcpy.AddMessage('Resulting amortized power cost: $' + str(power_cost))

            # -- Return Useful Area & Calculations to Feature Class (fc_out)
            #     Initialize
            seep_output = fc_out
            output_fields = [
                'System_Rating', 'AC_Power', 'System_Cost', 'Power_Cost'
            ]

            #     Add fields (System rating, AC Power, System Cost, Power Cost) to Output Feature
            arcpy.AddField_management(seep_output, output_fields[0], "FLOAT")
            arcpy.AddField_management(seep_output, output_fields[1], "FLOAT")
            arcpy.AddField_management(seep_output, output_fields[2], "FLOAT")
            arcpy.AddField_management(seep_output, output_fields[3], "FLOAT")

            #     Update values in new fields
            # use a distinct cursor name so the outer SearchCursor ('cursor') is not shadowed
            with arcpy.da.UpdateCursor(seep_output, output_fields) as update_cursor:
                for update_row in update_cursor:
                    update_row[0] = np_rating
                    update_row[1] = ac_power
                    update_row[2] = total_system_cost
                    update_row[3] = power_cost
                    update_cursor.updateRow(update_row)
            #     END UpdateCursor Loop

            #     Save feature class as an output
            output_path = workspace + os.sep
            output_name = 'SOut_Data_' + object_id
            seep_output_fc = output_path + output_name
            arcpy.FeatureClassToFeatureClass_conversion(
                seep_output, output_path, output_name)

            # -- Append Feature Class & Raster List
            #seep_data_list.append(r"" + seep_output_fc)
            seep_sol_map_list.append(out_sol_map)

            #Delete Intermediates
            del extent, in_feature, buffer_obj, out_reclass, actual_rad_cursor, ideal_rooftop_area

            #arcpy.AddMessage(('Completed: {0}, {1} in {2}'.format(row[0], row[1], sql_where_clause)))
            arcpy.AddMessage('Building analysis completed: ' + object_id)

            #=========================== END MAIN LOOP ==========================================

        arcpy.AddMessage('Buildings processed, starting merge...')

        # -- The Merge (of all calculations done during this script)
        #      Initialize
        seep_out_data = output_location + 'SO_' + output_root
        seep_out_raster = 'SM' + output_root
        pixel_type = "64_BIT"

        #arcpy.AddMessage('Initialized...')

        #      Retrieve List of Feature Outputs
        seep_data_list_raw = arcpy.ListFeatureClasses("SOut_Data_*")

        seep_data_list = []
        for s in seep_data_list_raw:
            ds = arcpy.Describe(s)
            seep_data_list.append(ds.catalogPath)

        #      Merge Raster Solar Maps (create raster dataset, workspace to raster dataset)
        try:
            arcpy.CreateRasterDataset_management(output_location,
                                                 seep_out_raster, cell_size,
                                                 pixel_type)
        except Exception:
            print 'Raster dataset exists already, proceeding...'

        try:
            arcpy.Mosaic_management(seep_sol_map_list,
                                    output_location + seep_out_raster)
        except Exception:
            print 'No data for Mosaic - proceeding...'

        # -- Reset environment to proper extent
        extent = desc.extent
        arcpy.env.extent = extent

        #      Merge Feature Classes
        arcpy.Merge_management(seep_data_list, seep_out_data)

        # -- Clean-Up
        try:
            arcpy.Delete_management(workspace)
            arcpy.CreateFileGDB_management(workspace_location,
                                           os.path.basename(workspace))
            arcpy.AddMessage('Workspace reset...')
        except Exception:
            arcpy.AddMessage('Workspace was not reset...')

        del extent
    except arcpy.ExecuteError:
        print arcpy.GetMessages(2)
    except Exception as e:
        print e.args[0]
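As a sanity check on the estimation model above, the core power equations can be run stand-alone; all input values below are hypothetical:

# Stand-alone run of the SEEP power equations (hypothetical inputs)
actual_rad = 1200000.0      # mean annual radiation from zonal statistics, Wh/m^2
rooftop_area = 40.0         # usable rooftop area after the 85% reduction, m^2

average_sun_hr = 6.7
reference_rad = 1000.0
temp_co, temp_ref, temp_cell = -0.0047, 25, 17
dc_ac_ratio = 1.1
system_loss, inverter_loss = 0.86, 0.96
area_rating_ratio = 168.3

actual_rad_hr = actual_rad / 365.0 / average_sun_hr   # average hourly radiation
np_rating = rooftop_area * area_rating_ratio          # nameplate rating, W
dc_power = ((actual_rad_hr / reference_rad) * np_rating *
            (1 + temp_co * (temp_cell - temp_ref)))
ac_power = (dc_power / dc_ac_ratio) * (system_loss * inverter_loss)
print 'AC output: %.0f W' % ac_power                  # ~2600 W for these inputs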
Example #4
    def run(self):

        tbx = self.parent_tbx

        # get markets in a minimal bounding polygon (in fact multiple
        # rectangles, as there is no built-in function for a minimal bounding polygon)
        communities = self.folders.get_table('Zentren')
        multi_poly = minimal_bounding_poly(communities, where='"Auswahl"<>0')

        epsg = self.parent_tbx.config.epsg
        multi_poly = [[Point(p.X, p.Y, epsg=epsg) for p in poly if p]
                      for poly in multi_poly]

        arcpy.AddMessage('Sende Standortanfrage an Geoserver...')
        reader = OSMShopsReader(epsg=epsg)
        truncate = self.par.truncate.value
        if truncate:
            ids = [str(i) for i, in tbx.query_table(
                self._markets_table, columns=['id'], where='is_osm=1')]
            if len(ids) > 0:
                arcpy.AddMessage(u'Lösche vorhandene OSM-Märkte...')
                n = tbx.delete_rows_in_table(self._markets_table, where='is_osm=1')
                tbx.delete_rows_in_table('Beziehungen_Maerkte_Zellen',
                                         where='id_markt in ({})'
                                         .format(','.join(ids)))
                arcpy.AddMessage(u'{} OSM-Märkte gelöscht'.format(n))
            else:
                arcpy.AddMessage(u'Keine OSM-Märkte vorhanden.')
        #if self.par.count.value == 0:
            #return

        markets = []
        for poly in multi_poly:
            m = reader.get_shops(poly, count=self._max_count-len(markets))
            markets += m
        arcpy.AddMessage(u'{} Märkte gefunden'.format(len(markets)))
        arcpy.AddMessage(u'Analysiere gefundene Märkte...')

        markets = self.parse_meta(markets)
        arcpy.AddMessage(u'Schreibe {} Märkte in die Datenbank...'
                         .format(len(markets)))

        markets_tmp = self.folders.get_table('markets_tmp', check=False)
        auswahl_tmp = self.folders.get_table('auswahl_tmp', check=False)
        clipped_tmp = self.folders.get_table('clipped_tmp', check=False)
        def del_tmp():
            for table in [markets_tmp, clipped_tmp, auswahl_tmp]:
                arcpy.Delete_management(table)
        del_tmp()

        markets_table = self.folders.get_table('Maerkte', check=False)
        ids = [id for id, in self.parent_tbx.query_table(markets_table, ['id'])]
        start_id = max(ids) + 1 if ids else 0
        # write markets to temporary table and clip it with selected communities
        arcpy.CreateFeatureclass_management(
            os.path.split(markets_tmp)[0], os.path.split(markets_tmp)[1],
            template=markets_table
        )
        self.markets_to_db(markets,
                           tablename=os.path.split(markets_tmp)[1],
                           truncate=False,  # already truncated osm markets
                           is_buffer=False,
                           is_osm=True,
                           start_id=start_id)

        arcpy.FeatureClassToFeatureClass_conversion(
            communities, *os.path.split(auswahl_tmp),
            where_clause='Auswahl<>0')
        arcpy.Clip_analysis(markets_tmp, auswahl_tmp, clipped_tmp)

        arcpy.Append_management(clipped_tmp, markets_table)
        del_tmp()
        arcpy.AddMessage('Entferne Duplikate...')
        n = remove_duplicates(self.folders.get_table(self._markets_table),
                              'id', match_field='id_kette',
                              where='is_osm=1', distance=50)
        arcpy.AddMessage('{} Duplikate entfernt...'.format(n))
        arcpy.AddMessage(u'Aktualisiere die AGS der Märkte...')
        self.set_ags()
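Aside: the `for i, in` / `for id, in` comprehensions above unpack one-column query rows; a minimal illustration of that idiom:

rows = [(1,), (2,), (3,)]          # shape of rows from a one-column query
ids = [str(i) for i, in rows]      # the trailing comma unpacks each 1-tuple
assert ids == ['1', '2', '3']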
Example #5
import arcpy
from net_ops import *

#path = r'C:\Users\Dim\Dropbox\sharing\NTUA\ECE\julie-nick-babis\180_askiseis\Data'
path = r'C:\Users\Dim\Documents\GitHub\GIS_SCRIPTS\Data'
arcpy.env.workspace = path

feature_list = arcpy.ListFeatureClasses()
print feature_list

x = Network_Operations(path)

print 'x is ' + str(x)

airport = r'C:\Users\Dim\Documents\GitHub\GIS_SCRIPTS\Data\ne_50m_airports\ne_50m_airports.shp'
countries = r'C:\Users\Dim\Documents\GitHub\GIS_SCRIPTS\Data\ne_50m_admin_0_countries\ne_50m_admin_0_countries.shp'

outpath = r'C:\Users\Dim\Documents\GitHub\GIS_SCRIPTS\Data\outputs'

arcpy.MakeFeatureLayer_management(airport, 'points_layer')
arcpy.MakeFeatureLayer_management(countries, 'countries_layer',
                                  """ "name" = 'Mexico' """)

arcpy.SelectLayerByLocation_management('points_layer', 'WITHIN',
                                       'countries_layer')
arcpy.FeatureClassToFeatureClass_conversion('points_layer', outpath,
                                            'airports_in_mexico')
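A defensive variant (not part of the original): export only when the location query actually selected something:

count = int(arcpy.GetCount_management('points_layer').getOutput(0))
if count > 0:
    arcpy.FeatureClassToFeatureClass_conversion('points_layer', outpath,
                                                'airports_in_mexico')
else:
    print 'No airports selected within Mexico'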
Example #6
import arcpy
import os

arcpy.env.overwriteOutput = True

infilename = arcpy.GetParameterAsText(0)  #as CSV
infile = arcpy.GetParameter(0)
outfile = arcpy.GetParameterAsText(1)  #as GDB
outname = outfile + '\\AllPoints'
csvFile = os.path.basename(infilename)
spRef = arcpy.SpatialReference(
    "NAD 1983 StatePlane Missouri East FIPS 2401 (US Feet)")

if arcpy.Exists(outfile) == False:
    arcpy.AddMessage("Creating GDB...")
    arcpy.CreateFileGDB_management(os.path.dirname(outfile),
                                   os.path.basename(outfile))

arcpy.AddMessage("Copying Rows...")
for inputs in infile:
    arcpy.AddMessage(inputs)
    if arcpy.Exists(outname) == False:
        arcpy.CopyRows_management(inputs, outname)
    else:
        arcpy.Append_management(inputs, outname, 'NO_TEST', '', '')

arcpy.AddMessage("Making Point Features...")
arcpy.MakeXYEventLayer_management(outname, "XCoord", "YCoord", "Temp_Points",
                                  spRef, "")
arcpy.FeatureClassToFeatureClass_conversion("Temp_Points", outfile,
                                            'STL_CRIME_POINTS')
arcpy.Delete_management(outname)
Example #7
import arcpy, time

##
## Set variables
##
uspsGDBLocation           = arcpy.GetParameterAsText(0)
theFeatureClass           = arcpy.GetParameterAsText(1)
isCheckedNewFC            = arcpy.GetParameterAsText(2)
newFCName                 = arcpy.GetParameterAsText(3)
out_path                  = arcpy.GetParameterAsText(4)
##
## this checks whether the checkbox for creating a new feature class is checked 
## OR
## to use the initial project geography
##
if (str(isCheckedNewFC)) == 'True':
	analysisFeatureClass  = newFCName
	arcpy.FeatureClassToFeatureClass_conversion(theFeatureClass,out_path, analysisFeatureClass)
else:
	analysisFeatureClass  = theFeatureClass
##
## This saves the current workspace as a variable then sets the workspace to the location of USPS DBFs GDB
##
currentWorkspace    = arcpy.env.workspace
arcpy.env.workspace = uspsGDBLocation
##
## These are the variables that data are pulled from
##
theCalculations  = [["ams_res", "SUM"], ["res_vac", "SUM"],["vac_3_res", "SUM"]]
newTables        = []
start_time       = time.time()
myList           = []
##
Example #8
            else:
                arcpy.DeleteField_management(finalDir + '/' + city + '_Floodplain', field)

    reportFile.write('Finished with Floodplain.--' + time.strftime('%Y%m%d--%H%M%S') + '--\n')
    print 'Finished with Floodplain: ' + time.asctime()

    #-------- COPY FINAL DRINKING WATER AND HISTORIC PLACES TABLES -----------
    try:
        arcpy.CopyRows_management(workFld + '/' + city + '_historical_places.gdb/' + city + '_historical_places', city + '_historical_places')
        arcpy.CopyRows_management(workFld + '/' + city + '_DWDemand.gdb/' + city + '_DWDemand', city + '_DWDemand')
    except:
        print 'Either Drinking Water Demand or Historical Places is missing. Please find and rerun.'

    #-------- CREATE FINAL Combined BG TABLE ---------------------------------
    """ Create the blank FC """
    arcpy.FeatureClassToFeatureClass_conversion(finalDir + '/' + city + '_BG', finalDir, city + '_BG_AllData')
    BGFields = [f.name for f in arcpy.ListFields(finalDir + '/' + city + '_BG_AllData')]
    for field in BGFields:
        if field not in ['bgrp']:
            try:
                arcpy.DeleteField_management(finalDir + '/' + city + '_BG_AllData', field)
            except:
                pass
        else:
            pass


    finalTbls = arcpy.ListTables()
    allTbls = [t for t in finalTbls]
    while len(allTbls) > 13:
        for t in allTbls:
Example #9
wks = arcpy.ListWorkspaces('*', 'FileGDB')
# Skip the Master GDB
wks.remove(MasterGDBLocation)

for fgdb in wks:
    # Set name of geodatabase
    KmlName = fgdb[fgdb.rfind(os.sep) + 1:-4]
    #print "KmlName: " + KmlName

    # Change the workspace to the current FileGeodatabase
    arcpy.env.workspace = fgdb

    # For every Featureclass inside, copy it to the Master and use the name from the original fGDB
    featureClasses = arcpy.ListFeatureClasses('*', '', 'Placemarks')
    for fc in featureClasses:
        print "Adding KmlName field to " + fgdb + os.sep + 'Placemarks' + os.sep + fc
        arcpy.AddField_management(fgdb + os.sep + 'Placemarks' + os.sep + fc,
                                  "KmlName",
                                  "TEXT",
                                  field_length=10)
        arcpy.CalculateField_management(
            fgdb + os.sep + 'Placemarks' + os.sep + fc, "KmlName",
            '"' + KmlName + '"', "PYTHON")
        #print "COPYING: " + fc + " FROM: " + fgdb
        fcCopy = fgdb + os.sep + 'Placemarks' + os.sep + fc
        arcpy.FeatureClassToFeatureClass_conversion(fcCopy, MasterGDBLocation,
                                                    KmlName + "_" + fc)

# Clean up
del kmz, wks, fc, featureClasses, fgdb
Example #10
import sys, os
import arcpy

sys.path.append(r'T:\FS\Reference\GeoTool\r01\Script')
import NRGG

sys.path.append(r'T:\FS\Reference\GeoTool\r01\Script\ADSFunctions')
import ADSFunctions

###################### Input Variables ##################################
featureClass = r''  # Example: r'T:\FS\NFS\R01\Program\3400ForestHealthProtection\GIS\R01\ADS\Archived\Yearly\WithFNF\2020\R1R4_ADS_2020_FinalDatsets.gdb\R1ADS2020Damage'
year = 2020

outputGDB = r''  # Example: r'T:\FS\NFS\R01\Program\3400ForestHealthProtection\GIS\Kellner\Expanded_Data\R1_Expanded_ADS_Tables\R1ADS_SingleDCAValue_Tables_2020.gdb'
###########################################################################

copyName = '{}_copy'.format(os.path.basename(featureClass))
arcpy.FeatureClassToFeatureClass_conversion(featureClass, outputGDB, copyName)

ADSFunctions.makeCopyOfOriginalOBJECTID('{}_copy'.format(os.path.basename(featureClass)))
tableName = '{}_TableView'.format(os.path.basename(featureClass))
arcpy.MakeTableView_management(os.path.join(outputGDB, copyName), tableName)

DCAValues = NRGG.uniqueValuesFromFeatureClassField(featureClass, 'DCA_CODE')
for DCAValue in DCAValues:
    selectTableName = 'ADS_Expanded_{}_{}'.format(DCAValue, year)
    arcpy.TableSelect_analysis(
        tableName,
        os.path.join(outputGDB, selectTableName),
        'DCA_CODE = {}'.format(DCAValue))

    arcpy.AddField_management(selectTableName, 'ORIGINAL_ID', 'LONG')
    arcpy.CalculateField_management(
Example #11
#Read in the inspection list from the jupyter notebook output
import arcpy

print "Reading in inspection list csv file"
newFile = r"C:\Users\dmehri\Documents\DATA\ArcGIS\Spatial_Join\Sustainability_Inspection_List.csv"
arcpy.MakeTableView_management(in_table=newFile, out_view='InspectionList')

#print "Done"

#arcpy.env.outputCoordinateSystem = arcpy.SpatialReference("NAD 1983 (2011)")
#Display XY coordinates before projecting
arcpy.MakeXYEventLayer_management("InspectionList", "Longitude Point",
                                  "Latitude Point", "InspectionList_points",
                                  "NAD 1983 (2011)", "")

#export
outpath = "C:\Users\dmehri\Documents\DATA\ArcGIS\Spatial_Join\output"
arcpy.FeatureClassToFeatureClass_conversion("InspectionList_points", outpath,
                                            "InspectionListPoints.shp")

#Project the points
#set input feature as the saved shapefile
input_features = r"C:\Users\dmehri\Documents\DATA\ArcGIS\Spatial_Join\output\InspectionListPoints.shp"

#output data
output_feature_class = r"C:\Users\dmehri\Documents\DATA\ArcGIS\Spatial_Join\output\InspectionListProjectedPoints.shp"
# create a spatial reference object for the output coordinate system
out_coordinate_system = arcpy.SpatialReference(
    'NAD 1983 (2011) StatePlane New York Long Isl FIPS 3104 (US Feet)')
#run the projected points tool
arcpy.Project_management(input_features, output_feature_class,
                         out_coordinate_system)

#Now the projected points are exported, delete layers not needed anymore
Example #12
import arcpy
import pandas as pd

in_csv = 'C:/Users/dgoodma7/Documents/Data/LPC_LL_OpenData_2015Nov.csv'

df = pd.read_csv(in_csv)

df['longitude'] = df['the_geom'].str.split('(').str[1].str.split(' ').str[0]
df['latitude'] = df['the_geom'].str.split(' ').str[2].str.replace(')', "")

# print(df.head())

df = df[(df['LM_TYPE'] == 'Individual Landmark')]  # keep only Individual Landmark records

out_csv = in_csv.replace('.csv','_clean2.csv')


df.to_csv(out_csv,index=False)

arcpy.MakeXYEventLayer_management(
    out_csv,
    'longitude',
    'latitude',
    'in_memory_xy_layer',
)

arcpy.FeatureClassToFeatureClass_conversion(
    'in_memory_xy_layer',
    'C:/Users/dgoodma7/Documents/Data/',
    'landmarks_mn_indiv.shp',
)
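The string handling on the_geom above assumes WKT point values; a quick stand-alone check of that parsing logic:

geom = 'POINT (-73.98 40.75)'              # typical the_geom value
lon = geom.split('(')[1].split(' ')[0]     # '-73.98'
lat = geom.split(' ')[2].replace(')', '')  # '40.75'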
Example #13
arcpy.SpatialJoin_analysis(Meatballs, All_CORS2_Buffer__2_, count_overlaps, "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "WITHIN", "", "")

# Process: Join Field
arcpy.JoinField_management(Spaghetti, oidfield, count_overlaps, "ORIG_FID", "Join_Count")

# Process: Make Feature Layer (2)
arcpy.MakeFeatureLayer_management(Output_Feature_Class, Features_to_Delete, "Join_Count = 0", "", "FID_empty FID_empty VISIBLE NONE;Shape_length Shape_length VISIBLE NONE;Shape_area Shape_area VISIBLE NONE;Join_Count Join_Count VISIBLE NONE")

# Process: Delete Features
arcpy.DeleteFeatures_management(Features_to_Delete)

# Process: Delete
arcpy.Delete_management(Features_to_Delete, "")

# Process: Feature Class to Feature Class (4)
arcpy.FeatureClassToFeatureClass_conversion(Output_Feature_Class, v_scratchworkspace___4_, "Threatened_coverage_poor", "Join_Count <3", "FID_empty \"FID_empty\" true true false 0 Long 0 0 ,First,#,C:\\Workspace\\GeodeticInteractive\\Scratch\\Scratch.gdb\\AllCORS_Select_Buffer_Featur,FID_empty,-1,-1;Shape_length \"Shape_length\" true true false 0 Double 0 0 ,First,#,C:\\Workspace\\GeodeticInteractive\\Scratch\\Scratch.gdb\\AllCORS_Select_Buffer_Featur,Shape_length,-1,-1;Shape_area \"Shape_area\" true true false 0 Double 0 0 ,First,#,C:\\Workspace\\GeodeticInteractive\\Scratch\\Scratch.gdb\\AllCORS_Select_Buffer_Featur,Shape_area,-1,-1;Join_Count \"Join_Count\" true true false 0 Long 0 0 ,First,#,C:\\Workspace\\GeodeticInteractive\\Scratch\\Scratch.gdb\\AllCORS_Select_Buffer_Featur,Join_Count,-1,-1", "")

# Process: Add Field (6)
arcpy.AddField_management(Threatened_coverage_poor, "Coverage", "TEXT", "", "", "20", "Coverage", "NULLABLE", "NON_REQUIRED", "")

# Process: Calculate Field (5)
arcpy.CalculateField_management(Threatened_coverage_poor__4_, "Coverage", "\"Inadequate Coverage\"", "PYTHON_9.3", "")

# Process: Dissolve (4)
arcpy.Dissolve_management(Threatened_coverage_poor__2_, Threatened_coverage_poor_Dis__3_, "Coverage", "", "MULTI_PART", "DISSOLVE_LINES")

# Process: Feature Class to Feature Class
arcpy.FeatureClassToFeatureClass_conversion(Output_Feature_Class, v_scratchworkspace___2_, "Secure_coverage", "Join_Count >2", "FID_empty \"FID_empty\" true true false 0 Long 0 0 ,First,#,C:\\Workspace\\GeodeticInteractive\\Scratch\\Scratch.gdb\\AllCORS_Select_Buffer_Featur,FID_empty,-1,-1;Shape_length \"Shape_length\" true true false 0 Double 0 0 ,First,#,C:\\Workspace\\GeodeticInteractive\\Scratch\\Scratch.gdb\\AllCORS_Select_Buffer_Featur,Shape_length,-1,-1;Shape_area \"Shape_area\" true true false 0 Double 0 0 ,First,#,C:\\Workspace\\GeodeticInteractive\\Scratch\\Scratch.gdb\\AllCORS_Select_Buffer_Featur,Shape_area,-1,-1;Join_Count \"Join_Count\" true true false 0 Long 0 0 ,First,#,C:\\Workspace\\GeodeticInteractive\\Scratch\\Scratch.gdb\\AllCORS_Select_Buffer_Featur,Join_Count,-1,-1", "")

# Process: Add Field (3)
arcpy.AddField_management(Secure_coverage, "Coverage", "TEXT", "", "", "20", "Coverage", "NULLABLE", "NON_REQUIRED", "")
Example #14
# 1. HEIGHT
fldMap_HEIGHT_VPs = arcpy.FieldMap()
fldMap_HEIGHT_VPs.addInputField(in_VPs, "HEIGHT")
fldMap_HEIGHT_VPs.addInputField(erased_VPs, "H")

HEIGHT_VPs_field = fldMap_HEIGHT_VPs.outputField
HEIGHT_VPs_field.name = "HEIGHT"
HEIGHT_VPs_field.aliasName = "HEIGHT"
fldMap_HEIGHT_VPs.outputField = HEIGHT_VPs_field

fieldMappings_VPs.addFieldMap(fldMap_HEIGHT_VPs)

arcpy.Merge_management([in_VPs, erased_VPs], merged_VPs, fieldMappings_VPs)

arcpy.FeatureClassToFeatureClass_conversion(merged_VPs, NEW_circuit_GDB,
                                            outVPs)

## Merge New VPs with OLD VPs based on field map

# TREETOPS

NEW_TTs = OUT_TTs

NEW_TTs_ATTRIB = os.path.join(OUTPUT_FOLDER, "TTs_NEW_ATTRIB.shp")

arcpy.gp.ET_GPNearFeature(NEW_TTs, old_Spans, NEW_TTs_ATTRIB, 300)

with arcpy.da.UpdateCursor(NEW_TTs_ATTRIB, ["ET_Dist", "H"]) as uCur:
    for row in uCur:
        row[0] = (row[0] * 3.28)  # metres to feet
        row[1] = (row[1] * 3.28)
        uCur.updateRow(row)  # write the converted values back
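For context, here is the field-map pattern used above in self-contained form; the file names are hypothetical. Two inputs carry the same attribute under different field names, and a single FieldMap unifies them in the merged output:

import arcpy

fieldMappings = arcpy.FieldMappings()
fldMap = arcpy.FieldMap()
fldMap.addInputField("in_a.shp", "HEIGHT")   # field name in the first input
fldMap.addInputField("in_b.shp", "H")        # same attribute in the second input
outField = fldMap.outputField
outField.name = "HEIGHT"                     # unified name in the merged output
outField.aliasName = "HEIGHT"
fldMap.outputField = outField
fieldMappings.addFieldMap(fldMap)

arcpy.Merge_management(["in_a.shp", "in_b.shp"], "merged.shp", fieldMappings)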
Example #15
    arcpy.SelectLayerByAttribute_management('BackLyr', 'NEW_SELECTION', fc + "_Front.Shape_Area IS NOT NULL")
    arcpy.CalculateField_management('BackLyr', fc + '_Back.Proc', 'getProc()', 'PYTHON_9.3', 'def getProc():\n  return 1')
    arcpy.SelectLayerByAttribute_management('BackLyr', 'CLEAR_SELECTION')
    arcpy.RemoveJoin_management('BackLyr')
    
    
    
    arcpy.AddJoin_management('FrontLyr', fc[:-4] + 'APN', 'BackLyr', fc[:-4] + 'APN')
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'NEW_SELECTION', fc + "_Back.Proc IS NOT NULL")
    
    arcpy.CalculateField_management('FrontLyr', fc + '_Front.BackYard_Area', 'getVal(!' + fc + '_Back.Shape_Area!)', 'PYTHON_9.3', codeblock)
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'CLEAR_SELECTION')
    arcpy.RemoveJoin_management('FrontLyr')
    
    arcpy.SelectLayerByAttribute_management('BackLyr', 'NEW_SELECTION', "Proc IS NULL")
    arcpy.FeatureClassToFeatureClass_conversion('BackLyr', output,  fc + '_noFYBack')
    arcpy.SelectLayerByAttribute_management('BackLyr', 'CLEAR_SELECTION')


    arcpy.MakeFeatureLayer_management(output + '\\' + fc + '_noFYBack', 'NoFYBack_Lyr')    
    arcpy.Append_management('NoFYBack_Lyr', 'FrontLyr', 'NO_TEST')
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'NEW_SELECTION', 'BackYard_Area IS NULL')
    arcpy.CalculateField_management('FrontLyr', 'BackYard_Area', 'getVal(!Shape_Area!)', 'PYTHON_9.3', codeblock)
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'CLEAR_SELECTION')

    
    arcpy.Append_management('NoFYBY_Lyr', 'FrontLyr', 'NO_TEST')

    arcpy.SelectLayerByAttribute_management('FrontLyr', 'NEW_SELECTION', "FrontYard_Area IS NULL AND BackYard_Area IS NULL")
    arcpy.CalculateField_management('FrontLyr', 'NA_Area', 'getVal(!Shape_Area!)', 'PYTHON_9.3', codeblock)
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'CLEAR_SELECTION')
Example #16
    'HollandCity': 'Holland City',
    'OtsegoCity': 'Otsego City',
    'PlainwellCity': 'Plainwell City',
    'WaylandCity': 'Wayland City',
    'SaugatuckCity': 'Saugatuck City',
    'South HavenCity': 'South Haven City',
    'DouglasCity': 'Douglas City'
}

for val in inputunit:
    print "Value: %s" % unit.get(val)
    unitSpace = str(unit.get(val))
    arcpy.AddMessage("processing: " + val)
    arcpy.FeatureClassToFeatureClass_conversion(
        in_features=
        "J:/Apps/Python/LayerUpdates/addresses/source//AddressData.gdb/Addresses4LocalShp",
        out_path="J:/Apps/Python/LayerUpdates/addresses/build",
        out_name=val + ".shp",
        where_clause="MUNI = '" + unitSpace + "'",
        field_mapping=
        'SITE_ID "SITE_ID" true true false 8 Double 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,SITE_ID,-1,-1;ADDRESS "ADDRESS" true true false 50 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,ADDRESS,-1,-1;PREDIR "PREDIR" true true false 2 Text 0 0  ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,PREDIR,-1,-1;NUMBER "NUMBER" true true false 5 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,NUMBER,-1,-1;NAME "NAME" true true false 40 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,NAME,-1,-1;SUFFIX "SUFFIX" true true false 4 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,SUFFIX,-1,-1;NUMBERSUP "NUMBERSUP" true true false 4 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,NUMBERSUP,-1,-1;POSTDIR "POSTDIR" true true false 2 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,POSTDIR,-1,-1;SUPPLEM "SUPPLEM" true true false 30 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,SUPPLEM,-1,-1;CITY "CITY" true true false 30 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,CITY,-1,-1;ZIP "ZIP" true true false 5 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,ZIP,-1,-1;COUNTY "COUNTY" true true false 20 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,COUNTY,-1,-1;MUNI "MUNI" true true false 50 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,MUNI,-1,-1',
        config_keyword="")

#where_clause="MUNI LIKE "+val+"%",

#arcpy.AddMessage ("finished processing")

#field_mapping='field_mapping='SITE_ID "SITE_ID" true true false 8 Double 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,SITE_ID,-1,-1, config_keyword="")

#arcpy.FeatureClassToFeatureClass_conversion(in_features="J:/Apps/Python/LayerUpdates/addresses/source//AddressData.gdb/Addresses4LocalShp",out_path="J:/Apps/Python/LayerUpdates/addresses/build",out_name = "test.shp",where_clause="",field_mapping='SITE_ID "SITE_ID" true true false 8 Double 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,SITE_ID,-1,-1;ADDRESS "ADDRESS" true true false 50 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,ADDRESS,-1,-1;PREDIRECTI "PREDIRECTI" true true false 2 Text 0 0  ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,PREDIRECTI,-1,-1;NUMBER "NUMBER" true true false 5 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,NUMBER,-1,-1;NAME "NAME" true true false 40 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,NAME,-1,-1;SUFFIX "SUFFIX" true true false 4 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,SUFFIX,-1,-1;NUMBERSUP "NUMBERSUP" true true false 4 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,NUMBERSUP,-1,-1;POSTDIRECT "POSTDIRECT" true true false 2 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,POSTDIRECT,-1,-1;SUPPLEMENT "SUPPLEMENT" true true false 30 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,SUPPLEMENT,-1,-1;CITY "CITY" true true false 30 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,CITY,-1,-1;ZIP "ZIP" true true false 5 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,ZIP,-1,-1;COUNTY "COUNTY" true true false 20 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,COUNTY,-1,-1;MUNI "MUNI" true true false 50 Text 0 0 ,First,#,J:\Apps\Python\LayerUpdates\addresses\source\AddressData.gdb\Addresses4LocalShp,MUNI,-1,-1', config_keyword="")
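Side note: hand-quoting the value in where_clause works here, but arcpy.AddFieldDelimiters makes the field reference portable across data sources; a sketch against the same source:

src = "J:/Apps/Python/LayerUpdates/addresses/source/AddressData.gdb/Addresses4LocalShp"
where = "{0} = '{1}'".format(arcpy.AddFieldDelimiters(src, "MUNI"), unitSpace)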
Example #17
    def execute(self):

        env.workspace = self.conf.ws

        fgdb_name = self.conf.fgdb_name
        fgdb_file = self.conf.fgdb_file
        fc_name = self.conf.fc_name
        fc_file = self.conf.fc_file

        out_path = self.conf.out_path
        fgdb_name_temp = self.conf.fgdb_name_temp
        fgdb_file_tmp = self.conf.fgdb_file_tmp

        fc_name_temp = self.conf.fc_name_temp
        fc_file_temp = self.conf.fc_file_temp

        out_feature_name = self.conf.out_feature_name
        out_feature_path = self.conf.out_feature_path

        lossless = self.conf.lossless

        try:
            if arcpy.Exists(fgdb_file_tmp):
                arcpy.CompressFileGeodatabaseData_management(
                    fgdb_file_tmp, lossless)
                arcpy.Delete_management(fgdb_file_tmp)

            # Process: Create File GDB
            arcpy.CreateFileGDB_management(out_path, fgdb_name_temp, 'CURRENT')

            # Process: Create Feature Class
            template_name = 'TEMPLATE.shp'
            template_file = os.path.join(out_path, template_name)
            coordinate_system = "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984'," \
                                "SPHEROID['WGS_1984',6378137.0,298.257223563]]," \
                                "PRIMEM['Greenwich',0.0],UNIT['Degree'," \
                                "0.0174532925199433]];" \
                                "-400 -400 1000000000;-100000 10000;" \
                                "-100000 10000;" \
                                "8,98315284119522E-09;0,001;0,001;IsHighPrecision "

            arcpy.CreateFeatureclass_management(fgdb_file_tmp, fc_name_temp,
                                                "POLYGON", template_file,
                                                "DISABLED", "DISABLED",
                                                coordinate_system, "", "0",
                                                "0", "0")

            # Process: Append
            field_mappings = arcpy.FieldMappings()
            # Add target dataset
            field_mappings.addTable(os.path.join(fgdb_file_tmp, fc_name_temp))
            # Add append dataset
            field_mappings.addTable(fc_file)

            arcpy.Append_management(fc_file,
                                    os.path.join(fgdb_file_tmp, fc_name_temp),
                                    "NO_TEST", field_mappings, "")

            # Process: Calculate Field
            field_name = "ZONA"
            exp_value = "valor()"
            block_value = "def valor(): return '01'"
            lang_value = "PYTHON_9.3"

            arcpy.CalculateField_management(in_table=fc_file_temp,
                                            field=field_name,
                                            expression=exp_value,
                                            expression_type=lang_value,
                                            code_block=block_value)

            field_name = "ULT_ALTE"
            exp_value = "ts()"
            lang_value = "PYTHON_9.3"
            block_value = "def ts():\\n import time\\n return time.strftime(" \
                          "\"%Y-%m-%d %H:%M:%S\", time.localtime())"

            arcpy.CalculateField_management(in_table=fc_file_temp,
                                            field=field_name,
                                            expression=exp_value,
                                            expression_type=lang_value,
                                            code_block=block_value)
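            # Note (alternative, not in the original): a triple-quoted string
            # with real newlines expresses the same code block more readably:
            #   block_value = """def ts():
            #       import time
            #       return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())"""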

            # Process: Create Feature Class
            if arcpy.Exists(out_feature_path):
                arcpy.Delete_management(out_feature_path)

            arcpy.FeatureClassToFeatureClass_conversion(
                os.path.join(fgdb_file_tmp, fc_name_temp), out_path,
                out_feature_name)

        except Exception:
            arcpy.AddMessage('Erro ao processar.')
            e = sys.exc_info()[1]
            arcpy.AddError(e.args[0])
        else:
            arcpy.AddMessage('Finalizado com sucesso!')
Example #18
def krigingFromPointCSV(inTable,
                        valueField,
                        xField,
                        yField,
                        inClipFc,
                        workspace="assignment3.gdb"):
    #set new workspace as the geodatabase
    arcpy.env.workspace = workspace
    #allow overwrite table on
    arcpy.env.overwriteOutput = True
    #Generate an input feature layer using inTable
    yearly_points = "yearly_points"
    #Check that the input's coordinate system matches the clip feature class
    #get the spatial references
    spatial_ref1 = arcpy.Describe(inTable).spatialReference
    spatial_ref2 = arcpy.Describe(inClipFc).spatialReference
    if spatial_ref1.name != spatial_ref2.name:
        print(inTable + " needs to be projected as " + spatial_ref2.name +
              " to match the coordinate system of the clip feature class.")
    else:
        print("The coordinate systems for both layers match.")
    #create new point feature class based on lat long values
    # xField: The field that contains the longitude values
    # yField: The field that contains the latitude values
    arcpy.management.XYTableToPoint(inTable, yearly_points, xField, yField)
    #Check and enable the spatial analyst extension for kriging
    class LicenseError(Exception):
        pass  # custom exception for a missing Spatial Analyst license

    try:
        if arcpy.CheckExtension("Spatial") == "Available":
            arcpy.CheckOutExtension("Spatial")
        else:
            # raise the custom exception
            raise LicenseError
    except LicenseError:
        print("Spatial Analyst license is unavailable")
    from arcpy.sa import *
    #Use KrigingModelOrdinary to interpolate
    #define the value field and an output cell size
    field = valueField
    cellSize = 0.05  # assumed value; the original referenced an undefined cellSize
    outKriging = Kriging(yearly_points, field, KrigingModelOrdinary(), cellSize)
    #Save the output
    outKriging.save("valueField")
    #begin clipping code
    #clip the interpolated kriging raster
    #define the clip feature class input
    inputClipped = inClipFc
    #copy the clip features into the workspace
    arcpy.FeatureClassToFeatureClass_conversion(inputClipped,
                                                arcpy.env.workspace,
                                                "inputClipped")
    #create variable to define the input
    descInput = arcpy.Describe("inputClipped")
    #create the boundaries for the clipped feature
    rectangle = str(descInput.extent.XMin) + " " + str(
        descInput.extent.YMin) + " " + str(descInput.extent.XMax) + " " + str(
            descInput.extent.YMax)
    print(rectangle)
    #use Clip_management to clip the interpolated raster to the clip area
    arcpy.Clip_management("valueField", rectangle, "valueFieldClipped",
                          "inputClipped", "#", "ClippingGeometry",
                          "MAINTAIN_EXTENT")
    outInt = Int("valueFieldClipped")
    #Save the output
    outInt.save("valueFieldCI")
Example #19
        'non_incapac_inj_count':"non_incapac_injuries_NBR<>0 and fatalities_nbr=0 and incapac_injuries_nbr=0",
        'possible_inj_count':"possible_injuries_nbr<>0 and FATALITIES_NBR=0 and non_incapac_injuries_nbr=0 and incapac_injuries_nbr=0"
        }
        
# add and populate fields for point layer
for key in dict:
    arcpy.AddField_management(PointFile,key,"LONG")
    arcpy.SelectLayerByAttribute_management(PointFile, "NEW_SELECTION", dict[key])
    arcpy.CalculateField_management(PointFile, key, 1)
    arcpy.SelectLayerByAttribute_management(PointFile, "Switch_selection")
    arcpy.CalculateField_management(PointFile, key, 0)

# Clear Selected Features
arcpy.SelectLayerByAttribute_management(PointFile, "clear_selection")

PointFeatures = arcpy.FeatureClassToFeatureClass_conversion(PointFile, outputGDB, "GCAT_LUCWOO_xy_points_" + TimeDateStr)

# dict of feature type and corresponding threshold and feature class
ftype = {'Intersection':[IntersectionThreshold, IntersectionFeatures],'Segment':[SegmentThreshold, SegmentFeatures]}

# field map and merge rules

for f in ftype:

    # Create a new fieldmappings and add the two input feature classes.
    fieldmappings = arcpy.FieldMappings()
    fieldmappings.addTable(ftype[f][1])
    fieldmappings.addTable(PointFeatures)

    # list of fields to map 
    flds = ["fatalities_count", "incapac_inj_count","non_incapac_inj_count","possible_inj_count"]
Example #20
import os
import shutil

import arcpy as ap


def idRoutes(year, root_dir, routes, final_gdb_loc):
    gdb = f"IdentifiedRoutes{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)  # -----> Change

    ap.ClearWorkspaceCache_management()

    working_gdb = ap.env.workspace
    working_file = "IdentifiedRoutes_working"

    # Get input demographic feature classes from previous function outputs
    # minority_gdb = os.path.join(root_dir, f"Minority{year}.gdb")  # -----> Change Year
    # poverty_gdb = os.path.join(root_dir, f"Poverty{year}.gdb")  # -----> Change Year
    # lep_gdb = os.path.join(root_dir, f"LEP{year}.gdb")
    minority_file = os.path.join(final_gdb_loc, f"Minority{year}_Final")
    # minority_file = os.path.join(minority_gdb, f"Minority{year}_Final")
    poverty_file = os.path.join(final_gdb_loc, f"Poverty{year}_Final")
    # poverty_file = os.path.join(poverty_gdb, f"Poverty{year}_Final")
    lep_file = os.path.join(final_gdb_loc, f"LEP{year}_Final")
    medhhinc_file = os.path.join(final_gdb_loc, f"MedHHInc{year}_Final")
    # lep_file = os.path.join(lep_gdb, f"LEP{year}_Final")

    # Working feature classes
    minority_working_file = f"Minority{year}_BG"
    poverty_working_file = f"Poverty{year}_BG"
    lep_working_file = f"LEP{year}_BG"
    medhhinc_working_file = f"MedHHInc{year}_BG"

    routes_file = f"IdentifiedRoutes{year}"
    routes_working = os.path.join(working_gdb, routes_file)

    # define inputs for the for loop - one set for each demographic category
    working_list = [
        {
            "org_file":
            minority_file,  # input feature class
            "working_file":
            minority_working_file,  # working feature class for calcs
            "identified_field":
            "RegMinBG",  # field containing the threshold value for the region
            "add_fields": [['MinorityLength', 'double'],
                           ['PMinority', 'double'], ['MinorityRoute', 'SHORT']]
        },  # route fields to be added
        {
            "org_file":
            poverty_file,
            "working_file":
            poverty_working_file,
            "identified_field":
            "RegPovBG",
            "add_fields": [['PovertyLength', 'double'], ['PPoverty', 'double'],
                           ['PovertyRoute', 'SHORT']]
        },
        {
            "org_file":
            medhhinc_file,
            "working_file":
            medhhinc_working_file,
            "identified_field":
            "RegBelMedInc",
            "add_fields": [['MedHHIncLength', 'double'],
                           ['PMedHHInc', 'double'], ['MedHHIncRoute', 'SHORT']]
        },
        {
            "org_file":
            lep_file,
            "working_file":
            lep_working_file,
            "identified_field":
            "RegAbvLEP",
            "add_fields": [['LEPLength', 'double'], ['PLEP', 'double'],
                           ['LEPRoute', 'SHORT']]
        }
    ]

    # ! is this a helper function now
    if os.path.exists(working_gdb) and os.path.isdir(working_gdb):
        shutil.rmtree(working_gdb)
        print(f"{gdb} DELETED!!!")

    # CREATE WORKING GDB
    ap.CreateFileGDB_management(root_dir, gdb)
    print("GEODATABASE CREATED!!!")

    # CREATE WORKING MINORITY, POVERTY AND ROUTES FEATURE CLASSES
    ap.FeatureClassToFeatureClass_conversion(routes, working_gdb, routes_file)
    print("FEATURE CLASS CREATED!!!")

    ap.AddFields_management(routes_working, [['FullLength', 'double']])
    print('INITIAL FIELDS ADDED TO ROUTES_WORKING FILE!!!')

    ap.CalculateFields_management(routes_working, 'PYTHON3',
                                  [['FullLength', '!shape.length@miles!']])
    print('CALCULATE FULL LENGTH OF ROUTES!!!')

    # loop through each demographic category, first collecting inputs from the working list,
    # then
    for item in working_list:
        # WORKING LIST ITEM DEFINITIONS
        org_file = item["org_file"]
        working_file = item["working_file"]
        identified_field = item["identified_field"]
        add_fields = item["add_fields"]
        routes_analysis = "routes_" + str(working_file)
        length_field = add_fields[0][0]
        percent_field = add_fields[1][0]
        id_field = add_fields[2][0]

        print("")
        print("--------------------------------")
        print("********************************")
        print("START OF " + working_file)
        print("********************************")
        print("--------------------------------")
        print("")

        # FOR LOOP FILE NAME DEFINITIONS
        dissolve_file = str(working_file) + "_dissolve"
        buffer_file = str(dissolve_file) + "_buffer"

        clip_routes = str(routes_analysis) + "_clip"
        dissolve_routes = str(clip_routes) + "_dissolve"

        # FOR LOOP POLYGON AND ROUTE GEOPROCESSING
        # "where" expression filtering for identified blockgroups
        selected_bg = str(identified_field) + " = 1"
        print(selected_bg)
        ap.FeatureClassToFeatureClass_conversion(org_file, working_gdb,
                                                 working_file, selected_bg)
        print(working_file + " CREATED!!!")

        ap.FeatureClassToFeatureClass_conversion(routes_working, working_gdb,
                                                 routes_analysis)
        print(routes_analysis + " FILE CREATED!!!")

        ap.Dissolve_management(working_file, dissolve_file,
                               '')  # dissolve all into one shape
        print(dissolve_file + " CREATED!!!")

        ap.Buffer_analysis(dissolve_file, buffer_file,
                           "50 feet")  # buffer by 50 feet
        print(buffer_file + " CREATED!!!")

        ap.Clip_analysis(routes_working, buffer_file,
                         clip_routes)  # clip routes using the dissolve shape
        print(clip_routes + " CREATED!!!")

        # calculate length of route inside identified blockgroups and compare to total length
        ap.AddField_management(clip_routes, "IdLength", "double")
        print("IdLength Field Added for " + working_file)

        ap.CalculateField_management(clip_routes, "IdLength",
                                     "!shape.geodesicLength@miles!")
        print("IdLength Field Calculated for " + working_file)

        ap.Dissolve_management(
            clip_routes, dissolve_routes, 'RouteAbbr',
            [["IdLength", 'sum']])  # collect route pieces by route
        print(clip_routes + " DISSOLVED")

        ap.JoinField_management(routes_working, "RouteAbbr", dissolve_routes,
                                "RouteAbbr",
                                ["SUM_IdLength"])  # join and sum ID'ed length
        print(routes_working + " JOINED WITH " + dissolve_routes)

        ap.AddFields_management(routes_working, add_fields)
        print("FIELDS ADDED TO " + routes_working)

        # compute percentage of total that is ID'ed then flag if greater than 0.33
        ap.CalculateFields_management(
            routes_working, 'PYTHON3',
            [[length_field, '!SUM_IdLength!'],
             [percent_field, f'percent(!{length_field}!, !FullLength!)']],
            '''def percent(calc, full):
    if calc is None:
        return 0
    else:
        return calc / full
''')
        ap.CalculateFields_management(
            routes_working, 'PYTHON3',
            [[id_field, f'ifBlock(!{percent_field}!)']],
            '''def ifBlock(percent):
    if percent > 0.33:
        return 1
    else:
        return 0
''')
        print(routes_working + " FIELDS CALCULATED")

        ap.DeleteField_management(routes_working, "SUM_IdLength")
        print("IdLength Field Deleted")

    ## loop end ##

    ap.ClearWorkspaceCache_management()

    deleteFeatureClass(routes_file, final_gdb_loc)

    # CREATE FINAL FEATURE CLASS
    ap.FeatureClassToFeatureClass_conversion(routes_file, final_gdb_loc,
                                             routes_file)
    print("---------------------------")
Example No. 21
0
    setLyr = os.path.join(mainscratchGDB, 'settlement_' + fc + '_spjoin')
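    # Tally settlements per polygon: polyCountDict maps each polygon ID to
    # [statecode, statename, polygon ID, running settlement count].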
    with arcpy.da.SearchCursor(
            setLyr, ['statecode', 'statename', fc + '_id']) as cursor:
        for row in cursor:
            if row[2] is not None:
                if row[2] in polyCountDict:
                    polyCountDict[row[2]][-1] += 1
                else:
                    polyCountDict[row[2]] = list(row) + [1]

# ------------------------------------------- Creating Error Outputs ------------------------------------------- #

## OUTPUT 1 ----- for settlement counts per bua, ssa, and hamlet
    countFC = os.path.join(errorsGDB, 'settlement_' + fc + '_count')
    arcpy.FeatureClassToFeatureClass_conversion(fc, errorsGDB,
                                                'settlement_' + fc + '_count')
    addFields = ['statecode', 'statename', 'settlement_count']
    for field in addFields:
        arcpy.AddField_management(countFC, field, 'TEXT')

    # Single update pass: write the tallied values where a count exists,
    # otherwise zero-fill. (Assumed reconstruction: the snippet's second,
    # truncated loop is folded into the else branch here.)
    with arcpy.da.UpdateCursor(
            countFC,
            ['statecode', 'statename', fc + '_id', 'settlement_count']) as cursor:
        for row in cursor:
            if row[2] in polyCountDict:
                row = polyCountDict[row[2]]
            else:
                row[3] = 0
            cursor.updateRow(row)
Example No. 22
0
# Make the XY event layer...
print "making event layer"
arcpy.MakeXYEventLayer_management(in_Table, x_coords, y_coords, out_Layer, spRef)

# Save to a layer file
arcpy.SaveToLayerFile_management(out_Layer, saved_Layer)
print "saved to layer file"

# project to WGS 84 (auxiliary sphere)
outCS = arcpy.SpatialReference(102100)
outfc = 'trend_gages'
arcpy.Delete_management(outfc + ".shp")
arcpy.Project_management(out_Layer, outfc, outCS)
print "projected data"
arcpy.FeatureClassToFeatureClass_conversion(out_Layer, out_gdb_path, outfc)
print "feature class created for sites"

# Loop through feature classes to join all tables to trends_gages layer and create new feature classes
env.workspace = r"C:/Data/Projects/Data/sw-flow-trends/temp/" + gdb_name

tables = arcpy.ListTables()
for table in tables:

    table_layer_name = table + "_layer"

    # Create a feature layer
    arcpy.MakeFeatureLayer_management(outfc, table_layer_name)
    
    # Join the feature layer to a table
    arcpy.AddJoin_management(table_layer_name, "site_id", table, "site_id")
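
    # Assumed continuation, since the snippet truncates here: persist each
    # joined layer as its own feature class (the "_joined" suffix is
    # hypothetical, not from the original).
    arcpy.FeatureClassToFeatureClass_conversion(table_layer_name, out_gdb_path,
                                                table + "_joined")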
Example No. 23
0
arcpy.MakeTableView_management(ws + "\\" + tempgdb + "\\MAP_EXTRACT",
                               "MAP_EXTRACT", "DIRECTION <=2")
arcpy.AddField_management(ws + "\\" + tempgdb + "\\MAP_EXTRACT", "SRND", "TEXT",
                          "#", "#", "24")
arcpy.CalculateField_management(ws + "\\" + tempgdb + "\\MAP_EXTRACT", "SRND",
                                """Mid([NQR_DESCRIPTION],4,16)""", "VB", "#")
arcpy.AddField_management(ws + "\\" + tempgdb + "\\MAP_EXTRACT", "CRND", "TEXT",
                          "#", "#", "24")
arcpy.CalculateField_management(ws + "\\" + tempgdb + "\\MAP_EXTRACT", "CRND",
                                """[NQR_DESCRIPTION]""", "VB", "#")
arcpy.MakeRouteEventLayer_lr(
    "CRND", "NE_UNIQUE", ws + "\\" + tempgdb + "\\MAP_EXTRACT",
    "NQR_DESCRIPTION LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE", "STATE_SYSTEM",
    "#", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT")
arcpy.FeatureClassToFeatureClass_conversion("STATE_SYSTEM",
                                            ws + "\\" + tempgdb,
                                            "STATE_SYSTEM",
                                            '"DIRECTION" <3')  #9 sec
arcpy.AddField_management(ws + "\\" + tempgdb + "\\STATE_SYSTEM",
                          "NETWORKDATE", "DATE")
arcpy.CalculateField_management(ws + "\\" + tempgdb + "\\STATE_SYSTEM",
                                "NETWORKDATE", "datetime.datetime.now( )",
                                "PYTHON_9.3", "#")
arcpy.MakeFeatureLayer_management(ws + "\\" + tempgdb + "\\STATE_SYSTEM",
                                  "STATE_SYSTEM")

import LRS_CRND_SRND
import LRS_PRECISION_SDO
#import LRS_VIDEOLOG
#import ITEMS

#import MoveNet2Dev
Example No. 24
0
    def execute(self, parameters, messages):
        inFeatures = parameters[0].valueAsText
        size = parameters[1].valueAsText
        outFeatures = parameters[2].valueAsText

        outDirName, outFcName = os.path.split(outFeatures)

        inDesc = arcpy.Describe(inFeatures)

        sr = inDesc.spatialReference
        ext = inDesc.extent

        wktExtent = "POLYGON (({} {}, {} {}, {} {}, {} {}, {} {}))".format(
            ext.XMin, ext.YMin, ext.XMin, ext.YMax, ext.XMax, ext.YMax,
            ext.XMax, ext.YMin, ext.XMin, ext.YMin)

        pydir = os.path.dirname(os.path.abspath(__file__))
        sqliteExe = os.path.join(
            pydir, 'mod_spatialite-4.3.0a-win-amd64/sqlite3.exe')
        if (not os.path.exists(sqliteExe)):
            messages.addErrorMessage(
                'need mod_spatialite. see _download_mod_spatilite.ps1 and download it.'
            )
            return

        with _tempSqlite(None) as tmpLite:
            print(tmpLite.temp_dir)

            with open(tmpLite.sqlFile, 'w') as f:
                # version check: 400x? Desktop?
                installDir = os.path.join(arcpy.GetInstallInfo()["InstallDir"],
                                          "bin")
                sys.path.append(installDir)

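                # Build one multipolygon hex grid covering the extent, then
                # explode it into individual hexagon polygons via spatialite's
                # ElementaryGeometries virtual table.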
                f.write("""
SELECT load_extension('mod_spatialite');
CREATE VIRTUAL TABLE ElementaryGeometries USING VirtualElementary();

CREATE TABLE tmp_hgrid (id INTEGER PRIMARY KEY);
SELECT AddGeometryColumn('tmp_hgrid', 'geom', 0, 'MULTIPOLYGON', 'XY');

INSERT INTO tmp_hgrid
SELECT
  1,
  ST_HexagonalGrid(  
   ST_GeomFromText('""" + wktExtent + """'), """ + size + """);

CREATE TABLE hgrid (id INTEGER PRIMARY KEY);
SELECT AddGeometryColumn('hgrid', 'geom', 0, 'POLYGON', 'XY');

INSERT INTO hgrid 
SELECT
  e.item_no,
  e.geometry
FROM
 ElementaryGeometries e 
WHERE
 e.db_prefix = 'main' AND 
 e.f_table_name = 'tmp_hgrid' AND
 e.f_geometry_column = 'geom' AND
 e.origin_rowid = 1
;
""")

            res = tmpLite.excuteSql()

            p = res['process']
            stdout_data = res['stdout']
            stderr_data = res['stderr']

            if (p.returncode != 0):
                print(stderr_data)

            arcpy.FeatureClassToFeatureClass_conversion(
                in_features=os.path.join(tmpLite.sqliteFile, "main.hgrid"),
                out_path=outDirName,
                out_name=outFcName)

            arcpy.DefineProjection_management(outFeatures, sr)
Example No. 25
0
# DATA PROCESSING

# Nhood_buffers:
arcpy.Buffer_analysis("Nhoods", "nhood_buffers",
                      buffer_distance_or_field="100 Feet",
                      line_side="FULL", line_end_type="ROUND",
                      dissolve_option="LIST", dissolve_field="Name",
                      method="PLANAR")

# Parks:

parks = os.path.join(
    r"\\cityfiles\Shared\PARKS AND RECREATION SHARED\GIS Data",
    r"Parks Data.gdb\Parks")

arcpy.FeatureClassToFeatureClass_conversion(parks, "in_memory", "mem_parks")

# Delete Parks Fields
arcpy.DeleteField_management("mem_parks", drop_field="Reference;Rec_Date;Doc_Links;Subtype;Ownership;Origin;Maintenance;Platted_Size;Maint_Level;Status;Assessors_Parcel_No;Acres;Dev_Status;Owner_Type;Maint_Responsibility;Shape_Length;Shape_Area")


# COMMON AREAS

CAMA = r"W:\DATA\CAMA\Missoula\MissoulaOwnerParcel_shp\MissoulaOwnerParcel_shp.shp"

arcpy.Select_analysis(CAMA, "in_memory/mem_commons",
                      '"LegalDescr" LIKE \'%COMMON%\'')

# make new field "CAName"
arcpy.AddField_management("mem_commons", "CAName", "TEXT", "", "", 50)
input_prj = data_desc.SpatialReference.exportToString()

if not os.path.isfile(out_path) or not skip_existing:
    arcpy.ProjectRaster_management(in_path, out_path, final_prj, "BILINEAR",
                                   "0.0008333", "#", "#", input_prj)

##	Set our snapping environment to our new, reprojected
##		prediction_density layer:
popdensity_weighting_final = out_path
arcpy.env.snapRaster = out_path

##	Create a temporary copy of our census data with an appropriate
##		population field to sum and distribute:
if (arcpy.Exists("admin") == False):
    arcpy.MakeFeatureLayer_management(adminpop, "admin")
arcpy.FeatureClassToFeatureClass_conversion('admin', arcpy.env.workspace,
                                            'admin_Union')
if (arcpy.Exists("admin_Union") == False):
    arcpy.MakeFeatureLayer_management("/admin_Union.shp", "admin_Union")

if (len(arcpy.ListFields('admin_Union', "POP")) == 0):
    arcpy.AddField_management('admin_Union', 'POP', 'Double')
arcpy.CalculateField_management('admin_Union', 'POP', '!ADMINPOP!',
                                'PYTHON_9.3')

##	We need the landcover file for the country projected into our output projection:
if not ("Landcover" in dataset_folders):
    print(
        "ERROR:  No \"Landcover\" folder found!  This is required and indicates you possibly did not run the \"Data Preparation, R.r\" script or specify configuration options correctly in this Python processing script!"
    )
    exit()
else:
Example No. 27
0
            feature_lyr_field_stem = feature_lyr_field_split[1]
        
            if feature_lyr_field_stem not in keep_fields:
                feature_lyr_field_field_map_index = field_mappings.findFieldMapIndex(feature_lyr_field_stem)
                if feature_lyr_field_field_map_index > -1:
                    field_mappings.removeFieldMap(feature_lyr_field_field_map_index)
        
        
        
        for scenario in scenarios:
            scenario_fc_name = '{}_{}_{}'.format(lyr_name, q[0], scenario)
            scenario_query = q[1][scenario]
            print('Creating {} based on the condition(s) where {}...'.format(scenario_fc_name, scenario_query))
            arcpy.FeatureClassToFeatureClass_conversion(in_features = feature_lyr,
                                                        out_path = setup_gdb,
                                                        out_name = scenario_fc_name,
                                                        where_clause = scenario_query,
                                                        field_mapping = field_mappings)
            scenario_fc = os.path.join(setup_gdb, scenario_fc_name)
            scenario_out_fields = arcpy.ListFields(scenario_fc)
            for scenario_out_field in scenario_out_fields:
                if scenario_out_field.name[-2:] == "_1":
                    arcpy.DeleteField_management(scenario_fc, scenario_out_field.name)
            print('Success!')

        # Remove join 
        arcpy.RemoveJoin_management(feature_lyr)

################################################################
## DISTRIBUTE DATA FROM SETUP.GDB TO INDIVIDUAL SCENARIO GDBS ##
################################################################
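
# A hedged sketch, not part of the original (which truncates here), of the
# distribution step this banner announces; 'scenario_gdb_dir' is a
# hypothetical output folder for the per-scenario GDBs.
arcpy.env.workspace = setup_gdb
for scenario in scenarios:
    scenario_gdb = os.path.join(scenario_gdb_dir, scenario + '.gdb')
    if not arcpy.Exists(scenario_gdb):
        arcpy.CreateFileGDB_management(scenario_gdb_dir, scenario + '.gdb')
    # copy every feature class named for this scenario out of setup.gdb
    for fc_name in arcpy.ListFeatureClasses('*_' + scenario):
        arcpy.FeatureClassToFeatureClass_conversion(fc_name, scenario_gdb,
                                                    fc_name)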
Example No. 28
0
for csvFile in fileList:
    if csvFile.endswith('.csv'):
        #Extract the .csv file name for naming purposes
        csvName = os.path.basename(csvFile[:-4])

        #Name of the shapefile to create
        outFC = csvName + ".shp"

        print "Creating shapefile from csv file: %s" % csvName

        #Add the XY data
        arcpy.MakeXYEventLayer_management(csvFile, "X", "Y", "tempLay",
                                          spatialRef)

        #Convert the XY data layer to a shapefile
        arcpy.FeatureClassToFeatureClass_conversion("tempLay", outPathshp,
                                                    outFC)

        #Add fields I will need to the shapefile attribute table
        for fld in ("fldext", "wd_hlfft", "wd_1ft", "wd_2ft", "wd_3ft",
                    "wd_4ft", "wd_5ft", "wd_6ft", "fv_1mph", "fv_2mph",
                    "fv_4mph"):
            arcpy.AddField_management(outPathshp + outFC, fld, "TEXT")
Example No. 29
0
    def make_filled_tripshed_poly(self, in_df):
        '''Fills in gaps in trip shed polygon to ensure area includes empty areas that, if developed,
        would fall in the link trip shed. 
        
        Key steps:
            1 - create raw trip shed polygon based on whatever poly IDs are within the raw downloaded trip table
            2 - join this raw polygon set to a "full" poly file (e.g. all block groups in region)
            3 - tag polys in the trip shed
            4 - spatial select non-trip-shed polys that share a line segment with trip shed polys
            5 - from those spatially selected polygons, remove from the selection if they are above a certain area (area_threshold_ft2 variable)
            6 - For remaining selected non-trip-shed polys, tag them as being in the trip shed
            7 - Export this expanded set of trip-shed polys to temporary FC
            8 - Intersect this temporary FC with a "filler" FC that is one feature representing the whole region. This creates a "union FC"
                with polys that fill in the holes of the expanded poly temp FC
            9 - Select all but the largest features in the "union FC" and merge them with the expanded trip shed poly FC. This fills in the holes
                that may still exist in the expanded polyFC. Result is expanded poly FC with no "holes" in the trip shed.
            '''
        
        
        full_poly_fc = self.in_poly_fc
        raw_tripshed_fc = self.out_poly_fc_raw
        filler_fc = self.filler_poly_fc
        
        scratch_gdb = arcpy.env.scratchGDB
        
        self.create_raw_tripshed_poly(in_df)  # Make trip shed polygon

        
        fl_full_poly = 'fl_full_poly'
        fl_tripshed = 'fl_tripshed'
        
        if arcpy.Exists(fl_full_poly): arcpy.Delete_management(fl_full_poly)
        arcpy.MakeFeatureLayer_management(full_poly_fc, fl_full_poly)
        
        if arcpy.Exists(fl_tripshed): arcpy.Delete_management(fl_tripshed)
        arcpy.MakeFeatureLayer_management(raw_tripshed_fc, fl_tripshed)
        
        # attribute join raw trip shed FC to full set of polygons FC
        arcpy.AddJoin_management(fl_full_poly, self.poly_id_field, fl_tripshed, self.poly_id_field)
        
        # save joined fc as temp fc to scratch GDB
        temp_joined_fc = 'TEMP_joinedpoly_fc'
        temp_joined_fc_path = os.path.join(scratch_gdb, temp_joined_fc)
        
            
        if arcpy.Exists(temp_joined_fc_path): arcpy.Delete_management(temp_joined_fc_path)
        arcpy.FeatureClassToFeatureClass_conversion(fl_full_poly, scratch_gdb, temp_joined_fc)
        
        temp_joined_fl = 'temp_joined_fl'
        
        if arcpy.Exists(temp_joined_fl): arcpy.Delete_management(temp_joined_fl)
        arcpy.MakeFeatureLayer_management(temp_joined_fc_path, temp_joined_fl)
        
        # add field to joined FC indicating 1/0 if it's part of trip shed. default zero. 1 if there's a join match
        fld_tripshedind = "TripShed"
        arcpy.AddField_management(temp_joined_fl, fld_tripshedind, "SHORT")
        
        self.tag_if_joined_cursor(temp_joined_fl, [self.col_total_tripends, fld_tripshedind])
        
        # spatial select features that share a line with raw trip shed
        raw_tripshed_fl = 'raw_tripshed_fl'
        
        arcpy.AddMessage("filling in gaps in trip shed polygon...")
        if arcpy.Exists(raw_tripshed_fl): arcpy.Delete_management(raw_tripshed_fl)
        arcpy.MakeFeatureLayer_management(raw_tripshed_fc, raw_tripshed_fl)
        
        arcpy.SelectLayerByLocation_management(temp_joined_fl, "SHARE_A_LINE_SEGMENT_WITH", raw_tripshed_fl)
        
        # subselect where not part of original raw trip shed, but shares a line segment with the raw shed, and area <= 15,000,000 ft2 (avoid large rural block groups)
        area_threshold_ft2 = 15000000
        fld_shape_area = "Shape_Area"
        sql1 = "{} <= {} AND {} = 0".format(fld_shape_area, area_threshold_ft2, fld_tripshedind)
        arcpy.SelectLayerByAttribute_management(temp_joined_fl, "SUBSET_SELECTION", sql1)
            
        # then update the 1/0 field indicating if it's part of trip shed
        with arcpy.da.UpdateCursor(temp_joined_fl, [fld_tripshedind]) as cur:
            for row in cur:
                row[0] = 1
                cur.updateRow(row)
        
        # new selection of all polygons where trip shed flag = 1, then export that as temporary FC
        sql2 = "{} = 1".format(fld_tripshedind)
        arcpy.SelectLayerByAttribute_management(temp_joined_fl, "NEW_SELECTION", sql2)
        
        temp_fc_step2 = "TEMP_joinedpolyfc_step2"
        temp_fc_step2_path = os.path.join(scratch_gdb, temp_fc_step2)
        if arcpy.Exists(temp_fc_step2_path): arcpy.Delete_management(temp_fc_step2_path)
        arcpy.FeatureClassToFeatureClass_conversion(temp_joined_fl, scratch_gdb, temp_fc_step2)
        
        
        # Union whole region polygon with expanded "step2" trip shed
        temp_union_fc = "TEMP_poly_union_fc"
        temp_union_fc_path = os.path.join(scratch_gdb, temp_union_fc)
        temp_union_fl = 'temp_union_fl' 
        
        if arcpy.Exists(temp_union_fc_path): arcpy.Delete_management(temp_union_fc_path)
        arcpy.Union_analysis([temp_fc_step2_path, filler_fc], temp_union_fc_path)
        
        if arcpy.Exists(temp_union_fl): arcpy.Delete_management(temp_union_fl)
        arcpy.MakeFeatureLayer_management(temp_union_fc_path, temp_union_fl)
        
        # From union result, select where tripshed joined FID = -1 (parts of the region polygon that fall outside of the tripshed polygon)
        fld_fid = 'FID_{}'.format(temp_fc_step2)
        sql3 = "{} = -1".format(fld_fid)
        arcpy.SelectLayerByAttribute_management(temp_union_fl, "NEW_SELECTION", sql3)
        
        # Run multipart-to-singlepart, which splits the selected area into separate polygons
        temp_singleprt_polys_fc = "TEMP_singlepart_fillerpolys"
        temp_singleprt_polys_fc_path = os.path.join(scratch_gdb, temp_singleprt_polys_fc)
        temp_singleprt_polys_fl = 'temp_singleprt_polys_fl'
        
        if arcpy.Exists(temp_singleprt_polys_fc_path): arcpy.Delete_management(temp_singleprt_polys_fc_path)
        arcpy.MultipartToSinglepart_management(temp_union_fl, temp_singleprt_polys_fc_path)
        
        if arcpy.Exists(temp_singleprt_polys_fl): arcpy.Delete_management(temp_singleprt_polys_fl)
        arcpy.MakeFeatureLayer_management(temp_singleprt_polys_fc_path, temp_singleprt_polys_fl)
        
        
        # From the multipart, select all but the geographically largest polygon and union to the block-group file
        # doing this will make it so you only get the "hole filler" pieces
        values = []
        with arcpy.da.SearchCursor(temp_singleprt_polys_fc_path, ["SHAPE@AREA"]) as cur:
            for row in cur:
                values.append(row[0])
                
        largest_poly_area = max(values)
        
        sql = "{} < {}".format(fld_shape_area, largest_poly_area)
        arcpy.SelectLayerByAttribute_management(temp_singleprt_polys_fl, "NEW_SELECTION", sql)
        

        # Merge the "hole fillers" with the expanded trip shed (i.e., raw trip shed + "share a line segment" polys added to it).
        # Result will be the block-group trip shed, with the holes filled in with non-block-group "hole filler" polygons
        if arcpy.Exists(self.fc_tripshed_out_filled): arcpy.Delete_management(self.fc_tripshed_out_filled)
        arcpy.Merge_management([temp_fc_step2_path, temp_singleprt_polys_fl], self.fc_tripshed_out_filled)
        
        # Clean up temporary feature classes from the scratch GDB
        for fc in [temp_joined_fc_path, temp_fc_step2_path, temp_union_fc_path, temp_singleprt_polys_fc_path]:
            try:
                arcpy.Delete_management(fc)
            except Exception:
                continue
Example No. 30
0
intersected_gaul_erased = outgdb + "intersected_gaul_erased"
wdpa_only_multi_iso3 = outgdb + "wdpa only multi iso3"
wdpa_multi_iso3_together = outgdb + "wdpa_multi_iso3_together"
wdpa_multi_iso3_together_dissolved_ISO3final = outgdb + "wdpa_multi_iso3_together_dissolved_ISO3final"
wdpa_multi_iso3_together_dissolved_ISO3final_LYR = "wdpa_multi_iso3_together_dissolved_ISO3final_LYR"
wdpa_multi_iso3_together_dissolved_ISO3final_1km2 = outgdb + "wdpa_multi_iso3_together_dissolved_ISO3final_1km2"
wdpa_multi_iso3_together_dissolved_ISO3final_1km2_singleparted = outgdb + "wdpa_multi_iso3_together_dissolved_ISO3final_1km2_singleparted"
wdpa_flat_1km2_final = outgdb + "wdpa_flat_1km2_final"
All_distances_300km_Nov2019 = "All_distances_300km_Nov2019"
outfile_attr = "Attrib_table_wdpa_flat_1km2_final_mar2020.txt"
outfile_dist = "Z:/globes/USERS/GIACOMO/protconn/data/all_distances_300km_mar2020.txt"
wdpa_for_protconn_gpkg = "Z:/globes/USERS/GIACOMO/protconn/data/wdpa_for_protconn.gpkg"

# Process: Feature Class to Feature Class
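# The field_mapping string below keeps only the wdpaid and iso3 fields from
# the source shapefile; all other attributes are dropped on import.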
arcpy.FeatureClassToFeatureClass_conversion(
    input_shp, outgdb, "wdpa_all_relevant_shape_simpl", "",
    "wdpaid \"wdpaid\" true true false 24 Double 15 23 ,First,#,Z:\\globes\\USERS\\GIACOMO\\protconn\\wdpa_all_relevant_shape_simpl.shp,wdpaid,-1,-1;iso3 \"iso3\" true true false 50 Text 0 0 ,First,#,Z:\\globes\\USERS\\GIACOMO\\protconn\\wdpa_all_relevant_shape_simpl.shp,iso3,-1,-1",
    "")

# Process: Define Projection
arcpy.DefineProjection_management(
    wdpa_all_relevant_shape_simpl,
    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
)
print("Simplified features imported")

# Process: Repair Geometry (2)
arcpy.RepairGeometry_management(wdpa_all_relevant_shape_simpl, "DELETE_NULL")
print("Geometries repaired")

# Process: Dissolve
arcpy.Dissolve_management(wdpa_all_relevant_shape_simpl,