Example #1
    def execute(self, parameters, messages):
        """The source code of the tool."""

        # local variables and env
        arcpy.CreateFileGDB_management("E:/gina/poker/gdb",
                                       parameters[0].valueAsText)
        arcpy.env.workspace = "E:/gina/poker/gdb/" + parameters[
            0].valueAsText + ".gdb"
        arcpy.env.overwriteOutput = True
        adnr_lo_shp = "E:/gina/poker/shp/wip/land_ownership_data/adnr_gls_dls_merge_20170823_v1.shp"
        pfrr_popn_places = "E:/gina/poker/shp/wip/popn_places_data/pokerflat_popn_places_gcs_wgs84_to_akalbers_2.shp"
        afs_known_sites = "E:/gina/poker/shp/afs_data/afs_known_sites_20180629_3338.shp"
        pipTable = "E:/gina/poker/dbf/predicted_impact_xy.dbf"
        pip_point_shp = "E:/gina/poker/pip/pip_point.shp"
        pip_point_3338 = "E:/gina/poker/pip/pip_point_3338.shp"
        pip_buffer_shp = "E:/gina/poker/pip/pip_buffer.shp"
        pip_range_rings_shp = "E:/gina/poker/pip/pip_range_rings.shp"
        pip_lo_in_buffer_shp = "E:/gina/poker/pip/pip_lo_in_buffer.shp"
        pip_lo_in_buf_sum_dbf = "E:/gina/poker/pip/pip_lo_in_buf_sum.dbf"
        pip_lo_in_buf_sum_csv = "E:/gina/poker/pip/pip_lo_in_buf_sum.csv"
        pip_popn_places_in_buffer_shp = "E:/gina/poker/pip/pip_popn_places_in_buffer.shp"
        pip_known_sites_in_buffer_shp = "E:/gina/poker/pip/pip_known_sites_in_buffer.shp"
        x = parameters[2].valueAsText
        y = parameters[3].valueAsText
        r = parameters[6].valueAsText + " NauticalMiles"
        rr1 = (float(parameters[6].valueAsText)) / 3
        rr2 = (rr1 * 2)
        rrs = str(rr1) + ";" + str(rr2) + ";" + r.split(" ")[0]
        pipLayer = "pipLayer1"
        srs = arcpy.SpatialReference("Alaska Albers Equal Area Conic")
        intersect_fc1 = [adnr_lo_shp, pip_buffer_shp]
        intersect_fc2 = [pfrr_popn_places, pip_buffer_shp]
        intersect_fc3 = [afs_known_sites, pip_buffer_shp]
        mxd = arcpy.mapping.MapDocument("current")
        dataframe = arcpy.mapping.ListDataFrames(mxd)[0]
        sourceLoSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/lo2.lyr")
        sourcePipSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/pip2.lyr")
        sourceRrsSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/rrs.lyr")
        sourcePopSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/pop.lyr")
        sourceAfsSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/afs2.lyr")

        # Process: Calculate Lon Field
        arcpy.CalculateField_management(pipTable, "Lon", x, "PYTHON", "")

        # Process: Calculate Lat Field
        arcpy.CalculateField_management(pipTable, "Lat", y, "PYTHON", "")

        # Process: Make XY Event Layer
        arcpy.MakeXYEventLayer_management(
            pipTable, "Lon", "Lat", pipLayer,
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
            "")

        # Process: Copy Features
        arcpy.CopyFeatures_management(pipLayer, pip_point_shp, "", "0", "0",
                                      "0")

        # Process: Project pip point
        arcpy.Project_management(pip_point_shp, pip_point_3338, srs)

        # Process: Buffer pip point
        arcpy.Buffer_analysis(pip_point_3338, pip_buffer_shp, r, "FULL",
                              "ROUND", "NONE", "", "PLANAR")

        # Process: Multiple Ring Buffer
        arcpy.MultipleRingBuffer_analysis(pip_point_3338, pip_range_rings_shp,
                                          rrs, "NauticalMiles", "", "NONE",
                                          "FULL")

        # Process: Intersect pip buffer with land ownership
        arcpy.Intersect_analysis(intersect_fc1, pip_lo_in_buffer_shp, "ALL",
                                 "", "INPUT")

        # Process: Intersect pip buffer with popn places
        arcpy.Intersect_analysis(intersect_fc2, pip_popn_places_in_buffer_shp,
                                 "ALL", "", "INPUT")

        # Process: Intersect pip buffer with afs known sites
        arcpy.Intersect_analysis(intersect_fc3, pip_known_sites_in_buffer_shp,
                                 "ALL", "", "INPUT")

        # Process: Make feature layers and add to the map
        ## pip feature class list
        fclist = arcpy.ListFeatureClasses()

        ## pip layer
        arcpy.MakeFeatureLayer_management(pip_point_3338,
                                          "Predicted Impact Point")

        ## land ownership layer
        arcpy.MakeFeatureLayer_management(
            pip_lo_in_buffer_shp,
            "Land Ownership within 3sigma of Predicted Impact Point")

        ## Range Rings
        arcpy.MakeFeatureLayer_management(pip_range_rings_shp, "Range Rings")

        ## populated places layer
        popn_places_records = int(
            arcpy.GetCount_management(pip_popn_places_in_buffer_shp).getOutput(
                0))
        if popn_places_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_popn_places_in_buffer_shp,
                "Populated Places within 3sigma of Predicted Impact Point")
            addPipPopnPlacesLayer = arcpy.mapping.Layer(
                "Populated Places within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipPopnPlacesLayer)

        ## known sites layer
        known_sites_records = int(
            arcpy.GetCount_management(pip_known_sites_in_buffer_shp).getOutput(
                0))
        if known_sites_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_known_sites_in_buffer_shp,
                "AFS Known Sites within 3sigma of Predicted Impact Point")
            addPipKnownSitesLayer = arcpy.mapping.Layer(
                "AFS Known Sites within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipKnownSitesLayer)

        addPipPointLayer = arcpy.mapping.Layer("Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, addPipPointLayer)

        add3sigmaLoLayer = arcpy.mapping.Layer(
            "Land Ownership within 3sigma of Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, add3sigmaLoLayer)

        addRangeRings = arcpy.mapping.Layer("Range Rings")
        arcpy.mapping.AddLayer(dataframe, addRangeRings)

        # Add and calc Acres field for intersected Land Ownership
        arcpy.AddField_management(pip_lo_in_buffer_shp, "Acres", "DOUBLE")
        arcpy.CalculateField_management(pip_lo_in_buffer_shp, "Acres",
                                        "!shape.area@acres!", "PYTHON_9.3", "")

        # Summarize intersected Land Ownership by Owner and total Acres
        arcpy.Statistics_analysis(pip_lo_in_buffer_shp, pip_lo_in_buf_sum_dbf,
                                  "Acres SUM", "OWNER")
        arcpy.MakeTableView_management(pip_lo_in_buf_sum_dbf,
                                       "pip_lo_in_buf_sum_view")
        add3sigmaLoSumTbl = arcpy.mapping.TableView(pip_lo_in_buf_sum_dbf)
        arcpy.mapping.AddTableView(dataframe, add3sigmaLoSumTbl)

        # Symbolize and Refresh
        lo_layer = arcpy.mapping.ListLayers(
            mxd, "*Land Ownership within 3sigma of Predicted Impact Point*",
            dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, lo_layer, sourceLoSymbologyLayer,
                                  True)
        lo_layer.symbology.addAllValues()

        pip_layer = arcpy.mapping.ListLayers(mxd, "*Predicted Impact Point*",
                                             dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, pip_layer,
                                  sourcePipSymbologyLayer, True)

        rr_layer = arcpy.mapping.ListLayers(mxd, "*Range Rings*", dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, rr_layer, sourceRrsSymbologyLayer,
                                  True)

        if popn_places_records > 0:
            pop_layer = arcpy.mapping.ListLayers(mxd, "*Populated Places*",
                                                 dataframe)[0]
            arcpy.mapping.UpdateLayer(dataframe, pop_layer,
                                      sourcePopSymbologyLayer, True)

        if known_sites_records > 0:
            afs_layer = arcpy.mapping.ListLayers(mxd, "*Known Sites*",
                                                 dataframe)[0]
            arcpy.mapping.UpdateLayer(dataframe, afs_layer,
                                      sourceAfsSymbologyLayer, True)

        arcpy.RefreshTOC()
        arcpy.RefreshActiveView()

        # Populate Mission GDB
        mission_layers = [
            pip_point_3338, pip_lo_in_buffer_shp,
            pip_popn_places_in_buffer_shp, pip_range_rings_shp,
            pip_known_sites_in_buffer_shp
        ]
        arcpy.FeatureClassToGeodatabase_conversion(mission_layers,
                                                   arcpy.env.workspace)

        return
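    # The execute() method above reads tool parameters by index: 0 = output GDB
    # name, 2/3 = impact-point longitude/latitude, 6 = buffer radius in nautical
    # miles. A minimal getParameterInfo() sketch that would supply matching
    # inputs follows; the parameter names, the GPString data type for every
    # slot, and the unused placeholder slots are assumptions, not part of the
    # original toolbox.
    def getParameterInfo(self):
        """Hypothetical parameter definitions matching the indices used in execute()."""
        names = ["gdb_name", "param1", "longitude", "latitude",
                 "param4", "param5", "radius_nm"]
        params = []
        for n in names:
            params.append(arcpy.Parameter(displayName=n,
                                          name=n,
                                          datatype="GPString",
                                          parameterType="Required",
                                          direction="Input"))
        return params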
Example #2
        centroid_coords = []
        for feature in cursor:
            centroid_coords.append(feature[0])
    del cursor

    point = arcpy.Point()
    pointGeometryList = []

    for pt in centroid_coords:
        point.X = pt[0]
        point.Y = pt[1]

        pointGeometry = arcpy.PointGeometry(point)
        pointGeometryList.append(pointGeometry)

    arcpy.CopyFeatures_management(pointGeometryList, projectPoint)
    arcpy.DefineProjection_management(projectPoint, spatialRef)
    print "A point has been created at the center of the project polygon."
except:
    print "There was a problem creating the project center point."
    print arcpy.GetMessages()

# Add project point to inset map
try:
    projectPointLayer = arcpy.mapping.Layer(projectPoint)
    arcpy.mapping.AddLayer(insetDF, projectPointLayer)
    print "The project center point has been added to the inset map."
except:
    print "There was a problem adding the project center point to the inset map."
    print arcpy.GetMessages()
Example #3
    def createProjectAreas(self):
        '''
        #####################################################################################
        #### --------------------------------GEOPROCESSES--------------------------------####
        #####################################################################################
        ############################################
        ## Set environments and scratch workspace ##
        ############################################
        '''
        # set environments for ansey raster analyses
        arcpy.env.snapRaster = Raster(self.templateRaster)
        arcpy.env.extent = self.countryBounds
        arcpy.env.mask = self.countryBounds
        arcpy.env.cellSize = Raster(self.templateRaster)

        env.workspace = self.scratch
        env.scratchWorkspace = self.scratch

        '''
        #################################################
        ## Check for fishnet file and create if needed ##
        #################################################
        '''

        fishnetSizeStr = str(self.fishnetSize).replace(".", "_")

        fishnet = "in_memory/fishnet_" + fishnetSizeStr \
                  + "km"  ## MUST add .shp if not putting file in gdb (for add field function)
        # fishnet = r"R:\users\anagha.uppal\MapRE\MapRE_data\OUTPUTS\SAPP\SAPP_Outputs.gdb" + "\\" + "fishnet_" + fishnetSizeStr + "km"
        clippedFishnet = self.fishnetDirectory + "\\" + "fishnet_" + fishnetSizeStr + "km"

        env.outputCoordinateSystem = self.templateRaster
        if not (arcpy.Exists(clippedFishnet)):
            # Create fishnet if one does not already exist:
            arcpy.AddMessage("Creating fishnet " + fishnetSizeStr + " km in size to file: " + fishnet)

            extent = Raster(self.templateRaster).extent

            XMin = extent.XMin  ## left

            YMin = extent.YMin  ## Bottom

            origin = str(XMin) + " " + str(YMin)

            YMax = extent.YMax  ## top

            ycoord = str(XMin) + " " + str(YMax)

            arcpy.CreateFishnet_management(fishnet, origin, ycoord,
                                           self.fishnetSize * 1000, self.fishnetSize * 1000,
                                           "", "", "", "NO_LABELS", self.countryBounds, "POLYGON")

            fields = arcpy.ListFields(fishnet)
            for field in fields:
                arcpy.AddMessage(field.name)
            # Change fishnet Object ID name:
            arcpy.AddField_management(fishnet, "Text", "Text", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            # Process: Calculate Field to create new alphanumeric OID column
            arcpy.CalculateField_management(fishnet, "Text", "'A' + str(!OID!)", "PYTHON_9.3", "")

            arcpy.AddMessage("Creating country-boundary-clipped fishnet " + fishnetSizeStr
                             + " km in size to file: " + clippedFishnet)
            arcpy.Clip_analysis(fishnet, self.countryBounds, clippedFishnet)

        arcpy.AddMessage("Copying fishnet to memory :" + clippedFishnet)
        fishnetInMemory = arcpy.CopyFeatures_management(clippedFishnet, "in_memory/clipped_fishnet")

        # Temporary variables:
        IntermediateIntersect_geoUnits = "IntermediateIntersect_geoUnits"
        Intermediate = "in_memory/intermediate_2"
        IntermediateErased = "in_memory/intermediateErased_2"
        IntermediateIntersect = "in_memory/IntermediateIntersect_2"
        IntermediateIntersect_singlept = "in_memory/IntermediateIntersect_singlept"
        # IntermediateAggregatedFeatures = "in_memory/IntermediateAggregatedFeatures_2"
        # IntermediateIntersectErased = "in_memory/IntermediateIntersectErased_2"
        IntermediateEliminated = "in_memory/IntermediateEliminated"
        IntermediateEliminated2 = "in_memory/IntermediateEliminated2"
        # IntermediateSelectedForAggregation1 = "in_memory/IntermediateSelectedForAggregation1_2"
        # IntermediateSelectedForAggregation2 = "in_memory/IntermediateSelectedForAggregation2_2"
        # IntermediateIntersect_geoUnits_2 = "in_memory/IntermediateIntersect_geoUnits_2"

        '''
        ###############
        ## Intersect ##
        ###############
        '''
        ## COPY SUITABLE SITES FEATURE CLASS TO MEMORY
        sites = arcpy.CopyFeatures_management(self.suitableSites, "in_memory/suitableSites")

        ## INTERSECT Geographic Unit of Analysis, if provided
        if arcpy.Exists(self.geoUnits):
            arcpy.AddMessage("Intersecting by geographic units of analysis")
            arcpy.Intersect_analysis([sites, self.geoUnits], IntermediateIntersect_geoUnits, "NO_FID")
        else:
            IntermediateIntersect_geoUnits = sites

        # calculate area:
        arcpy.AddField_management(IntermediateIntersect_geoUnits, "Area", "DOUBLE", "", "", "", "", "NULLABLE",
                                  "NON_REQUIRED", "")
        # Process: Calculate Field
        arcpy.CalculateField_management(IntermediateIntersect_geoUnits, "Area", "!Shape.Area@squarekilometers!",
                                        "PYTHON_9.3", "")

        # select polygons greater than max area to split
        arcpy.Select_analysis(IntermediateIntersect_geoUnits, Intermediate, self.whereClauseMax)
        # erase selected areas from potentialSites (isolate all polygons less than max to merge later)
        arcpy.Erase_analysis(IntermediateIntersect_geoUnits, Intermediate, IntermediateErased)

        # Intersect regions above max area using fishnet
        arcpy.AddMessage("Intersecting by fishnet")
        arcpy.Intersect_analysis([Intermediate, fishnetInMemory], IntermediateIntersect, "NO_FID")
        arcpy.AddMessage("finished intersecting by fishnet")
        # Process: Calculate Area
        arcpy.CalculateField_management(IntermediateIntersect, "Area", "!Shape.Area@squarekilometers!", "PYTHON_9.3",
                                        "")

        '''
        ################################
        ## Create singlepart polygons ##
        ################################
        '''
        ## Multi-part to single part
        arcpy.MultipartToSinglepart_management(in_features=IntermediateIntersect,
                                               out_feature_class=IntermediateIntersect_singlept)
        ## Recalculate area
        arcpy.CalculateField_management(IntermediateIntersect_singlept, "Area", "!Shape.Area@squarekilometers!",
                                        "PYTHON_9.3", "")
        '''
        ###############################
        ## Eliminate slivers - twice ##
        ###############################
        '''
        arcpy.AddMessage("Starting elimination")
        # Execute MakeFeatureLayer
        tempLayer = arcpy.MakeFeatureLayer_management(IntermediateIntersect_singlept, "tempLayer")

        # # Execute SelectLayerByAttribute to define features to be eliminated
        # arcpy.SelectLayerByAttribute_management(in_layer_or_view=tempLayer, selection_type="NEW_SELECTION",
        #                                         where_clause=self.whereClauseMin)
        #
        # # Execute Eliminate
        # arcpy.Eliminate_management("tempLayer", IntermediateEliminated, "LENGTH")

        ## iteration 2

        # # Execute MakeFeatureLayer
        # IntermediateEliminated_tempLayer = arcpy.MakeFeatureLayer_management(IntermediateEliminated,
        #                                                                      "IntermediateEliminated")
        #
        # # Execute SelectLayerByAttribute to define features to be eliminated
        # arcpy.SelectLayerByAttribute_management(in_layer_or_view=IntermediateEliminated_tempLayer,
        #                                         selection_type="NEW_SELECTION", where_clause=self.whereClauseMin)
        #
        # # Execute Eliminate
        # arcpy.Eliminate_management(IntermediateEliminated_tempLayer, IntermediateEliminated2, "LENGTH")

        '''
        ################################################
        ## Merge aggregated with intersected features ##
        ################################################
        '''
        # # Merge aggregated polygons with larger, split polygons
        # merged = arcpy.Merge_management([IntermediateErased, IntermediateEliminated2], "in_memory/intermediateProjects")

        ## AGAIN, INTERSECT Geographic Unit of Analysis, if provided
        if arcpy.Exists(self.geoUnits):
            arcpy.AddMessage("Intersecting by geographic units of analysis")
            arcpy.Intersect_analysis([tempLayer, self.geoUnits], IntermediateIntersect_geoUnits, "NO_FID")
            arcpy.AddMessage("Finished intersecting by geographic units of analysis")
        else:
            IntermediateIntersect_geoUnits = tempLayer

        # recalculate area
        arcpy.CalculateField_management(IntermediateIntersect_geoUnits, "Area", "!Shape.Area@squarekilometers!",
                                        "PYTHON_9.3", "")
        arcpy.CopyFeatures_management(IntermediateIntersect_geoUnits, "intermediate_1")
        # select areas above minimum and save ## CREATE PROJECT FEATURE CLASS
        try:
            arcpy.Select_analysis(IntermediateIntersect_geoUnits, self.projectsOut, self.whereClauseMinContArea)
        except:
            arcpy.CopyFeatures_management(IntermediateIntersect_geoUnits, self.projectsOut)
        ## Process: Summary Statistics
        ## arcpy.Statistics_analysis(selectOut, outputFGDB + filename + '_stats', "Area SUM", "") ## CREATE PROJECT STATS
        arcpy.AddMessage('Finished merging')
Example #4
inFeatures = os.path.join(workspce, cad, cad_use[1])
outFeatureClass = os.path.join(workspce, creat_gdb, point_name)
arcpy.FeatureToPoint_management(inFeatures, outFeatureClass)

where_clause = " RefName LIKE '5%' OR RefName LIKE '3%'"
select_fc = os.path.join(workspce, creat_gdb, select_anno)
arcpy.Select_analysis(os.path.join(workspce, creat_gdb, point_name), select_fc , where_clause)

outFeatureClass = os.path.join(workspce, creat_gdb, sp_fc)
arcpy.SpatialJoin_analysis(os.path.join(workspce, creat_gdb, polygon_name), 
                           os.path.join(workspce, creat_gdb, select_anno), 
                           outFeatureClass)

arcpy.MakeFeatureLayer_management(os.path.join(workspce, creat_gdb, sp_fc), "lyr")
arcpy.SelectLayerByLocation_management ("lyr", select_features = area_fc)
arcpy.CopyFeatures_management("lyr", os.path.join(workspce, creat_gdb, final_fc))

fc = os.path.join(workspce, creat_gdb, final_fc)
class_field = 'RefName'
with arcpy.da.SearchCursor(fc, class_field) as cursor:
    for row in cursor:
        doc = open(os.path.join(workspce, 'map_csv.csv'),'a') 
        if row[0] is not None:          
            doc.write(row[0])
            doc.write('\n')
        doc.close()
print u"CSV文件生成!"    

end_time = time.time()
print "完毕,用时%.2f秒" % (end_time - start_time)
Example #5
def STEP7_calc_centrality():
    """ Analyze network centrality using Circuitscape
        given Linkage Mapper outputs

    """
    try:
        lu.dashline(0)
        gprint('Running script ' + _SCRIPT_NAME)

        arcpy.env.workspace = cfg.SCRATCHDIR

        # Check for valid LCP shapefile
        prevLcpShapefile = lu.get_lcp_shapefile(None, thisStep=7)
        if not arcpy.Exists(prevLcpShapefile):
            msg = ('Cannot find an LCP shapefile from step 5.  Please '
                   'rerun that step and any previous ones if necessary.')
            lu.raise_error(msg)

        # Remove lcp shapefile from this step if run previously
        lcpShapefile = path.join(cfg.DATAPASSDIR, "lcpLines_s7.shp")
        lu.delete_data(lcpShapefile)

        invalidFNs = ['fid', 'id', 'oid', 'shape']
        if cfg.COREFN.lower() in invalidFNs:
            #if cfg.COREFN == 'FID' or cfg.COREFN == 'ID':
            lu.dashline(1)
            msg = ('ERROR: Core area field names ID, FID, SHAPE, and OID are'
                   ' reserved for ArcGIS. \nPlease choose another field- must'
                   ' be a positive integer.')
            lu.raise_error(msg)

        lu.dashline(1)
        gprint('Mapping centrality of network cores and links'
               '\nusing Circuitscape....')
        lu.dashline(0)

        # set the analysis extent and cell size to that of the resistance
        # surface
        coreCopy = path.join(cfg.SCRATCHDIR, 'cores.shp')

        arcpy.CopyFeatures_management(cfg.COREFC, coreCopy)
        exists = False
        field_names = [field.name for field in arcpy.ListFields(coreCopy)]
        if "CF_Central" in field_names:
            exists = True
        if not exists:
            # arcpy.AddField_management(coreCopy, "CF_Central", "DOUBLE", "10", "2")
            arcpy.AddField_management(coreCopy, "CF_Central", "DOUBLE")

        inLinkTableFile = lu.get_prev_step_link_table(step=7)
        linkTable = lu.load_link_table(inLinkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg = ('\nThere are no linkages. Bailing.')
            lu.raise_error(msg)

        if linkTable.shape[1] < 16:  # If linktable has no entries from prior
            # centrality or pinchpoint analyses
            extraCols = npy.zeros((numLinks, 6), dtype="float64")
            linkTable = linkTable[:, 0:10]
            linkTable = npy.append(linkTable, extraCols, axis=1)
            linkTable[:, cfg.LTB_LCPLEN] = -1
            linkTable[:, cfg.LTB_CWDEUCR] = -1
            linkTable[:, cfg.LTB_CWDPATHR] = -1
            linkTable[:, cfg.LTB_EFFRESIST] = -1
            linkTable[:, cfg.LTB_CWDTORR] = -1
            del extraCols

        linkTable[:, cfg.LTB_CURRENT] = -1

        coresToProcess = npy.unique(linkTable[:,
                                              cfg.LTB_CORE1:cfg.LTB_CORE2 + 1])
        maxCoreNum = max(coresToProcess)
        del coresToProcess

        lu.dashline(0)

        coreList = linkTable[:, cfg.LTB_CORE1:cfg.LTB_CORE2 + 1]
        coreList = npy.sort(coreList)
        #gprint('There are ' + str(len(npy.unique(coreList))) ' core areas.')

        # set up directory for centrality
        INCENTRALITYDIR = cfg.CENTRALITYBASEDIR
        OUTCENTRALITYDIR = path.join(cfg.CENTRALITYBASEDIR,
                                     cfg.CIRCUITOUTPUTDIR_NM)
        CONFIGDIR = path.join(INCENTRALITYDIR, cfg.CIRCUITCONFIGDIR_NM)

        # Set Circuitscape options and write config file
        options = lu.setCircuitscapeOptions()
        options['data_type'] = 'network'
        options['habitat_file'] = path.join(INCENTRALITYDIR,
                                            'Circuitscape_graph.txt')
        # Setting point file equal to graph to do all pairs in Circuitscape
        options['point_file'] = path.join(INCENTRALITYDIR,
                                          'Circuitscape_graph.txt')
        outputFN = 'Circuitscape_network.out'
        options['output_file'] = path.join(OUTCENTRALITYDIR, outputFN)
        configFN = 'Circuitscape_network.ini'
        outConfigFile = path.join(CONFIGDIR, configFN)
        lu.writeCircuitscapeConfigFile(outConfigFile, options)

        delRows = npy.asarray(npy.where(linkTable[:, cfg.LTB_LINKTYPE] < 1))
        delRowsVector = npy.zeros((delRows.shape[1]), dtype="int32")
        delRowsVector[:] = delRows[0, :]
        LT = lu.delete_row(linkTable, delRowsVector)
        del delRows
        del delRowsVector
        graphList = npy.zeros((LT.shape[0], 3), dtype="float64")
        graphList[:, 0] = LT[:, cfg.LTB_CORE1]
        graphList[:, 1] = LT[:, cfg.LTB_CORE2]
        graphList[:, 2] = LT[:, cfg.LTB_CWDIST]

        write_graph(options['habitat_file'], graphList)
        gprint('\nCalculating current flow centrality using Circuitscape...')

        memFlag = lu.call_circuitscape(cfg.CSPATH, outConfigFile)

        outputFN = 'Circuitscape_network_branch_currents_cum.txt'
        currentList = path.join(OUTCENTRALITYDIR, outputFN)

        if not arcpy.Exists(currentList):
            write_graph(options['habitat_file'], graphList)
            gprint('\nCalculating current flow centrality using Circuitscape '
                   '(2nd try)...')
            memFlag = lu.call_circuitscape(cfg.CSPATH, outConfigFile)
            if not arcpy.Exists(currentList):
                lu.dashline(1)
                msg = ('ERROR: No Circuitscape output found.\n'
                       'It looks like Circuitscape failed.')
                arcpy.AddError(msg)
                lu.write_log(msg)
                exit(1)

        currents = load_graph(currentList,
                              graphType='graph/network',
                              datatype='float64')

        numLinks = currents.shape[0]
        for x in range(0, numLinks):
            corex = currents[x, 0]
            corey = currents[x, 1]

            #linkId = LT[x,cfg.LTB_LINKID]
            row = lu.get_links_from_core_pairs(linkTable, corex, corey)
            #row = lu.get_linktable_row(linkId, linkTable)
            linkTable[row, cfg.LTB_CURRENT] = currents[x, 2]

        coreCurrentFN = 'Circuitscape_network_node_currents_cum.txt'
        nodeCurrentList = path.join(OUTCENTRALITYDIR, coreCurrentFN)
        nodeCurrents = load_graph(nodeCurrentList,
                                  graphType='graph/network',
                                  datatype='float64')

        numNodeCurrents = nodeCurrents.shape[0]
        rows = arcpy.UpdateCursor(coreCopy)
        for row in rows:
            coreID = row.getValue(cfg.COREFN)
            for i in range(0, numNodeCurrents):
                if coreID == nodeCurrents[i, 0]:
                    row.setValue("CF_Central", nodeCurrents[i, 1])
                    break
            rows.updateRow(row)
            #row = rows.newRow()
        del row, rows
        gprint('Done with centrality calculations.')

        finalLinkTable = lu.update_lcp_shapefile(linkTable,
                                                 lastStep=5,
                                                 thisStep=7)
        linkTableFile = path.join(cfg.DATAPASSDIR, "linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFile, inLinkTableFile)
        linkTableFinalFile = path.join(cfg.OUTPUTDIR,
                                       cfg.PREFIX + "_linkTable_s5_plus.csv")
        lu.write_link_table(finalLinkTable, linkTableFinalFile,
                            inLinkTableFile)
        gprint('Copy of final linkTable written to ' + linkTableFinalFile)

        finalCoreFile = path.join(cfg.CORECENTRALITYGDB, cfg.PREFIX + '_Cores')
        #copy core area map to gdb.
        if not arcpy.Exists(cfg.CORECENTRALITYGDB):
            arcpy.CreateFileGDB_management(
                cfg.OUTPUTDIR, path.basename(cfg.CORECENTRALITYGDB))
        arcpy.CopyFeatures_management(coreCopy, finalCoreFile)

        gprint('Creating shapefiles with linework for links.')
        lu.write_link_maps(linkTableFinalFile, step=7)

        # Copy final link maps to gdb and clean up.
        lu.copy_final_link_maps(step=7)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except:
        lu.dashline(1)
        gprint('****Failed in step 7. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
Example #6
def smallFeaturesCheck(inFds, outFds, mapScaleString, outHtml, tooShortArcMM,
                       tooSmallAreaMM2, tooSkinnyWidthMM):
    # get inputs
    inCaf = os.path.basename(getCaf(inFds))
    inMup = inCaf.replace('ContactsAndFaults', 'MapUnitPolys')
    nameToken = inCaf.replace('ContactsAndFaults', '')
    # set mapscale and mapunits
    mapUnit1 = arcpy.Describe(inFds).spatialReference.linearUnitName
    mapUnit1 = mapUnit1.upper()
    if mapUnit1.find('FOOT') > -1:
        mapUnits = 'feet'
    else:
        mapUnits = 'meters'
    mapScale = 1.0 / float(mapScaleString)

    tooShortArcLength = tooShortArcMM / 1000.0 / mapScale
    tooSmallPolyArea = tooSmallAreaMM2 / 1e6 / mapScale / mapScale
    #addMsgAndPrint(str(tooSmallAreaMM2)+'  '+str(tooSmallPolyArea))
    tooSkinnyWidth = tooSkinnyWidthMM / 1000 / mapScale
    if mapUnits == 'feet':
        tooShortArcLength = tooShortArcLength * 3.28
        tooSmallPolyArea = tooSmallPolyArea * 3.28 * 3.28
        tooSkinnyWidth = tooSkinnyWidth * 3.28

    tooShortArcs = outFds + '/errors_' + nameToken + 'ShortArcs'
    tooSmallPolys = outFds + '/errors_' + nameToken + 'SmallPolys'
    tooSmallPolyPoints = outFds + '/errors_' + nameToken + 'SmallPolyPoints'
    tooSkinnyPolys = outFds + '/errors_' + nameToken + 'SkinnyPolys'
    testAndDelete(tooShortArcs)
    testAndDelete(tooSmallPolys)
    testAndDelete(tooSmallPolyPoints)
    testAndDelete(tooSkinnyPolys)

    outHtml.write('<h3>Small feature inventory</h3>\n')
    outHtml.write('&nbsp;&nbsp; map scale = 1:' + mapScaleString + '<br>\n')

    # short arcs
    testAndDelete('cafLayer')
    arcpy.MakeFeatureLayer_management(
        inFds + '/' + inCaf, 'cafLayer',
        'Shape_Length < ' + str(tooShortArcLength))
    arcpy.CopyFeatures_management('cafLayer', tooShortArcs)
    outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooShortArcs)) +
                  ' arcs shorter than ' + str(tooShortArcMM) + ' mm<br>\n')
    if numberOfRows(tooShortArcs) == 0:
        testAndDelete(tooShortArcs)
    if arcpy.Exists(inMup):
        # small polys
        addMsgAndPrint('  tooSmallPolyArea = ' + str(tooSmallPolyArea))
        testAndDelete('mupLayer')
        arcpy.MakeFeatureLayer_management(
            inFds + '/' + inMup, 'mupLayer',
            'Shape_Area < ' + str(tooSmallPolyArea))
        arcpy.CopyFeatures_management('mupLayer', tooSmallPolys)
        addMsgAndPrint('  ' + str(numberOfRows(tooSmallPolys)) +
                       ' too-small polygons')
        arcpy.FeatureToPoint_management(tooSmallPolys, tooSmallPolyPoints,
                                        'INSIDE')
        outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooSmallPolys)) +
                      ' polys with area less than ' + str(tooSmallAreaMM2) +
                      ' mm<sup>2</sup><br>\n')
        # sliver polys
        arcpy.CopyFeatures_management(inFds + '/' + inMup, tooSkinnyPolys)
        testAndDelete('sliverLayer')
        arcpy.MakeFeatureLayer_management(tooSkinnyPolys, 'sliverLayer')
        arcpy.AddField_management('sliverLayer', 'AreaDivLength', 'FLOAT')
        arcpy.CalculateField_management('sliverLayer', 'AreaDivLength',
                                        "!Shape_Area! / !Shape_Length!",
                                        "PYTHON")
        arcpy.SelectLayerByAttribute_management(
            'sliverLayer', 'NEW_SELECTION',
            "AreaDivLength >= " + str(tooSkinnyWidth))
        arcpy.DeleteFeatures_management('sliverLayer')
        addMsgAndPrint('  tooSkinnyPolyWidth = ' + str(tooSkinnyWidth))
        addMsgAndPrint('  ' + str(numberOfRows(tooSkinnyPolys)) +
                       ' too-skinny polygons')

        outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooSkinnyPolys)) +
                      ' polys with area/length ratio less than ' +
                      str(tooSkinnyWidth) + ' ' + mapUnits + '<br>\n')
        for fc in (tooSkinnyPolys, tooSmallPolys):
            if numberOfRows(fc) == 0: testAndDelete(fc)
    else:
        outHtml.write('&nbsp;&nbsp; No MapUnitPolys feature class<br>\n')

    # clean up temporary layers regardless of which branch ran
    for xx in 'cafLayer', 'mupLayer', 'sliverLayer':
        testAndDelete(xx)

    return
Example #7
# Copies all feature classes from one folder to another
import arcpy

try:
    arcpy.env.workspace = "C:/TmpWrkDirGIS/GEOG485/Lesson1"

    # List the feature classes in the Lesson 1 folder
    fcList = arcpy.ListFeatureClasses()

    # Loop through the list and copy the feature classes to the Lesson 2 PracticeData folder
    for featureClass in fcList:
        arcpy.CopyFeatures_management(
            featureClass,
            "C:/TmpWrkDirGIS/GEOG485/Lesson2/PracticeData/" + featureClass)

except:
    print("Script failed to complete")
    print(arcpy.GetMessages(2))
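# ListFeatureClasses also accepts an optional wild card and feature-type filter,
# so the copy can be restricted to a subset of the folder. A minimal sketch with
# the same folders as above; the "Park*" wild card is purely illustrative:
arcpy.env.workspace = "C:/TmpWrkDirGIS/GEOG485/Lesson1"
for featureClass in arcpy.ListFeatureClasses("Park*", "Polygon"):
    arcpy.CopyFeatures_management(
        featureClass,
        "C:/TmpWrkDirGIS/GEOG485/Lesson2/PracticeData/" + featureClass)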
Example #8
        arcpy.Delete_management(selected_fc)

    # Create buffer to display for visual clarity
    arcpy.Buffer_analysis(parcel_layer, buffer_fc, buffer_distance,
                          dissolve_option = "ALL")
    arcpy.SetParameter(8, buffer_fc)

    # Select nearby features
    selection = arcpy.SelectLayerByLocation_management(parcel_layer,
                                                overlap_type = "WITHIN_A_DISTANCE",
                                                select_features = parcel_layer,
                                                search_distance = buffer_distance,
                                                selection_type = "NEW_SELECTION")

    # Make layer of selected features to display for visual clarity
    arcpy.CopyFeatures_management(parcel_layer, selected_fc)
    arcpy.SetParameter(9, selected_fc)

    # ========= Create table view of neighbor parcels from assessor ===========
    # Get nearby parcel IDs
    nearby_parcels = []
    with arcpy.da.SearchCursor(parcel_layer, tid_field) as parcel_cursor:
        nearby_parcels = ["\'%s\'" %(r[0]) for r in parcel_cursor]

    # Table definition query
    if len(nearby_parcels) > 1:
        table_tid_string = ", ".join(nearby_parcels)
    elif len(nearby_parcels) == 1:
        table_tid_string = nearby_parcels[0]
    else:
        table_tid_string = ""
Example #9
                                          "")
            elif field == fieldlist[4]:
                arcpy.AddField_management(out_final, field, "TEXT", "", "",
                                          "10", "", "NULLABLE", "NON_REQUIRED",
                                          "")
    except:
        pass

    filename = os.path.basename(out_final)
    with arcpy.da.UpdateCursor(out_final, "FileName") as cursor:
        for row in cursor:
            row[0] = filename
            cursor.updateRow(row)
    del cursor, row

    arcpy.Delete_management(out_merge)

    # archives the two original files that were just merged
    for fc in locationlist:
        filename = os.path.basename(fc)
        archivefc = archiveGDB + os.sep + filename
        if not arcpy.Exists(archivefc):
            arcpy.CopyFeatures_management(fc, archivefc)
        arcpy.Delete_management(fc)

end = datetime.datetime.now()
print "End Time: " + end.ctime()

elapsed = end - start_time
print "Elapsed  Time: " + str(elapsed)
Example #10
def getFields(data):
    fieldList = []
    fields = arcpy.ListFields(data)
    for field in fields:
        fieldList.append(field.name)
    return fieldList
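# Hedged alternative: the same field-name collection written as a list
# comprehension (getFieldNames is a hypothetical name, not part of the
# original script).
def getFieldNames(data):
    return [field.name for field in arcpy.ListFields(data)]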


'''
#####################################################################################
#### --------------------------------GEOPROCESSES--------------------------------####
#####################################################################################
'''
## Copy input files to memory:

zones = arcpy.CopyFeatures_management(zoneInput, "in_memory/zoneInputs")
projects = arcpy.CopyFeatures_management(projectInput,
                                         "in_memory/projectInputs")
'''
#########################
## Calculate distances ##
#########################
'''
## Create dictionary with input feature classes and their names to use as fields
input_dict = {"d_tra": trans, "d_sub": sub, "d_roa": road, "d_pv": PVplant, \
              "d_geo": geothermalPlant, "d_any": anyRE, "d_loa": loadCenter, "d_wat": water, \
              newFieldName: newFC}

## get all the fields in Project FC:
projectFields = arcpy.ListFields(projects)
projectFieldNames = []
Example #11
    basinbr = os.path.join(dcigdb, "{}_br".format(os.path.split(basin)[1]))
    basinbrinter = os.path.join(dcigdb, "{}_brinter".format(os.path.split(basin)[1]))
    basinbrproj = basinbr + 'proj'
    if not arcpy.Exists(basinbr):
        print('Filter BasinATLAS for Brazil for level {}...'.format(level))
        if 'AREA_GEOFULL' not in [f.name for f in arcpy.ListFields(basin)]:
            arcpy.AddGeometryAttributes_management(basin, 'AREA_GEODESIC', Area_Unit = "SQUARE_KILOMETERS")
            arcpy.AlterField_management(basin, 'AREA_GEO', new_field_name='AREA_GEOFULL', new_field_alias = 'AREA_GEOFULL')
        arcpy.Intersect_analysis([basin, br_bound], basinbrinter)
        arcpy.AddGeometryAttributes_management(basinbrinter, 'AREA_GEODESIC', Area_Unit = 'SQUARE_KILOMETERS')
        #Only keep basins that have at least 1% of their area within Brazil
        arcpy.MakeFeatureLayer_management(basinbrinter, 'basinbrinterlyr',
                                          where_clause= '(AREA_GEO/AREA_GEOFULL) > 0.01')
        basinbrlist = [row[0] for row in arcpy.da.SearchCursor('basinbrinterlyr', ['HYBAS_ID'])]
        arcpy.MakeFeatureLayer_management(basin, 'basinbrlyr', where_clause= 'HYBAS_ID IN {}'.format(tuple(basinbrlist)))
        arcpy.CopyFeatures_management('basinbrlyr', basinbr)

        #Subset StreamATLAS based on level 4 basins that intersect Brazil ----
        if level == '04':
            arcpy.Clip_analysis(streamatlas, 'basinbrlyr', snbr_original)

        #Project basins
        arcpy.Project_management(basinbr, basinbrproj, crsSIRGAS)

#---- Remove suspended and non-licensed dams ----
arcpy.MakeFeatureLayer_management(dams_original, 'dams_sublyr')
arcpy.SelectLayerByAttribute_management('dams_sublyr', 'NEW_SELECTION', where_clause= "NOT {0} IN ('Desativado' , 'Revogado', 'Extinta')".format('"ESTAGIO_1"'))

#-----Clean and project river network + project dams ----
#Identify and remove the bugs in the streamATLAS network
#Project dam dataset and import it into gdb
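# Hedged sketch of the last commented step (project the dam dataset and import
# it into the gdb), reusing dcigdb, crsSIRGAS and the 'dams_sublyr' selection
# from above; the output names dams_sub and dams_proj are hypothetical:
dams_sub = os.path.join(dcigdb, 'dams_sub')
dams_proj = os.path.join(dcigdb, 'dams_proj')
arcpy.CopyFeatures_management('dams_sublyr', dams_sub)   # keeps only the selected dams
arcpy.Project_management(dams_sub, dams_proj, crsSIRGAS)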
Example #12
                 str(ncurrentstep) + "/" + str(nstep))

MakeToPts = arcpy.MakeFeatureLayer_management(ToPts,
                                              "%ScratchWorkspace%\\MakeToPts")
Selection = arcpy.SelectLayerByAttribute_management(MakeToPts, "NEW_SELECTION",
                                                    "\"Inflection\" = 1")
NearTable = arcpy.GenerateNearTable_analysis(Selection, inFC,
                                             "%ScratchWorkspace%\\NearTable",
                                             "", "LOCATION", "NO_ANGLE", "")

SpatialRef = arcpy.Describe(inFC).spatialReference
ProxyPtsTEMP = arcpy.MakeXYEventLayer_management(NearTable, "NEAR_X", "NEAR_Y",
                                                 "ProxyPtsTEMP", SpatialRef,
                                                 "")

PtsForInflLine = arcpy.CopyFeatures_management(
    ProxyPtsTEMP, "%ScratchWorkspace%\\PtsForInflLine")
PtsForSplitting = arcpy.CopyFeatures_management(
    ProxyPtsTEMP, "%ScratchWorkspace%\\PtsForSplitting")
arcpy.JoinField_management(PtsForInflLine, "IN_FID", ToPts, "OBJECTID",
                           ["Order_ID", "ORIG_FID", "NEAR_X", "NEAR_Y"])
arcpy.JoinField_management(PtsForSplitting, "IN_FID", ToPts, "OBJECTID",
                           ["Order_ID", "ORIG_FID", "NEAR_X", "NEAR_Y"])

# Shaping the inflection points
ncurrentstep += 1
arcpy.AddMessage(
    "Formating inflection points feature in order to create the final inflection line - Step "
    + str(ncurrentstep) + "/" + str(nstep))

arcpy.AddField_management(PtsForInflLine, "Rank_UGO", "SHORT", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
Example #13
import arcpy, os, random
import numpy

arcpy.env.overwriteOutput = True

# The following line is for code testing ONLY and will be overwritten automatically upon running the script.
shapefileInput = "C:/Users/Elliot/Documents/Senior Year/Spring 2018/GEOG 5223 - GIS Design & Imple/Group Project/Data/tl_2017_us_state.shp"
shapefileOutput = "C:/Users/Elliot/Documents/Senior Year/Spring 2018/GEOG 5223 - GIS Design & Imple/Group Project/Output/Output1.shp"

# The following line MUST be uncommented before use in ArcToolbox.
#shapefileInput = arcpy.GetParameterAsText(0)
#shapefileOutput = arcpy.GetParameterAsText(1)

# Below, a temporary shapefile, containing the contents of the inputted shapefile, is created in the scratch geodatabase.
numberOfColors = 5
shapefile = arcpy.env.scratchGDB + os.path.sep + "temporary1"
arcpy.CopyFeatures_management(shapefileInput, shapefile)

# Below, an integer field is added to the temporary shapefile, which will hold the unique identifying code for each color group.
newField1 = arcpy.ValidateFieldName("ColorGroup", shapefile)
arcpy.AddField_management (shapefile, newField1, "LONG")

# Below, a text field is added to the temporary shapefile, which will hold a list of the color codes of the polygons that each polygon borders.
newField2 = arcpy.ValidateFieldName("neighbors", shapefile)
arcpy.AddField_management (shapefile, newField2, "TEXT")

# Below, a text field (xyIdent) is added to the temporary shapefile, which will hold the unique identifying code for each individual polygon feature in the shapefile.
arcpy.AddGeometryAttributes_management (shapefile, "CENTROID_INSIDE", "", "", "")
newField3 = arcpy.ValidateFieldName("xyIdent", shapefile)
arcpy.AddField_management (shapefile, newField3, "TEXT")
arcpy.CalculateField_management (shapefile, "xyIdent", "str(!INSIDE_X!) + str( !INSIDE_Y!)", "PYTHON")
arcpy.DeleteField_management (shapefile, "INSIDE_X")
Example #14
def objectifyRaster(rasterName, folderName):
    # Get licensed
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")
    else:
        print "No SA licence"
        exit()
    # Load the environment
    env.workspace = "C:/Users/hengstam/Desktop/projects/proglacial"
    # Make sure we can mess with stuff
    arcpy.env.overwriteOutput = True
    #####################################################
    ####				VECTORIZE IT				 ####
    #####################################################
    inputRasterFilename = '/rasters/' + folderName + '/' + rasterName + '.tif'
    # Set up temp files names
    tempRasterFilename = '/temp/rasters/' + rasterName + '.tif'
    tempMorphRasterFilename = '/temp/rasters/' + rasterName + '_morph.tif'
    tempShapesFilename = '/temp/polygons/' + rasterName + '.shp'
    # Set up our output
    outputShapesFilename = "/polygons/" + folderName + '/' + rasterName + ".shp"
    # Clear potential old output
    if arcpy.Exists(tempRasterFilename):
        arcpy.Delete_management(tempRasterFilename)
    if arcpy.Exists(tempMorphRasterFilename):
        arcpy.Delete_management(tempMorphRasterFilename)
    if arcpy.Exists(tempShapesFilename):
        arcpy.Delete_management(tempShapesFilename)
    if arcpy.Exists(outputShapesFilename):
        arcpy.Delete_management(outputShapesFilename)
    print "Converting raster " + inputRasterFilename + " into " + outputShapesFilename + '...'
    # Select water and save it to a temp location for morphological operations
    waterRasterBeforeMorpOps = arcpy.sa.Con(
        inputRasterFilename, 1, 0,
        "Value <= 6 AND Value >= 4")  #"Value <= 10 AND Value >= 7")
    waterRasterBeforeMorpOps.save(env.workspace + tempRasterFilename)
    arcpy.CopyRaster_management(env.workspace + tempRasterFilename,
                                env.workspace + tempMorphRasterFilename,
                                pixel_type="1_BIT")
    # Move to the next step
    print "Raster successfully imported."
    ##############################################
    ## Apply morphological operations to the water
    # print "Loading OpenCV2..."
    # print "Preforming morphological operations on " +env.workspace+tempMorphRasterFilename + "..."
    # # Clear memory
    # gc.collect()
    # # Load it into cv2
    # img = cv2.imread(env.workspace+tempMorphRasterFilename, 0)
    # # Open it
    # print "Applying morphological opening operations..."
    # opening = cv2.morphologyEx(img, cv2.MORPH_OPEN, np.ones((5, 5), np.uint8))
    # # Save it
    # cv2.imwrite(env.workspace+tempMorphRasterFilename, opening)
    # print "Raster successfully opened."
    #############################
    ## Reload the modified raster
    # Load it
    waterRasterAfterMorphOps = arcpy.Raster(tempMorphRasterFilename)
    # Clear the falses out for compatibility with arcpy
    waterRasterAfterMorphOps = arcpy.sa.SetNull(waterRasterAfterMorphOps != 1,
                                                waterRasterAfterMorphOps)
    ###########################
    ## Make the output
    print "Making a feature class at " + tempShapesFilename + "..."
    # Make a new feature class
    arcpy.CreateFeatureclass_management(env.workspace, tempShapesFilename,
                                        "POLYGON")
    # Convert it
    print "Converting raster to temp polygons..."
    arcpy.RasterToPolygon_conversion(waterRasterAfterMorphOps,
                                     tempShapesFilename, "NO_SIMPLIFY")
    print "Raster successfully converted."
    #####################################################
    ####			 PROCESS IT NOW PLZ				 ####
    #####################################################
    # Copy it over
    print "Processing " + tempShapesFilename + " to " + outputShapesFilename + "..."
    arcpy.CopyFeatures_management(tempShapesFilename, outputShapesFilename)
    # Get the date and location
    dateCode = {
        "1_": "1",
        "2_": "2",
        "3_": "3",
        "4_": "4",
        "5_": "5",
        "6_": "6",
        "7_": "7",
        "8_": "8",
        "9_": "9",
        "10_": "10",
        "11_": "11",
        "12_": "12",
        "7_9_": "13",
        "5_6_": "20",
        "6_7_": "21",
        "7_8_": "22",
        "8_9_": "23",
        "9_10_": "24",
    }
    date = int(
        re.search(r"(?<=Y)[0-9]+", rasterName).group() +
        dateCode[re.search(r"(?<=M)[0-9_]+", rasterName).group()])
    # loc = (int(rasterName[10:13]), int(rasterName[13:16]))
    # print "Date:", date, "Location:", loc
    ###################################
    ## Calculate area and get centroids
    print "Detailing shapefiles..."
    # Add fields
    arcpy.AddField_management(outputShapesFilename, "area", "DOUBLE")
    arcpy.AddField_management(outputShapesFilename, "centr_x", "DOUBLE")
    arcpy.AddField_management(outputShapesFilename, "centr_y", "DOUBLE")
    arcpy.AddField_management(outputShapesFilename, "lake_id", "STRING")
    arcpy.AddField_management(outputShapesFilename, "date", "LONG")
    arcpy.AddField_management(outputShapesFilename, "loc1", "SHORT")
    arcpy.AddField_management(outputShapesFilename, "loc2", "SHORT")
    # Write area value
    arcpy.CalculateField_management(outputShapesFilename, "area",
                                    "!SHAPE.AREA@SQUAREKILOMETERS!",
                                    "PYTHON_9.3")
    # Build a cursor to set our new fields
    cursor = arcpy.da.UpdateCursor(
        outputShapesFilename,
        ["SHAPE@TRUECENTROID", "centr_x", "centr_y", "date", "loc1", "loc2"])
    # Start summing area
    minAreaThreshold = 0.1  # 0.001
    # Work through all lakeOutputName in the feature class
    for row in cursor:
        # Write centroid values
        row[1] = row[0][0]
        row[2] = row[0][1]
        # Write date and location
        row[3] = date
        # row[4] = loc[0]
        # row[5] = loc[1]
        # Save it
        cursor.updateRow(row)
    # Clean up cursor objects
    del row, cursor
    print "Shapefiles successfully detailed."
    ################################################
    ## Only save large polygons (originally more than 0.1 km^2, but see above where `minAreaThreshold` is defined.)
    print "Removing small polygons..."
    arcpy.MakeFeatureLayer_management(outputShapesFilename,
                                      "removingSmallLakes_lyr")
    arcpy.SelectLayerByAttribute_management("removingSmallLakes_lyr",
                                            "NEW_SELECTION",
                                            "area < " + str(minAreaThreshold))
    arcpy.DeleteFeatures_management("removingSmallLakes_lyr")
    print "Small polygons successfully removed."
    ###########################
    ## Name the remaining lakes
    print "Naming lakes..."
    # Make a cursor to update the stuff
    cursor = arcpy.da.UpdateCursor(
        outputShapesFilename, ["SHAPE@AREA", "SHAPE@TRUECENTROID", "lake_id"])
    # n is used to count the number of lakes, which is displayed at the end of this script.
    n = 0
    # Go through all lakes in the feature class
    for row in cursor:
        # Counting works like this
        n += 1
        # Make hash
        m = hashlib.sha224()
        # Use the centroid (the tuple and its x/y components) to mutate the hash
        m.update(str(row[1]))
        m.update(str(row[1][0]))
        m.update(str(row[1][1]))
        # m.update(str(date))
        # Save it
        row[2] = m.hexdigest()
        cursor.updateRow(row)
    # Clean up cursor objects
    del cursor
    # IO
    print "Success! " + str(n) + " lakes found and named."
    print "Output located at " + outputShapesFilename + '.'
Example #15
import arcpy, os, random

point_list = []
for i in range(0, 100):
    point_list.append(arcpy.Point(random.randint(73, 135), random.randint(0, 90)))
out_fc = os.getcwd() + os.sep + "TestSQLPoint.shp"
pointGeometryList = []
for point in point_list:
    pointGeometryList.append(arcpy.PointGeometry(point))
arcpy.CopyFeatures_management(pointGeometryList, out_fc)
spatialReference = arcpy.SpatialReference(4326)
arcpy.DefineProjection_management(out_fc, spatialReference)
arcpy.AddField_management(out_fc, "Y", "SHORT")
arcpy.CalculateField_management(out_fc, "Y", "!shape.centroid.Y!", "PYTHON_9.3")
field_delimiters = arcpy.AddFieldDelimiters(os.getcwd(), "Y")
arcpy.AddField_management(out_fc, "CLASS", "TEXT", field_length=50)
where_clause = field_delimiters + "<=30"
with arcpy.da.UpdateCursor(out_fc, ["CLASS"], where_clause) as cursor:
    for row in cursor:
        row[0] = "Low latitude"
        cursor.updateRow(row)
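# The remaining points (Y > 30) can be labelled the same way; a minimal
# follow-up sketch reusing the delimited field name built above ("Mid/high
# latitude" is an illustrative label):
where_clause_high = field_delimiters + " > 30"
with arcpy.da.UpdateCursor(out_fc, ["CLASS"], where_clause_high) as cursor:
    for row in cursor:
        row[0] = "Mid/high latitude"
        cursor.updateRow(row)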
Example #16
def main(fcInputCenterline,
         fcInputPolygon,
         fcSegmentedPolygons,
         dblPointDensity=10.0,
         dblJunctionBuffer=120.00):

    workspaceTemp = "in_memory"

    arcpy.env.outputMFlag = "Disabled"
    arcpy.env.outputZFlag = "Disabled"

    # Copy centerline to temporary workspace
    fcCenterline = gis_tools.newGISDataset(workspaceTemp,
                                           "GNAT_DPS_Centerline")
    arcpy.CopyFeatures_management(fcInputCenterline, fcCenterline)

    # Build Thiessan polygons
    arcpy.AddMessage("GNAT DPS: Building Thiessan polygons")
    arcpy.env.extent = fcInputPolygon  ## Set full extent to build Thiessan polygons over entire line network.
    arcpy.Densify_edit(fcCenterline, "DISTANCE",
                       str(dblPointDensity) + " METERS")

    fcTribJunctionPoints = gis_tools.newGISDataset(
        workspaceTemp,
        "GNAT_DPS_TribJunctionPoints")  # All Segment Junctions??
    arcpy.Intersect_analysis(fcCenterline,
                             fcTribJunctionPoints,
                             output_type="POINT")

    fcThiessanPoints = gis_tools.newGISDataset(workspaceTemp,
                                               "GNAT_DPS_ThiessanPoints")
    arcpy.FeatureVerticesToPoints_management(fcCenterline, fcThiessanPoints,
                                             "ALL")

    lyrThiessanPoints = gis_tools.newGISDataset("Layer", "lyrThiessanPoints")
    arcpy.MakeFeatureLayer_management(fcThiessanPoints, lyrThiessanPoints)
    arcpy.SelectLayerByLocation_management(lyrThiessanPoints, "INTERSECT",
                                           fcTribJunctionPoints,
                                           str(dblJunctionBuffer) + " METERS",
                                           "NEW_SELECTION")

    fcThiessanPoly = gis_tools.newGISDataset(workspaceTemp,
                                             "GNAT_DPS_ThiessanPoly")
    #arcpy.CreateThiessenPolygons_analysis(lyrThiessanPoints,fcThiessanPoly,"ONLY_FID")
    arcpy.CreateThiessenPolygons_analysis(lyrThiessanPoints, fcThiessanPoly,
                                          "ALL")

    fcThiessanPolyClip = gis_tools.newGISDataset(workspaceTemp,
                                                 "GNAT_DPS_TheissanPolyClip")
    arcpy.Clip_analysis(fcThiessanPoly, fcInputPolygon, fcThiessanPolyClip)

    # Split the junction Thiessan polygons
    arcpy.AddMessage("GNAT DPS: Split junction Thiessan polygons")
    lyrTribThiessanPolys = gis_tools.newGISDataset("Layer",
                                                   "lyrTribThiessanPolys")
    arcpy.MakeFeatureLayer_management(fcThiessanPolyClip, lyrTribThiessanPolys)
    arcpy.SelectLayerByLocation_management(lyrTribThiessanPolys,
                                           "INTERSECT",
                                           fcTribJunctionPoints,
                                           selection_type="NEW_SELECTION")

    fcSplitPoints = gis_tools.newGISDataset(workspaceTemp,
                                            "GNAT_DPS_SplitPoints")
    arcpy.Intersect_analysis([lyrTribThiessanPolys, fcCenterline],
                             fcSplitPoints,
                             output_type="POINT")

    arcpy.AddMessage("GNAT DPS: Moving starting vertices of junction polygons")
    geometry_functions.changeStartingVertex(fcTribJunctionPoints,
                                            lyrTribThiessanPolys)

    arcpy.AddMessage("GNAT DPS: Vertices moved")
    fcThiessanTribPolyEdges = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_ThiessanTribPolyEdges")
    arcpy.FeatureToLine_management(lyrTribThiessanPolys,
                                   fcThiessanTribPolyEdges)

    fcSplitLines = gis_tools.newGISDataset(workspaceTemp,
                                           "GNAT_DPS_SplitLines")
    arcpy.SplitLineAtPoint_management(fcThiessanTribPolyEdges, fcSplitPoints,
                                      fcSplitLines, "0.1 METERS")

    fcMidPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_MidPoints")
    arcpy.FeatureVerticesToPoints_management(fcSplitLines, fcMidPoints, "MID")
    arcpy.Near_analysis(fcMidPoints, fcTribJunctionPoints, location="LOCATION")
    arcpy.AddXY_management(fcMidPoints)

    fcTribToMidLines = gis_tools.newGISDataset(workspaceTemp,
                                               "GNAT_DPS_TribToMidLines")
    arcpy.XYToLine_management(fcMidPoints, fcTribToMidLines, "POINT_X",
                              "POINT_Y", "NEAR_X", "NEAR_Y")

    ### Select polygons by centerline ###
    arcpy.AddMessage("GNAT DPS: Select polygons by centerline")
    fcThiessanEdges = gis_tools.newGISDataset(workspaceTemp,
                                              "GNAT_DPS_ThiessanEdges")
    arcpy.FeatureToLine_management(fcThiessanPolyClip, fcThiessanEdges)

    fcAllEdges = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_AllEdges")
    arcpy.Merge_management([fcTribToMidLines, fcThiessanEdges, fcCenterline],
                           fcAllEdges)  # include fcCenterline if needed

    fcAllEdgesPolygons = gis_tools.newGISDataset(workspaceTemp,
                                                 "GNAT_DPS_AllEdgesPolygons")
    arcpy.FeatureToPolygon_management(fcAllEdges, fcAllEdgesPolygons)

    fcAllEdgesPolygonsClip = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_AllEdgesPolygonsClip")
    arcpy.Clip_analysis(fcAllEdgesPolygons, fcInputPolygon,
                        fcAllEdgesPolygonsClip)

    fcPolygonsJoinCenterline = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_PolygonsJoinCenterline")
    arcpy.SpatialJoin_analysis(fcAllEdgesPolygonsClip,
                               fcCenterline,
                               fcPolygonsJoinCenterline,
                               "JOIN_ONE_TO_MANY",
                               "KEEP_ALL",
                               match_option="SHARE_A_LINE_SEGMENT_WITH")

    fcPolygonsDissolved = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_PolygonsDissolved")
    arcpy.Dissolve_management(fcPolygonsJoinCenterline,
                              fcPolygonsDissolved,
                              "FromID",
                              multi_part="SINGLE_PART")

    lyrPolygonsDissolved = gis_tools.newGISDataset("Layer",
                                                   "lyrPolygonsDissolved")
    arcpy.MakeFeatureLayer_management(fcPolygonsDissolved,
                                      lyrPolygonsDissolved)
    arcpy.SelectLayerByAttribute_management(lyrPolygonsDissolved,
                                            "NEW_SELECTION",
                                            """ "FromID" IS NULL """)

    arcpy.Eliminate_management(lyrPolygonsDissolved, fcSegmentedPolygons,
                               "LENGTH")

    arcpy.AddMessage("GNAT DPS: Tool complete")
    return
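# Note: gis_tools.newGISDataset() is a project helper that is not shown in
# this example. Judging only from how it is called above (a workspace path or
# the literal "Layer", plus a dataset name), a minimal stand-in could look
# like the sketch below; the real GNAT implementation may differ.
import os
import arcpy

def newGISDataset(workspace, name):
    """Return a dataset path (or layer name) and remove any existing copy."""
    dataset = name if workspace == "Layer" else os.path.join(workspace, name)
    if arcpy.Exists(dataset):
        arcpy.Delete_management(dataset)
    return dataset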
Example #17
File: geology.py  Project: ERIS-GIS/GIS_Dev
def generate_geology_report(order_obj):
    arcpy.AddMessage('  -- Start generating PSR geology report...')
    start = timeit.default_timer()
    ### set scratch folder
    arcpy.env.workspace = config.scratch_folder
    arcpy.env.overwriteOutput = True
    output_jpg_geology = config.output_jpg(order_obj,
                                           config.Report_Type.geology)
    page = 1
    if '10685' not in order_obj.psr.search_radius.keys():
        arcpy.AddMessage('      -- Geology search radius is not available')
        return
    config.buffer_dist_geology = str(
        order_obj.psr.search_radius['10685']) + ' MILES'

    ### create buffer map based on order geometry
    arcpy.Buffer_analysis(config.order_geometry_pcs_shp,
                          config.order_buffer_shp, config.buffer_dist_geology)

    arcpy.MakeFeatureLayer_management(config.data_geology, 'geology_lyr')
    arcpy.SelectLayerByLocation_management('geology_lyr', 'intersect',
                                           config.order_buffer_shp)
    arcpy.CopyFeatures_management('geology_lyr',
                                  config.geology_selectedby_order_shp)

    arcpy.Statistics_analysis(
        config.geology_selectedby_order_shp,
        os.path.join(config.scratch_folder, "summary_geology.dbf"),
        [['UNIT_NAME', 'FIRST'], ['UNIT_AGE', 'FIRST'], ['ROCKTYPE1', 'FIRST'],
         ['ROCKTYPE2', 'FIRST'], ['UNITDESC', 'FIRST'],
         ['ERIS_KEY_1', 'FIRST']], 'ORIG_LABEL')
    arcpy.Sort_management(
        os.path.join(config.scratch_folder, "summary_geology.dbf"),
        os.path.join(config.scratch_folder, "summary_sorted_geol.dbf"),
        [["ORIG_LABEL", "ASCENDING"]])

    mxd_geology = arcpy.mapping.MapDocument(config.mxd_file_geology)
    df_geology = arcpy.mapping.ListDataFrames(mxd_geology, "*")[0]
    df_geology.spatialReference = order_obj.spatial_ref_pcs

    ### add order and order_buffer layers to geology mxd file
    utility.add_layer_to_mxd("order_buffer", df_geology,
                             config.buffer_lyr_file, 1.1)
    utility.add_layer_to_mxd("order_geometry_pcs", df_geology,
                             config.order_geom_lyr_file, 1)

    if not config.if_multi_page:  # single-page
        #df.scale = 5000
        mxd_geology.saveACopy(
            os.path.join(config.scratch_folder, "mxd_geology.mxd"))
        arcpy.mapping.ExportToJPEG(mxd_geology, output_jpg_geology,
                                   "PAGE_LAYOUT", 480, 640, 150, "False",
                                   "24-BIT_TRUE_COLOR", 85)
        if not os.path.exists(
                os.path.join(config.report_path, 'PSRmaps', order_obj.number)):
            os.mkdir(
                os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        shutil.copy(
            output_jpg_geology,
            os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        arcpy.AddMessage(
            '      - output jpg image: %s' %
            os.path.join(config.report_path, 'PSRmaps', order_obj.number,
                         os.path.basename(output_jpg_geology)))
        del mxd_geology
        del df_geology
    else:  # multipage
        grid_lyr_shp = os.path.join(config.scratch_folder,
                                    'grid_lyr_geology.shp')
        arcpy.GridIndexFeatures_cartography(grid_lyr_shp,
                                            config.order_buffer_shp, "", "",
                                            "", config.grid_size,
                                            config.grid_size)

        # part 1: the overview map
        # add grid layer
        grid_layer = arcpy.mapping.Layer(config.grid_lyr_file)
        grid_layer.replaceDataSource(config.scratch_folder,
                                     "SHAPEFILE_WORKSPACE", "grid_lyr_geology")
        arcpy.mapping.AddLayer(df_geology, grid_layer, "Top")

        df_geology.extent = grid_layer.getExtent()
        df_geology.scale = df_geology.scale * 1.1

        mxd_geology.saveACopy(
            os.path.join(config.scratch_folder, "mxd_geology.mxd"))
        arcpy.mapping.ExportToJPEG(mxd_geology, output_jpg_geology,
                                   "PAGE_LAYOUT", 480, 640, 150, "False",
                                   "24-BIT_TRUE_COLOR", 85)

        if not os.path.exists(
                os.path.join(config.report_path, 'PSRmaps', order_obj.number)):
            os.mkdir(
                os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        shutil.copy(
            output_jpg_geology,
            os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        arcpy.AddMessage(
            '      - output jpg image page 1: %s' %
            os.path.join(config.report_path, 'PSRmaps', order_obj.number,
                         os.path.basename(output_jpg_geology)))
        del mxd_geology
        del df_geology

        # part 2: the data driven pages

        page = int(arcpy.GetCount_management(grid_lyr_shp).getOutput(0)) + 1
        mxd_mm_geology = arcpy.mapping.MapDocument(config.mxd_mm_file_geology)

        df_mm_geology = arcpy.mapping.ListDataFrames(mxd_mm_geology, "*")[0]
        df_mm_geology.spatialReference = order_obj.spatial_ref_pcs
        utility.add_layer_to_mxd("order_buffer", df_mm_geology,
                                 config.buffer_lyr_file, 1.1)
        utility.add_layer_to_mxd("order_geometry_pcs", df_mm_geology,
                                 config.order_geom_lyr_file, 1)

        grid_layer_mm = arcpy.mapping.ListLayers(mxd_mm_geology, "Grid",
                                                 df_mm_geology)[0]
        grid_layer_mm.replaceDataSource(config.scratch_folder,
                                        "SHAPEFILE_WORKSPACE",
                                        "grid_lyr_geology")
        arcpy.CalculateAdjacentFields_cartography(grid_lyr_shp, "PageNumber")
        mxd_mm_geology.saveACopy(
            os.path.join(config.scratch_folder, "mxd_mm_geology.mxd"))

        for i in range(
                1,
                int(arcpy.GetCount_management(grid_lyr_shp).getOutput(0)) + 1):
            arcpy.SelectLayerByAttribute_management(
                grid_layer_mm, "NEW_SELECTION", ' "PageNumber" =  ' + str(i))
            df_mm_geology.extent = grid_layer_mm.getSelectedExtent(True)
            df_mm_geology.scale = df_mm_geology.scale * 1.1
            arcpy.SelectLayerByAttribute_management(grid_layer_mm,
                                                    "CLEAR_SELECTION")

            title_text = arcpy.mapping.ListLayoutElements(
                mxd_mm_geology, "TEXT_ELEMENT", "title")[0]
            title_text.text = "Geologic Units - Page " + str(i)
            title_text.elementPositionX = 0.6303
            arcpy.RefreshTOC()

            arcpy.mapping.ExportToJPEG(
                mxd_mm_geology, output_jpg_geology[0:-4] + str(i) + ".jpg",
                "PAGE_LAYOUT", 480, 640, 150, "False", "24-BIT_TRUE_COLOR", 85)
            if not os.path.exists(
                    os.path.join(config.report_path, 'PSRmaps',
                                 order_obj.number)):
                os.mkdir(
                    os.path.join(config.report_path, 'PSRmaps',
                                 order_obj.number))
            shutil.copy(
                output_jpg_geology[0:-4] + str(i) + ".jpg",
                os.path.join(config.report_path, 'PSRmaps', order_obj.number))
            # arcpy.AddMessage('      - output jpg image: %s' % os.path.join(config.report_path, 'PSRmaps', order_obj.number, os.path.basename(output_jpg_geology[0:-4]+str(i)+".jpg")))
        del mxd_mm_geology
        del df_mm_geology
        psr_obj = models.PSR()
        for i in range(1, page):
            psr_obj.insert_map(order_obj.id, 'GEOL',
                               order_obj.number + '_US_GEOL' + str(i) + '.jpg',
                               i + 1)

    if (int(
            arcpy.GetCount_management(
                os.path.join(config.scratch_folder,
                             "summary_sorted_geol.dbf")).getOutput(0)) == 0):
        # no geology polygon selected...., need to send in map only
        arcpy.AddMessage('No geology polygon is selected....')
        psr_obj = models.PSR()
        psr_obj.insert_map(order_obj.id, 'GEOL',
                           order_obj.number + '_US_GEOLOGY.jpg',
                           1)  #note type 'SOIL' or 'GEOL' is used internally
    else:
        eris_id = 0
        psr_obj = models.PSR()
        in_rows = arcpy.SearchCursor(
            os.path.join(config.scratch_folder,
                         config.geology_selectedby_order_shp))
        for in_row in in_rows:
            # note the column changed in summary dbf
            # arcpy.AddMessage("Unit label is: " + in_row.ORIG_LABEL)
            # arcpy.AddMessage(in_row.UNIT_NAME)     # unit name
            # arcpy.AddMessage(in_row.UNIT_AGE)      # unit age
            # arcpy.AddMessage( in_row.ROCKTYPE1)      # rocktype 1
            # arcpy.AddMessage( in_row.ROCKTYPE2)      # rocktype2
            # arcpy.AddMessage( in_row.UNITDESC)       # unit description
            # arcpy.AddMessage( in_row.ERIS_KEY_1)     # eris key created from upper(unit_link)
            eris_id = eris_id + 1
            config.geology_ids.append([in_row.ERIS_KEY_1, eris_id])
            psr_obj.insert_order_detail(order_obj.id, eris_id, '10685')
            psr_obj.insert_flex_rep(order_obj.id, eris_id, '10685', 2, 'S1', 1,
                                    'Geologic Unit ' + in_row.ORIG_LABEL, '')
            psr_obj.insert_flex_rep(order_obj.id, eris_id, '10685', 2, 'N', 2,
                                    'Unit Name: ', in_row.UNIT_NAME)
            psr_obj.insert_flex_rep(order_obj.id, eris_id, '10685', 2, 'N', 3,
                                    'Unit Age: ', in_row.UNIT_AGE)
            psr_obj.insert_flex_rep(order_obj.id, eris_id, '10685', 2, 'N', 4,
                                    'Primary Rock Type ', in_row.ROCKTYPE1)
            psr_obj.insert_flex_rep(order_obj.id, eris_id, '10685', 2, 'N', 4,
                                    'Secondary Rock Type: ', in_row.ROCKTYPE2)
            if in_row.UNITDESC is None:
                node_scr = 'No description available.'
                psr_obj.insert_flex_rep(order_obj.id, eris_id, '10685', 2, 'N',
                                        6, 'Unit Description: ', node_scr)
            else:
                psr_obj.insert_flex_rep(order_obj.id, eris_id, '10685', 2, 'N',
                                        6, 'Unit Description: ',
                                        in_row.UNITDESC.encode('utf-8'))
            del in_row
        del in_rows
        psr_obj.insert_map(order_obj.id, 'GEOL',
                           order_obj.number + '_US_GEOLOGY.jpg', 1)

    end = timeit.default_timer()
    arcpy.AddMessage(' -- End generating PSR geology report. Duration: %s' %
                     round(end - start, 4))
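# Note: utility.add_layer_to_mxd() is an ERIS helper that is not included in
# this snippet. Based only on its call signature above (layer name, data
# frame, .lyr file, zoom factor), a plausible minimal sketch is shown here;
# the shapefile name and scratch-folder lookup are assumptions.
def add_layer_to_mxd(shp_name, data_frame, lyr_file, zoom_factor):
    layer = arcpy.mapping.Layer(lyr_file)
    # repoint the symbolized .lyr file at the shapefile in the scratch folder
    layer.replaceDataSource(config.scratch_folder, "SHAPEFILE_WORKSPACE",
                            shp_name)
    arcpy.mapping.AddLayer(data_frame, layer, "TOP")
    data_frame.extent = layer.getExtent()
    data_frame.scale = data_frame.scale * zoom_factor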
Example #18
    def execute(self, parameters, messages):
        """The source code of the tool."""

        # local variables and env
        ## workspace
        arcpy.env.workspace = "E:/gina/poker/pip"

        ## source data
        adnr_lo_shp = "E:/gina/poker/shp/wip/land_ownership_data/adnr_gls_dls_merge_20170823_v1.shp"
        pfrr_popn_places = "E:/gina/poker/shp/wip/popn_places_data/pokerflat_popn_places_gcs_wgs84_to_akalbers_2.shp"
        dot_rds = "E:/gina/poker/shp/asgdc_data/mv_dot_centerline_route_ln.shp"
        infra_trails = "E:/gina/poker/shp/asgdc_data/mv_infra_trail_ln.shp"
        rs2477_trails = "E:/gina/poker/shp/asgdc_data/mv_rs2477_ln.shp"
        infra_airstrips = "E:/gina/poker/shp/asgdc_data/mv_infra_airstrip_pt.shp"
        runways = "E:/gina/poker/shp/asgdc_data/mv_airport_runway_pt.shp"

        ## pip seed table
        pipTable = "E:/gina/poker/dbf/predicted_impact_xy.dbf"

        ## pip output data
        pip_point_shp = "E:/gina/poker/pip/pip_point.shp"
        pip_point_3338 = "E:/gina/poker/pip/pip_point_3338.shp"
        pip_buffer_shp = "E:/gina/poker/pip/pip_buffer.shp"
        pip_lo_in_buffer_shp = "E:/gina/poker/pip/pip_lo_in_buffer.shp"  # 1
        pip_lo_in_buf_sum_dbf = "E:/gina/poker/pip/pip_lo_in_buf_sum.dbf"
        pip_lo_in_buf_sum_csv = "E:/gina/poker/pip/pip_lo_in_buf_sum.csv"
        pip_popn_places_in_buffer_shp = "E:/gina/poker/pip/pip_popn_places_in_buffer.shp"  # 2
        pip_roads_in_buffer_shp = "E:/gina/poker/pip/pip_roads_in_buffer.shp"  # 3
        pip_rs2477_in_buffer_shp = "E:/gina/poker/pip/pip_rs2477_in_buffer.shp"  # 4
        pip_infra_trails_in_buffer_shp = "E:/gina/poker/pip/pip_infra_trails_in_buffer.shp"  # 5
        pip_infra_airstrips_in_buffer_shp = "E:/gina/poker/pip/pip_infra_airstrips_in_buffer.shp"  # 6
        pip_runways_in_buffer_shp = "E:/gina/poker/pip/pip_runways_in_buffer.shp"  # 7
        pipLayer = "pipLayer"

        ## pip buffer params
        x = parameters[0].valueAsText
        y = parameters[1].valueAsText
        r = parameters[2].valueAsText + " NauticalMiles"

        ## target coord sys
        srs = arcpy.SpatialReference("Alaska Albers Equal Area Conic")

        ## intersect arrays
        intersect_lo = [adnr_lo_shp, pip_buffer_shp]  # 1
        intersect_pp = [pfrr_popn_places, pip_buffer_shp]  # 2
        intersect_rd = [dot_rds, pip_buffer_shp]  # 3
        intersect_tr = [infra_trails, pip_buffer_shp]  # 4
        intersect_rs = [rs2477_trails, pip_buffer_shp]  # 5
        intersect_as = [infra_airstrips, pip_buffer_shp]  # 6
        intersect_rw = [runways, pip_buffer_shp]  # 7

        ## map document and dataframe
        mxd = arcpy.mapping.MapDocument("current")
        dataframe = arcpy.mapping.ListDataFrames(mxd)[0]

        ## symbology layer files
        sourcePipSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/pip.lyr")
        sourceLoSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/lo.lyr")  # 1
        sourceTrSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/tr.lyr")  # 2
        sourcePpSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/pp.lyr")  # 3
        sourceRdSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/rd.lyr")  # 4
        sourceRsSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/rs.lyr")  # 5
        sourceAsSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/as.lyr")  # 6
        sourceRwSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/rw.lyr")  # 7

        # Process: Calculate Lon Field
        arcpy.CalculateField_management(pipTable, "Lon", x, "PYTHON", "")

        # Process: Calculate Lat Field
        arcpy.CalculateField_management(pipTable, "Lat", y, "PYTHON", "")

        # Process: Make XY Event Layer
        arcpy.MakeXYEventLayer_management(
            pipTable, "Lon", "Lat", pipLayer,
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
            "")

        # Process: Copy Features
        arcpy.CopyFeatures_management(pipLayer, pip_point_shp, "", "0", "0",
                                      "0")

        # Process: Project pip point
        arcpy.Project_management(pip_point_shp, pip_point_3338, srs)

        # Process: Buffer pip point
        arcpy.Buffer_analysis(pip_point_3338, pip_buffer_shp, r, "FULL",
                              "ROUND", "NONE", "", "PLANAR")

        # Process: Intersect pip buffer with land ownership
        arcpy.Intersect_analysis(intersect_lo, pip_lo_in_buffer_shp, "ALL", "",
                                 "INPUT")  # 1

        # Process: Intersect pip buffer with popn places
        arcpy.Intersect_analysis(intersect_pp, pip_popn_places_in_buffer_shp,
                                 "ALL", "", "INPUT")  # 2

        # Process: Intersect pip buffer with road_centerlines
        arcpy.Intersect_analysis(intersect_rd, pip_roads_in_buffer_shp, "ALL",
                                 "", "INPUT")  # 3

        # Process: Intersect pip buffer with rs2477 trails
        arcpy.Intersect_analysis(intersect_rs, pip_rs2477_in_buffer_shp, "ALL",
                                 "", "INPUT")  # 4

        # Process: Intersect pip buffer with infra trails
        arcpy.Intersect_analysis(intersect_tr, pip_infra_trails_in_buffer_shp,
                                 "ALL", "", "INPUT")  # 5

        # Process: Intersect pip buffer with infra airstrips
        arcpy.Intersect_analysis(intersect_as,
                                 pip_infra_airstrips_in_buffer_shp, "ALL", "",
                                 "INPUT")  # 6

        # Process: Intersect pip buffer with runways
        arcpy.Intersect_analysis(intersect_rw, pip_runways_in_buffer_shp,
                                 "ALL", "", "INPUT")  # 7

        # Process: Make feature layers and add to the map
        ## pip layer
        arcpy.MakeFeatureLayer_management(pip_point_3338,
                                          "Predicted Impact Point")
        addPipPointLayer = arcpy.mapping.Layer("Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, addPipPointLayer)

        ## pip land ownership layer
        arcpy.MakeFeatureLayer_management(
            pip_lo_in_buffer_shp,
            "Land Ownership within 3sigma of Predicted Impact Point")  # 1
        add3sigmaLoLayer = arcpy.mapping.Layer(
            "Land Ownership within 3sigma of Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, add3sigmaLoLayer)

        ## pip populated places layer
        popn_places_records = int(
            arcpy.GetCount_management(pip_popn_places_in_buffer_shp).getOutput(
                0))  # 2
        if popn_places_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_popn_places_in_buffer_shp,
                "Populated Places within 3sigma of Predicted Impact Point")
            addPipPopnPlacesLayer = arcpy.mapping.Layer(
                "Populated Places within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipPopnPlacesLayer)

        ## pip road centerlines layer
        rd_centers_records = int(
            arcpy.GetCount_management(pip_roads_in_buffer_shp).getOutput(
                0))  # 3
        if rd_centers_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_roads_in_buffer_shp,
                "Roads within 3sigma of Predicted Impact Point")
            addPipRoadsLayer = arcpy.mapping.Layer(
                "Roads within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipRoadsLayer)

        ## pip rs2477 trails layer
        rs2477_records = int(
            arcpy.GetCount_management(pip_rs2477_in_buffer_shp).getOutput(
                0))  # 4
        if rs2477_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_rs2477_in_buffer_shp,
                "RS2477 Trails within 3sigma of Predicted Impact Point")
            addPipRs2477Layer = arcpy.mapping.Layer(
                "RS2477 Trails within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipRs2477Layer)

        ## pip infra trails layer
        infra_trails_records = int(
            arcpy.GetCount_management(
                pip_infra_trails_in_buffer_shp).getOutput(0))  # 5
        if infra_trails_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_infra_trails_in_buffer_shp,
                "Other Trails within 3sigma of Predicted Impact Point")
            addPipOtherTrailsLayer = arcpy.mapping.Layer(
                "Other Trails within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipOtherTrailsLayer)

        ## pip infra airstrips layer
        infra_airstrips_records = int(
            arcpy.GetCount_management(
                pip_infra_airstrips_in_buffer_shp).getOutput(0))  # 6
        if infra_airstrips_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_infra_airstrips_in_buffer_shp,
                "Airstrips within 3sigma of Predicted Impact Point")
            addPipAirstripsLayer = arcpy.mapping.Layer(
                "Airstrips within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipAirstripsLayer)

        ## pip runways layer
        runways_records = int(
            arcpy.GetCount_management(pip_runways_in_buffer_shp).getOutput(
                0))  # 7
        if runways_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_runways_in_buffer_shp,
                "Runways within 3sigma of Predicted Impact Point")
            addPipRunwaysLayer = arcpy.mapping.Layer(
                "Runways within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipRunwaysLayer)

        # Add and calc Acres field for intersected Land Ownership
        arcpy.AddField_management(pip_lo_in_buffer_shp, "Acres", "DOUBLE")
        arcpy.CalculateField_management(pip_lo_in_buffer_shp, "Acres",
                                        "!shape.area@acres!", "PYTHON_9.3", "")

        # Summarize intersected Land Ownership by Owner and total Acres
        arcpy.Statistics_analysis(pip_lo_in_buffer_shp, pip_lo_in_buf_sum_dbf,
                                  "Acres SUM", "OWNER")
        # arcpy.MakeTableView_management(pip_lo_in_buf_sum_dbf)
        add3sigmaLoSumTbl = arcpy.mapping.TableView(pip_lo_in_buf_sum_dbf)
        arcpy.mapping.AddTableView(dataframe, add3sigmaLoSumTbl)

        # Symbolize and Refresh
        ## pip layer
        pip_layer = arcpy.mapping.ListLayers(mxd, "*Predicted Impact Point*",
                                             dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, pip_layer,
                                  sourcePipSymbologyLayer, True)

        ## land ownership layer
        lo_layer = arcpy.mapping.ListLayers(
            mxd, "*Land Ownership within 3sigma of Predicted Impact Point*",
            dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, lo_layer, sourceLoSymbologyLayer,
                                  True)
        lo_layer.symbology.addAllValues()

        ## populated places layer
        pp_layer = arcpy.mapping.ListLayers(
            mxd, "*Populated Places within 3sigma of Predicted Impact Point*",
            dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, pp_layer, sourcePpSymbologyLayer,
                                  True)

        ## road layer
        rd_layer = arcpy.mapping.ListLayers(
            mxd, "*Roads within 3sigma of Predicted Impact Point*",
            dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, rd_layer, sourceRdSymbologyLayer,
                                  True)

        ## rs2477 layer -- Errored out saying that the index is out of range :(
        # rs_layer = arcpy.mapping.ListLayers(mxd, "*RS2477 Trails within 3sigma of Predicted Impact Point*", dataframe)[0]
        # arcpy.mapping.UpdateLayer(dataframe, rs_layer, sourceRsSymbologyLayer, True)

        ## trails layer
        tr_layer = arcpy.mapping.ListLayers(
            mxd, "*Other Trails within 3sigma of Predicted Impact Point*",
            dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, tr_layer, sourceTrSymbologyLayer,
                                  True)

        ## airstrips layer -- Errored out saying that the index is out of range :(
        # as_layer = arcpy.mapping.ListLayers(mxd, "*Airstrips within 3sigma of Predicted Impact Point*", dataframe)[0]
        # arcpy.mapping.UpdateLayer(dataframe, as_layer, sourceAsSymbologyLayer, True)

        ## runways layer - commenting out to try to resolve the out of range errors :|
        # rw_layer = arcpy.mapping.ListLayers(mxd, "*Runways within 3sigma of Predicted Impact Point*", dataframe)[0]
        # arcpy.mapping.UpdateLayer(dataframe, rw_layer, sourceRwSymbologyLayer, True)

        arcpy.RefreshTOC()
        arcpy.RefreshActiveView()

        return
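    # Note: execute() above reads parameters[0..2] as longitude, latitude and
    # the 3-sigma radius in nautical miles. The tool's real getParameterInfo()
    # is not part of this snippet; the sketch below shows one plausible way
    # those three parameters could be declared (names and datatypes are
    # assumptions).
    def getParameterInfo(self):
        x = arcpy.Parameter(displayName="Longitude (decimal degrees)",
                            name="lon", datatype="GPString",
                            parameterType="Required", direction="Input")
        y = arcpy.Parameter(displayName="Latitude (decimal degrees)",
                            name="lat", datatype="GPString",
                            parameterType="Required", direction="Input")
        r = arcpy.Parameter(displayName="3-sigma radius (nautical miles)",
                            name="radius", datatype="GPString",
                            parameterType="Required", direction="Input")
        return [x, y, r]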
Example #19
#L:\Workspace\ESA_Species\Step3\ToolDevelopment\TerrestrialGIS\CriticalHabitat\ShapeWebApp_CH

#L:\Workspace\ESA_Species\Step3\ToolDevelopment\TerrestrialGIS\Range\R_WebApp_Composite.gdb
#L:\Workspace\ESA_Species\Step3\ToolDevelopment\TerrestrialGIS\Range\ShapeWebApp_Range
arcpy.env.workspace = inCompGDB

fclist = arcpy.ListFeatureClasses()

start = datetime.datetime.now()
print "Started script at {0}".format(start)
for fc in fclist:
    fc_sp = fc.split('_')
    list_len = len(fc_sp)
    counter = 1
    outname = fc_sp[0]
    while counter < (list_len - 1):
        outname = outname + "_" + str(fc_sp[counter])

        counter += 1

    outfc = outfolder + os.sep + str(outname)

    if arcpy.Exists(outfc + '.shp'):
        print 'Already exported: {0}'.format(outfc)

    else:
        print outname
        arcpy.CopyFeatures_management(fc, outfc)
        print "Exported {0}".format(outfc)

print "Elapse time {0}".format((datetime.datetime.now()) - start)
Example #20
    #all output classes need to be Z-aware
    arcpy.env.outputZFlag = 'Enabled'

    #write all copies out to the SR of the cross section line
    desc = arcpy.Describe(zmLine)
    arcpy.env.outputCoordinateSystem = desc.SpatialReference

    for layer in featList:
        arcpy.AddMessage('Converting %s to 3D features' % layer)
        baseName = os.path.basename(layer)
        layCopy = os.path.join(scratchDir, baseName + '_copy')

        #make a copy in the scratch directory so that we can edit the geometries
        #of the features
        arcpy.CopyFeatures_management(layer, layCopy)

        #find out what kind of features we're dealing with
        shpType = arcpy.Describe(layer).ShapeType
        #arcpy.AddMessage('{} is a {} feature class'.format(layer, shpType))

        #special case of point feature type (fewer nested loops for the parts > vertices)
        #and we can edit the geometry directly
        if shpType == 'Point':
            #open an update cursor on the copy
            rows = arcpy.da.UpdateCursor(layCopy, ["SHAPE@XY", "SHAPE@Z"])

            for row in rows:
                #get the geometry of this point
                oldXY = row[0]
                oldX = oldXY[0]
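                # Note: the original snippet is truncated inside this point
                # branch. For reference only (hedged sketch, not the original
                # code), an update loop of this shape usually finishes by
                # writing the edited coordinates back through the cursor; the
                # Z value here is a placeholder for whatever project-specific
                # calculation the full tool performs:
                #     oldY = oldXY[1]
                #     newZ = 0.0                  # placeholder Z value
                #     row[0] = (oldX, oldY)
                #     row[1] = newZ
                #     rows.updateRow(row)
                # del rows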
Example #21
# Select all the roads within 500 map units of the garbage management area ('milieuzones') polygons
milieuzone_roads = arcpy.SelectLayerByLocation_management('Viales_Clip',
                                    'WITHIN_A_DISTANCE',
                                    'milieuzones',
                                    500,
                                    'NEW_SELECTION',
                                    'NOT_INVERT')

# Within the selected features, further select only those roads that can be used by vehicles (based on their fclass value)
busqueda = "fclass = 'living_street' OR fclass = 'motorway' OR fclass = 'motorway_link' OR fclass = 'primary' OR fclass = 'primary_link' OR fclass = 'residential' OR fclass = 'secondary' OR fclass = 'secondary_link' OR fclass = 'service' OR fclass = 'tertiary' OR fclass = 'tertiary_link' OR fclass = 'trunk' OR fclass = 'trunk_link' OR fclass = 'unclassified'"
arcpy.SelectLayerByAttribute_management(milieuzone_roads,
                             'SUBSET_SELECTION',
                             busqueda)

# Write the selected features to a new featureclass
arcpy.CopyFeatures_management(milieuzone_roads, 'Callejero')



"""
The following script registers the listed datasets as versioned
"""

# Import system modules
import arcpy

# Set local variables
Connection = "X:/TPFM_RDSC/FicherosConexionGDB/[email protected]/"
VerDatasets = ["Anidamientos",
               "Arboles",
               "Areas_caninas",
Example #22
#   (generated by ArcGIS/ModelBuilder)
# Usage: landbouwschade_QgistoArcMap <LBS_tussenstap> <landbouwschade2018> 
# Description: 
# ---------------------------------------------------------------------------

# Import arcpy module
import arcpy

# Script arguments
LBS_tussenstap = arcpy.GetParameterAsText(0)
if LBS_tussenstap == '#' or not LBS_tussenstap:
    LBS_tussenstap = "R:\\GIS-CO\\QGIS-PROJECTEN\\Milieu\\geodata\\landbouwschade.gdb\\landbouwschade2018_tussenstap" # provide a default value if unspecified

landbouwschade2018 = arcpy.GetParameterAsText(1)
if landbouwschade2018 == '#' or not landbouwschade2018:
    landbouwschade2018 = "landbouwschade2018" # provide a default value if unspecified

# Local variables:
LBS_percelen = landbouwschade2018
LBS_percelenWGS = "R:\\GIS-CO\\QGIS-PROJECTEN\\Milieu\\geodata\\landbouwschade.gdb\\landbouwschade2018_Project"

# Process: Select Layer By Attribute
arcpy.SelectLayerByAttribute_management(landbouwschade2018, "NEW_SELECTION", "\"controle\" = 'ja' OR \"controle\" = 'nee' OR \"controle\" = 'gecontroleerd'")

# Process: Project
arcpy.Project_management(LBS_percelen, LBS_percelenWGS, "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]", "Belge_1972_To_WGS_1984_3", "PROJCS['Belge_1972_Belgian_Lambert_72',GEOGCS['GCS_Belge 1972',DATUM['D_Belge_1972',SPHEROID['International_1924',6378388.0,297.0]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['false_easting',150000.013],PARAMETER['false_northing',5400088.438],PARAMETER['central_meridian',4.367486666666666],PARAMETER['standard_parallel_1',49.8333339],PARAMETER['standard_parallel_2',51.16666723333333],PARAMETER['latitude_of_origin',90.0],UNIT['Meter',1.0]]", "NO_PRESERVE_SHAPE", "", "NO_VERTICAL")

# Process: Copy Features
arcpy.CopyFeatures_management(LBS_percelenWGS, LBS_tussenstap, "DEFAULTS", "0", "0", "0")

Example #23
    def execute(self, parameters, messages):
        arcpy.env.overwriteOutput = True
        arcpy.CheckOutExtension('Spatial')
        arcpy.AddMessage("Orientation of species distributions")
        for param in parameters:
            arcpy.AddMessage("Parameter: %s = %s" %
                             (param.name, param.valueAsText))

        # Read in variables for the tool
        input_line = parameters[0].valueAsText
        input_points = parameters[1].valueAsText
        attribute_process = parameters[2].valueAsText
        flag_field = parameters[3].valueAsText
        distance = parameters[4].value
        angle = parameters[5].value
        output_directory = parameters[6].valueAsText
        clean_up = parameters[7].valueAsText

        # Make output directory if it does not exist
        output_directory = output_directory.strip()
        arcpy.AddMessage(output_directory)

        if not os.path.exists(str(output_directory)):
            os.makedirs(output_directory)

        arcpy.env.workspace = output_directory

        # 0 Describe files to set coordinate systems
        desc_input = arcpy.Describe(input_points)
        coord_system = desc_input.spatialReference
        arcpy.env.outputCoordinateSystem = coord_system

        # 1 Convert island line to a polygon - numpy work around due to lack of license

        if not arcpy.Exists(os.path.join(output_directory, "Island_Poly.shp")):

            def polygon_to_line_no_gap(input_line_, output_polygon):
                array = arcpy.da.FeatureClassToNumPyArray(
                    input_line_, ["SHAPE@X", "SHAPE@Y"],
                    spatial_reference=coord_system,
                    explode_to_points=True)
                if array.size == 0:
                    arcpy.AddError(
                        "Line has no features, check to ensure it is OK")
                else:
                    array2 = arcpy.Array()
                    for x, y in array:
                        pnt = arcpy.Point(x, y)
                        array2.add(pnt)
                    polygon = arcpy.Polygon(array2)
                    arcpy.CopyFeatures_management(polygon, output_polygon)
                return

            polygon_to_line_no_gap(
                input_line, os.path.join(output_directory, "Island_Poly.shp"))

            # 2 Create Fishnet for random sampling of points within the cells of the net
            extent = arcpy.Describe(input_points).extent
            origin_coord = str(extent.XMin) + " " + str(extent.YMin)
            y_coord = str(extent.XMin) + " " + str(extent.YMin + 1)
            corner_coord = str(extent.XMax) + " " + str(extent.YMax)

            island_area = 0

            with arcpy.da.SearchCursor(
                    os.path.join(output_directory, "Island_Poly.shp"),
                    "SHAPE@") as rows:
                for row in rows:
                    island_area += row[0].getArea("GEODESIC",
                                                  "SQUAREKILOMETERS")

            island_area_polygon = sqrt(island_area * 0.1) * 100

            arcpy.AddMessage("....fishnet size is: " +
                             str(round(island_area_polygon, 2)) + " m x " +
                             str(round(island_area_polygon, 2)) +
                             " m. Island area is: " +
                             str(round(island_area, 0)) + " km2.")

            arcpy.CreateFishnet_management(out_feature_class=os.path.join(
                output_directory, "Fishnet.shp"),
                                           origin_coord=origin_coord,
                                           y_axis_coord=y_coord,
                                           cell_width=island_area_polygon,
                                           cell_height=island_area_polygon,
                                           number_rows="",
                                           number_columns="",
                                           corner_coord=corner_coord,
                                           labels="",
                                           template="",
                                           geometry_type="POLYGON")

            arcpy.Intersect_analysis(
                in_features=os.path.join(output_directory, "Fishnet.shp") +
                " #;" + os.path.join(output_directory, "Island_Poly.shp") +
                " #",
                out_feature_class=os.path.join(output_directory,
                                               "FishClip.shp"),
                join_attributes="ONLY_FID",
                cluster_tolerance="-1 Unknown",
                output_type="INPUT")

            arcpy.DefineProjection_management(
                os.path.join(output_directory, "FishClip.shp"), coord_system)

            arcpy.AddField_management(
                os.path.join(output_directory, "FishClip.shp"), "Shape_Area",
                "DOUBLE")

            arcpy.CalculateField_management(
                os.path.join(output_directory, "FishClip.shp"), "Shape_Area",
                "!SHAPE.AREA@SQUAREMETERS!", "PYTHON_9.3")

            maxvalue = arcpy.SearchCursor(
                os.path.join(output_directory, "FishClip.shp"), "", "", "",
                "Shape_Area" + " D").next().getValue("Shape_Area")

            maxvalue = str(int(maxvalue - 1))

            where = '"Shape_Area" > ' + "%s" % maxvalue
            arcpy.Select_analysis(
                in_features=os.path.join(output_directory, "FishClip.shp"),
                out_feature_class=os.path.join(output_directory,
                                               "FishClipInner.shp"),
                where_clause=where)

            # 3 Create n random points within the cells of the fishnet
            arcpy.CreateRandomPoints_management(
                out_path=output_directory,
                out_name="RndPts.shp",
                constraining_feature_class=os.path.join(
                    output_directory, "FishClipInner.shp"),
                constraining_extent="0 0 250 250",
                number_of_points_or_field="5",
                minimum_allowed_distance="0 Meters",
                create_multipoint_output="POINT",
                multipoint_size="0")

            arcpy.DefineProjection_management(
                os.path.join(output_directory, "RndPts.shp"), coord_system)

        else:
            arcpy.AddMessage(
                "....skipping building polygons as they already exist")

        # 3 Create spatial bootstrapping circle polygons
        rows = arcpy.SearchCursor(os.path.join(output_directory, "RndPts.shp"))
        desc = arcpy.Describe(os.path.join(output_directory, "RndPts.shp"))
        shapefieldname = desc.ShapeFieldName

        if not arcpy.Exists(os.path.join(output_directory, "SectorPoly.shp")):
            arcpy.AddMessage("....now conducting spatial bootstrap.")

            featureclass = os.path.join(output_directory, "SectorPoly.shp")
            arcpy.CreateFeatureclass_management(os.path.dirname(featureclass),
                                                os.path.basename(featureclass),
                                                "Polygon")
            arcpy.AddField_management(featureclass, str("FID_Fishne"), "TEXT",
                                      "", "", "150")
            arcpy.AddField_management(featureclass, "BEARING", "SHORT", "", "",
                                      "4")
            arcpy.DeleteField_management(featureclass, ["Id"])
            arcpy.DefineProjection_management(featureclass, coord_system)

            finalfeatureclass = os.path.join(output_directory, "Final.shp")

            arcpy.CreateFeatureclass_management(
                os.path.dirname(finalfeatureclass),
                os.path.basename(finalfeatureclass), "Polygon")
            arcpy.AddField_management(finalfeatureclass, str("FID_Fishne"),
                                      "TEXT", "", "", "150")
            arcpy.AddField_management(finalfeatureclass, "BEARING", "SHORT",
                                      "", "", "4")
            arcpy.DeleteField_management(finalfeatureclass, ["Id"])
            arcpy.DefineProjection_management(finalfeatureclass, coord_system)

            featureclass_in_mem = arcpy.CreateFeatureclass_management(
                "in_memory", "featureclass_in_mem", "Polygon")
            arcpy.AddField_management(featureclass_in_mem, "OriginID", "TEXT",
                                      "", "", "150")
            arcpy.AddField_management(featureclass_in_mem, "BEARING", "SHORT",
                                      "", "", "4")
            arcpy.DeleteField_management(featureclass_in_mem, ["Id"])
            arcpy.DefineProjection_management(featureclass_in_mem,
                                              coord_system)

            for row in rows:
                angles = range(0, 360, angle)
                feat = row.getValue(shapefieldname)
                columnValue = row.getValue(str("FID"))
                pnt = feat.getPart()
                origin_x = pnt.X
                origin_y = pnt.Y

                for ang in angles:
                    angleorigin = float(int(ang))
                    # Point 1
                    (disp_x, disp_y) = (distance * sin(radians(angleorigin)),
                                        distance * cos(radians(angleorigin)))
                    (end_x, end_y) = (origin_x + disp_x, origin_y + disp_y)
                    # Point 2
                    anglestep = float(int(ang) + int(angle))
                    (disp2_x, disp2_y) = (distance * sin(radians(anglestep)),
                                          distance * cos(radians(anglestep)))
                    (end2_x, end2_y) = (origin_x + disp2_x, origin_y + disp2_y)

                    # Create a polygon geometry
                    array = arcpy.Array([
                        arcpy.Point(origin_x, origin_y),
                        arcpy.Point(end_x, end_y),
                        arcpy.Point(end2_x, end2_y),
                    ])
                    polygon = arcpy.Polygon(array)

                    with arcpy.da.InsertCursor(
                            featureclass_in_mem,
                        ['OriginID', 'BEARING', 'SHAPE@']) as cur:
                        cur.insertRow([columnValue, ang, polygon])

                    array.removeAll()

            arcpy.CopyFeatures_management(r"in_memory\featureclass_in_mem",
                                          featureclass)
        else:
            arcpy.AddMessage("....using previous spatial bootstrap.")

        arcpy.AddMessage("....now joining with observations")

        query = '"' + str(flag_field) + '" = ' + str(0)
        arcpy.MakeFeatureLayer_management(input_points,
                                          "input_points_query_sub")
        arcpy.Select_analysis("input_points_query_sub",
                              r"in_memory/input_points_query", query)

        count_records = arcpy.GetCount_management(
            r"in_memory/input_points_query").getOutput(0)
        arcpy.AddMessage("....total number of records to process: " +
                         str(count_records))

        if int(count_records) > 500:
            arcpy.AddMessage(
                "....spatial join will fail due to memory error, working around this limitation..."
            )
            count_records = arcpy.GetCount_management(
                os.path.join(output_directory, "SectorPoly.shp")).getOutput(0)
            query_1_range = '"' + str("FID") + '" <= ' + str(
                int(count_records) / 4)
            query_2_range = '"' + str("FID") + '" > ' + str(
                int(count_records) / 4) + ' And "' + str("FID") + '" < ' + str(
                    int(count_records) / 2)
            query_3_range = '"' + str("FID") + '" >= ' + str(
                int(count_records) / 2) + ' And "' + str("FID") + '" < ' + str(
                    int(count_records) / 2 + int(count_records) / 4)
            query_4_range = '"' + str("FID") + '" >= ' + str(
                int(count_records) / 2 + int(count_records) / 4)

            query_list = [
                query_1_range, query_2_range, query_3_range, query_4_range
            ]
            count = 1

            for i in query_list:
                if not arcpy.Exists(
                        os.path.join(output_directory,
                                     "SectorPoly" + str(count) + ".shp")):
                    arcpy.Select_analysis(
                        os.path.join(output_directory, "SectorPoly.shp"),
                        os.path.join(output_directory,
                                     "SectorPoly" + str(count) + ".shp"), i)

                    arcpy.SpatialJoin_analysis(
                        os.path.join(output_directory,
                                     "SectorPoly" + str(count) + ".shp"),
                        r"in_memory/input_points_query",
                        os.path.join(output_directory,
                                     "SpatialJoin" + str(count) + ".shp"),
                        "JOIN_ONE_TO_MANY", "KEEP_ALL", "", "INTERSECT")
                    with arcpy.da.UpdateCursor(
                            os.path.join(output_directory,
                                         "SpatialJoin" + str(count) + ".shp"),
                            "Join_Count") as cursor:
                        for row in cursor:
                            if row[0] == 0:
                                cursor.deleteRow()

                if not arcpy.Exists(
                        os.path.join(output_directory,
                                     "SpatialJoin" + str(count) + ".csv")):
                    dbf2csv(
                        os.path.join(output_directory,
                                     "SpatialJoin" + str(count) + ".dbf"),
                        os.path.join(output_directory,
                                     "SpatialJoin" + str(count) + ".csv"))

                count += 1
        else:
            arcpy.SpatialJoin_analysis(
                os.path.join(output_directory, "SectorPoly.shp"),
                r"in_memory/input_points_query",
                r"in_memory/points_SpatialJoin", "JOIN_ONE_TO_MANY",
                "KEEP_ALL", "", "INTERSECT")

            with arcpy.da.UpdateCursor(r"in_memory/points_SpatialJoin",
                                       "Join_Count") as cursor:
                for row in cursor:
                    if row[0] == 0:
                        cursor.deleteRow()

            arcpy.CopyFeatures_management(
                r"in_memory/points_SpatialJoin",
                os.path.join(
                    output_directory,
                    os.path.splitext(os.path.basename(input_points))[0] +
                    "_join.shp"))

        attribute_process = attribute_process.split(",")

        if arcpy.Exists(r"in_memory/points_SpatialJoin"):
            for i in attribute_process:
                arcpy.AddMessage("....calculating statistics for " + str(i))
                stats = [[i, "MEAN"], [i, "STD"]]
                arcpy.Statistics_analysis(
                    r"in_memory/points_SpatialJoin",
                    os.path.join(
                        output_directory,
                        os.path.splitext(os.path.basename(input_points))[0] +
                        "_" + i + ".dbf"), stats, "BEARING")

        else:

            header_saved = False

            if not arcpy.Exists(
                    os.path.join(output_directory,
                                 "SpatialJoin_Merge" + ".csv")):

                with open(
                        os.path.join(output_directory,
                                     "SpatialJoin_Merge" + ".csv"),
                        'wb') as fout:
                    for num in range(1, 5):
                        with open(
                                os.path.join(output_directory, "SpatialJoin" +
                                             str(num) + ".csv")) as fin:
                            header = next(fin)
                            if not header_saved:
                                fout.write(header)
                                header_saved = True
                            for line in fin:
                                fout.write(line)

            for i in attribute_process:
                arcpy.AddMessage("....calculating statistics for " + str(i) +
                                 " using pandas1.")
                chunks = pd.read_csv(os.path.join(
                    output_directory, "SpatialJoin_Merge" + ".csv"),
                                     chunksize=100000)
                pieces = [
                    x.groupby('BEARING',
                              as_index=False)[i].agg(['count', 'mean', 'std'])
                    for x in chunks
                ]
                result = pd.concat(pieces)
                result.columns = result.columns.droplevel(0)
                result = result.reset_index()
                name_mean = "MEAN_" + str(i)
                name_std = "STD_" + str(i)
                result.rename(columns={'count': 'FREQUENCY'}, inplace=True)
                result.rename(columns={'mean': name_mean[0:10]}, inplace=True)
                result.rename(columns={'std': name_std[0:10]}, inplace=True)

                f = {
                    'FREQUENCY': ['sum'],
                    name_mean[0:10]: ['mean'],
                    name_std[0:10]: ['mean']
                }

                result_2 = result.groupby('BEARING').agg(f)

                result_2 = result_2.reset_index()

                result_2 = result_2[[
                    'BEARING', 'FREQUENCY', name_mean[0:10], name_std[0:10]
                ]]

                result_2.to_csv(os.path.join(
                    output_directory,
                    os.path.splitext(os.path.basename(input_points))[0] + "_" +
                    i + ".csv"),
                                index=False)

                if os.path.exists(
                        os.path.join(
                            output_directory,
                            os.path.splitext(os.path.basename(input_points))[0]
                            + "_" + i + ".csv")):
                    with open(
                            os.path.join(
                                output_directory,
                                os.path.splitext(
                                    os.path.basename(input_points))[0] + "_" +
                                i + ".csv"), "r") as f:
                        reader = list(csv.reader(f, delimiter=","))
                        reader.pop(1)
                        reader.pop(1)
                        with open(
                                os.path.join(
                                    output_directory,
                                    os.path.splitext(
                                        os.path.basename(input_points))[0] +
                                    "_" + i + ".csv"), "w") as out:
                            writer = csv.writer(out, delimiter=",")
                            for row in reader:
                                writer.writerow(row)

                result = arcpy.TableToDBASE_conversion(
                    os.path.join(
                        output_directory,
                        os.path.splitext(os.path.basename(input_points))[0] +
                        "_" + i + ".csv"), output_directory)

        try:
            arcpy.Delete_management(r"in_memory/points_SpatialJoin")
            arcpy.Delete_management(r"in_memory/input_points_query")
        except:
            pass

        if clean_up == "true":
            arcpy.Delete_management(
                os.path.join(output_directory, "Island_Line.shp"))
            arcpy.CopyFeatures_management(
                os.path.join(output_directory, "Island_Poly.shp"),
                os.path.join(
                    output_directory,
                    os.path.splitext(os.path.basename(input_points))[0] +
                    "_poly.shp"))
            arcpy.Delete_management(
                os.path.join(output_directory, "Island_Poly.shp"))
            arcpy.Delete_management(
                os.path.join(output_directory, "SectorPoly.shp"))
            arcpy.Delete_management(
                os.path.join(output_directory, "Fishnet.shp"))
            arcpy.Delete_management(
                os.path.join(output_directory, "Fishnet_label.shp"))
            arcpy.Delete_management(
                os.path.join(output_directory, "FishClip.shp"))
            arcpy.Delete_management(
                os.path.join(output_directory, "FishClipInner.shp"))
            arcpy.Delete_management(
                os.path.join(output_directory, "RndPts.shp"))
            if int(count_records) > 500:
                arcpy.Delete_management(
                    os.path.join(output_directory, "SectorPoly1" + ".shp"))
                arcpy.Delete_management(
                    os.path.join(output_directory, "SectorPoly2" + ".shp"))
                arcpy.Delete_management(
                    os.path.join(output_directory, "SectorPoly3" + ".shp"))
                arcpy.Delete_management(
                    os.path.join(output_directory, "SectorPoly4" + ".shp"))
                arcpy.Delete_management(
                    os.path.join(output_directory, "SpatialJoin1" + ".shp"))
                arcpy.Delete_management(
                    os.path.join(output_directory, "SpatialJoin2" + ".shp"))
                arcpy.Delete_management(
                    os.path.join(output_directory, "SpatialJoin3" + ".shp"))
                arcpy.Delete_management(
                    os.path.join(output_directory, "SpatialJoin4" + ".shp"))

        arcpy.AddMessage("....completed: " +
                         os.path.splitext(os.path.basename(input_points))[0] +
                         ".")
        arcpy.CheckInExtension('Spatial')
        return
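# Note: dbf2csv() is used in this tool but not defined in the snippet. A
# minimal sketch of such a helper with arcpy and the csv module follows; the
# project's real implementation may differ.
import csv
import arcpy

def dbf2csv(dbf_path, csv_path):
    """Dump every field of a dBASE table to a comma-separated text file."""
    fields = [f.name for f in arcpy.ListFields(dbf_path)]
    with open(csv_path, 'wb') as out_file:  # 'wb' matches the Python 2 csv use above
        writer = csv.writer(out_file, delimiter=',')
        writer.writerow(fields)
        with arcpy.da.SearchCursor(dbf_path, fields) as cursor:
            for row in cursor:
                writer.writerow(row)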
Example #24
        if arcpy.Exists(points):
            arcpy.SelectLayerByAttribute_management(points, "CLEAR_SELECTION",
                                                    "")

        # ----------------------------------------------------- Shapefile Outputs
        stationsOut = outputFolder + os.sep + "StationPoints.shp"
        rStationsOut = outputFolder + os.sep + "RidgeStationPoints.shp"
        tileOut = outputFolder + os.sep + "TileLines.shp"
        ridgeOut = outputFolder + os.sep + "RidgeLines.shp"
        pointsOut = outputFolder + os.sep + "StakeoutPoints.shp"
        linesOut = outputFolder + os.sep + "ReferenceLines.shp"

        # ------------------------------------------------------------ Copy FC's to Shapefiles
        AddMsgAndPrint("\nCopying GPS layers to output Folder", 0)
        if arcpy.Exists(stationPoints):
            arcpy.CopyFeatures_management(stationPoints, stationsOut)
        else:
            AddMsgAndPrint(
                "\nUnable to find Station Points in project workspace. Copy failed. Export them manually.",
                1)

        if arcpy.Exists(rStationPoints):
            arcpy.CopyFeatures_management(rStationPoints, rStationsOut)
        else:
            AddMsgAndPrint(
                "\nUnable to find Ridge Station Points in project workspace. Copy failed. Export them manually.",
                1)

        if arcpy.Exists(tileLines):
            arcpy.CopyFeatures_management(tileLines, tileOut)
        else:
Example #25
def species(gdb, uttl, sp, epsg, id_join, config_file):

    results_folder = os.path.dirname(gdb)
    temp_folder = os.path.join(results_folder, 'temp')
    species_proj = os.path.join(temp_folder, 'species.tif')

    arcpy.env.workspace = results_folder
    arcpy.env.overwriteOutput = True

    if arcpy.Exists(os.path.join(gdb, 'Species_Table')):
        try:
            arcpy.Delete_management(os.path.join(gdb, 'Species_Table'))
            arcpy.DeleteField_management(
                uttl, [u'sp_mean', u'sp_value', u'sp_class'])
        except arcpy.ExecuteError:
            pass

    xls_file = pd.ExcelFile(config_file)
    df_criteria = xls_file.parse('Especies_Sensibles', index_col='Clases')
    low_lim = df_criteria.ix['Baja', 'Superior']
    high_lim = df_criteria.ix['Alta', 'Inferior']

    arcpy.ProjectRaster_management(sp, species_proj, epsg, 'NEAREST')
    zonal_stats(uttl, species_proj, 'sp_mean')

    table2csv(uttl, os.path.join(temp_folder, 'species.csv'))
    arcpy.DeleteField_management(uttl, [u'sp_mean'])
    df_spe = pd.read_csv(os.path.join(temp_folder, 'species.csv'),
                         index_col=id_join)

    df_spe['sp_mean'].fillna(value=0, inplace=True)
    df_spe['sp_mean'] = df_spe['sp_mean'] * 100.
    df_spe['sp_value'] = df_criteria.ix['Media', 'Value']
    df_spe.ix[df_spe[df_spe['sp_mean'] <= low_lim].index,
              'sp_value'] = df_criteria.ix['Baja', 'Value']
    df_spe.ix[df_spe[df_spe['sp_mean'] > high_lim].index,
              'sp_value'] = df_criteria.ix['Alta', 'Value']

    df_spe['sp_class'] = 'Media'
    df_spe.ix[df_spe[df_spe['sp_mean'] <= low_lim].index, 'sp_class'] = 'Baja'
    df_spe.ix[df_spe[df_spe['sp_mean'] > high_lim].index, 'sp_class'] = 'Alta'

    df_join = df_spe[['sp_mean', 'sp_value', 'sp_class']].copy()

    df_join.to_csv(os.path.join(temp_folder, 'Species_Table_Join.csv'))

    arcpy.TableToTable_conversion(
        os.path.join(temp_folder, 'Species_Table_Join.csv'), gdb,
        'Species_Table')

    expression = 'str(!Name!)'
    code_block = ''
    arcpy.AddField_management(os.path.join(gdb,
                                           'Species_Table'), 'Code', 'TEXT',
                              '', '', '10', '', 'NULLABLE', 'NON_REQUIRED', '')
    arcpy.CalculateField_management(os.path.join(gdb, 'Species_Table'), 'Code',
                                    expression, 'PYTHON', code_block)

    arcpy.MakeFeatureLayer_management(uttl, 'UTTL')
    arcpy.AddJoin_management('UTTL', 'Name',
                             os.path.join(gdb, 'Species_Table'), 'Code')

    arcpy.CopyFeatures_management('UTTL',
                                  os.path.join(gdb, r'UTTL_Basins_Species'))
    arcpy.Delete_management('UTTL')
    arcpy.Delete_management(uttl)

    arcpy.Rename_management(os.path.join(gdb, r'UTTL_Basins_Species'), uttl)
    clear_layers()
    rename_fields(os.path.join(gdb, r'UTTL_Basins'))
    rename_fields(os.path.join(gdb, r'UTTL_Basins'), r'Species_Table')

    base_name = ['OBJECTID_1', 'Name_1', 'Code']
    arcpy.DeleteField_management(uttl, base_name)
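# A hypothetical call of species() for illustration; every path, layer name and the
# spatial reference below are assumptions, not values taken from the original project.
if __name__ == '__main__':
    species(gdb=r'C:\project\results\results.gdb',
            uttl=r'C:\project\results\results.gdb\UTTL_Basins',
            sp=r'C:\project\inputs\species_richness.tif',
            epsg=arcpy.SpatialReference(3116),
            id_join='Name',
            config_file=r'C:\project\inputs\criteria.xlsx')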
Example #26
0
        arcpy.env.workspace = gdb  # change working directory to each GDB in the list
        arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(
            r"C:\Data\WWF\Processing\WWF_5min_grid_Moll.prj")  # make sure outputs use the equal-area projection
        fclist = arcpy.ListFeatureClasses()  # list the feature classes in the GDB
        for fc in fclist:
            for i in BiomeNameList:
                if fc == i:  # the GDB must contain a feature class named exactly like the biome, else this won't work
                    out_grid = gdb + "\\" + fc + "_roads_layer"  # name for the output roads layer
                    # make a layer from the global roads line feature class
                    arcpy.MakeFeatureLayer_management(in_grid, out_grid)
                    # select roads which overlap the biome polygon and store the selection
                    # as a new feature class in the respective biome GDB
                    try:
                        # select linear features which intersect the biome extent
                        arcpy.SelectLayerByLocation_management(out_grid, 'intersect', fc)
                        print "Selecting overlapping roads from %s" % fc
                        # copy those features to a new feature class
                        arcpy.CopyFeatures_management(out_grid, fc + "_roads")
                        print "Writing the overlapping roads to a new feature class %s_roads" % fc
                    except Exception:
                        print "Failed to select or copy roads for %s" % fc

            # new loop for tabulating intersection, i.e. to get the length of linear features in each grid cell
            for t in BiomeNameList_fishnet:
                if fc in str(t):  # find the fishnet which matches the biome name
                    in_zone_features = t
                    zone_fields = "OBJECTID"
                    in_class_features = str(t).replace("fishnet", "roads")  # line fc created in the step above
                    out_table = gdb + "\\" + fc + "_f"
                    class_fields = "#"
                    sum_fields = "LENGTH_KM"
                    xy_tolerance = "#"
                    out_units = "KILOMETERS"
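                    # The snippet ends before the geoprocessing call; a minimal sketch of the
                    # presumably intended Tabulate Intersection step, using the parameters
                    # assembled above (an assumption, not code from the original):
                    arcpy.TabulateIntersection_analysis(in_zone_features, zone_fields,
                                                        in_class_features, out_table,
                                                        class_fields, sum_fields,
                                                        xy_tolerance, out_units)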
Example #27
0
### Initialization
# load libraries
import arcpy
import os
import sys
import datetime
import toml

### Preliminary processing
# load parameters
with open("code/parameters/general.toml") as conffile:
    general_params = toml.loads(conffile.read())

# set environmental variables
arcpy.env.parallelProcessingFactor = general_params['threads']
arcpy.env.overwriteOutput = True

# get date string
current_date = os.listdir('data/raw/WDPA_geodatabase')[0].split('_')[1]

### Main processing
# extract data from geodatabase
arcpy.CopyFeatures_management(
    'data/raw/WDPA_geodatabase/WDPA_' + current_date + '_Public/WDPA_' +
    current_date + '_Public.gdb/WDPA_' + sys.argv[1] + '_' + current_date,
    sys.argv[2])
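# Hypothetical invocation of this script (the script name and arguments are assumptions):
#   python extract_wdpa.py poly data/intermediate/wdpa_poly.shp
# where sys.argv[1] is the WDPA feature-class suffix (e.g. 'poly' or 'point') and
# sys.argv[2] is the output feature class or shapefile path.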
Example #28
0
        ArcHydroTools.CatchmentGridDelineation(flow_dir, stream_link, catchment_grid)

        arcpy.AddMessage("\nCatchment polygons...")
        ArcHydroTools.CatchmentPolyProcessing(catchment_grid, catchment_poly)

        arcpy.AddMessage("\nDrainage lines...")
        ArcHydroTools.DrainageLineProcessing(stream_link, flow_dir, drainage_line)

        arcpy.AddMessage("\nAdjoint catchments...")
        ArcHydroTools.AdjointCatchment(drainage_line, catchment_poly, adjoint_catchment)

        # Prepare user-input outlets to conform with ArcHydro's requirements
        # for batch points

        arcpy.AddMessage("\nPreparing batch points...")
        arcpy.CopyFeatures_management(outlets, batchpoints)

        for bp_field, field_info in bp_field_dict.iteritems():
            field_type = field_info[0]
            field_default = field_info[1]
            
            # Add fields if they do not exist in the input outlets file
            if (len(arcpy.ListFields(batchpoints, bp_field)) == 0):
                arcpy.AddField_management(batchpoints, bp_field, field_type)
                
            # Set field values
            # Name = blank (to start - will set to user input field)
            # Descript = blank (to start - will also set to user input field)
            # BatchDone = 0 (do need to process this point)
            # SnapOn = 1 (do snap point to closest stream)
            # SrcType = 0 (0 = outlet, 1 = inlet)
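            # A minimal sketch (not from the original) of applying the defaults described
            # above inside this loop, assuming field_default holds the value listed in the
            # comments for each field in bp_field_dict:
            with arcpy.da.UpdateCursor(batchpoints, [bp_field]) as cursor:
                for row in cursor:
                    row[0] = field_default
                    cursor.updateRow(row)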
Example #29
0
    def copyfeatures(self, ofile):
        arcpy.CopyFeatures_management(self.fname, ofile.fname)
        DisplayMessages()
Example #30
0
edit.stopEditing(True)
unbuiltrelations.close()
del areawaycursor
arcpy.AddMessage("Complete multiAreas=" + str(completerels))
arcpy.AddMessage("Step 6.5 --- %s seconds ---" % (time.time() - stepstarttime))

#Step 7 join Attributes to ways
stepstarttime = time.time()
arcpy.AddMessage('Step 7/7')
arcpy.AddMessage("joining attributes to way features")
arcpy.MakeFeatureLayer_management(
    wayfc, "tempway", "", "",
    "Shape_Length Shape_Length VISIBLE;Way_ID Way_ID VISIBLE")
arcpy.AddJoin_management("tempway", "Way_ID", waytagtab, "Way_ID",
                         "KEEP_COMMON")
arcpy.CopyFeatures_management("tempway", finalwayfc, "", "0.05", "0.5", "5.0")
arcpy.Delete_management("tempway")
arcpy.Delete_management(wayfc)
arcpy.AddMessage("joining attributes to area features")
arcpy.MakeFeatureLayer_management(
    areawayfc, "temparea", "", "",
    "Shape_Length Shape_Length VISIBLE;Way_ID Way_ID VISIBLE")
arcpy.AddJoin_management("temparea", "Way_ID", waytagtab, "Way_ID",
                         "KEEP_COMMON")
arcpy.CopyFeatures_management("temparea", finalareawayfc, "", "0.05", "0.5",
                              "5.0")
arcpy.Delete_management("temparea")
arcpy.Delete_management(areawayfc)
arcpy.Delete_management(waytagtab)

#Sort out some of the mess caused by loading all loops as areas.