Example #1
def test_poi_corrcet(extracted_area, poi_region, out_path):
    # Select POI polygons crossed by the outline of the extracted area
    # arcpy.Intersect_analysis([extracted_area, poi_region], 'temp.shp')
    arcpy.MakeFeatureLayer_management(poi_region, 'selected_area')
    arcpy.SelectLayerByLocation_management('selected_area', 'CROSSED_BY_THE_OUTLINE_OF', extracted_area)
    # Remove the overlap, merge the remainder back in, and dissolve to a single feature class
    arcpy.Erase_analysis('selected_area', extracted_area, 'temp.shp')
    arcpy.Union_analysis([extracted_area, 'temp.shp'], 'temp_1.shp')
    arcpy.Dissolve_management('temp_1.shp', 'temp_2.shp')
    if os.path.isfile(workspace_path + '\\' + out_path):
        print('File exists. Overwriting the original shapefile.')
        arcpy.Delete_management(out_path)
    arcpy.Erase_analysis('temp_2.shp', '\\data\\waterregion.shp', out_path)
    arcpy.Delete_management('temp.shp')
    arcpy.Delete_management('temp_1.shp')
    arcpy.Delete_management('temp_2.shp')
Example #2
def geoProcess(datetime,
               province,
               target_area,
               density_cell="10",
               day_cell="15"):
    # if arcpy.Exists("lightningDay") and arcpy.Exists("lightningDensity"):
    #   return
    cwd = os.getcwd()
    # TODO: creating the file geodatabase takes 10+ seconds; consider doing this in parallel beforehand
    workpath = ''.join([cwd, u"/temp/", province, '/', datetime])
    workspace = ''.join([workpath, '/', target_area, '.gdb'])
    if not arcpy.Exists(workspace):
        arcpy.CreateFileGDB_management(workpath, ''.join([target_area,
                                                          '.gdb']))

    arcpy.env.workspace = workspace
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference("WGS 1984")
    arcpy.env.overwriteOutput = True

    origin_data = ''.join([workpath, u'/GDB.gdb/data', datetime])
    infeature = ''.join([cwd, u'/data/LightningBulletin.gdb/', target_area])
    # Get the current extent
    extents = getExtents(infeature)

    #### Build the lightning-density interpolation point file ####
    # Build the grid file
    grid_feature = makeGrids(extents, density_cell)
    # Clip the data
    clip_feature = clipData(origin_data, grid_feature, density_cell)
    # Build the lightning-density interpolation points
    lightningDensity(clip_feature, grid_feature, density_cell)
    # Build the mask file
    mask_feature = "densityMask"
    arcpy.Erase_analysis(grid_feature, infeature, mask_feature)

    #### Build the thunderstorm-day interpolation point file ####
    # Build the grid file
    grid_feature = makeGrids(extents, day_cell)
    # Clip the data
    clip_feature = clipData(origin_data, grid_feature, day_cell)
    # Build the thunderstorm-day interpolation points
    lightningDay(clip_feature, grid_feature, day_cell)
    # Build the mask file
    mask_feature = "dayMask"
    arcpy.Erase_analysis(grid_feature, infeature, mask_feature)

    #### Compute the related statistics
    statsProcess(infeature, workspace)
Example #3
 def processing(self):
     pg_frames = self.make_pg()
     self.get_pg(pg_frames)
     # Choose the erase shp
     areas_erase = []
     for i, area_level in enumerate(self.areas_level):
         if area_level <= self.max_level:  # Note: <= means this area has equal or higher weight
             areas_erase.append(self.areas[i])
     areas_erase_path = 'areas_erase.shp'
     erase_output_path = 'erase_' + self.output_path
     if len(areas_erase) > 0:  # Erase
         if os.path.isfile(self.workspace_path + '\\' + areas_erase_path):
             arcpy.Delete_management(areas_erase_path)
         arcpy.Merge_management(areas_erase, areas_erase_path)
         if os.path.isfile(self.workspace_path + '\\' + erase_output_path):
             arcpy.Delete_management(self.workspace_path + '\\' +
                                     erase_output_path)
         arcpy.Erase_analysis(self.output_path, areas_erase_path,
                              erase_output_path)
         zip_path = self.shp_to_zip(erase_output_path)
         arcpy.Delete_management(erase_output_path)
         arcpy.Delete_management(areas_erase_path)
     else:
         zip_path = self.shp_to_zip(self.output_path)
     arcpy.Delete_management(self.output_path)
     print('Frame done...')
     return zip_path
Example #4
def InsideState(state, nwi, lakes, outfc):
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(102039)

    # Select only wetlands with their center inside this state
    # This way all wetlands will only be represented once when we merge all states

    arcpy.env.workspace = 'in_memory'
    arcpy.MakeFeatureLayer_management(nwi, "nwi_lyr")
    cu.multi_msg('Selecting wetlands with their center in the state.')
    arcpy.SelectLayerByLocation_management("nwi_lyr", 'HAVE_THEIR_CENTER_IN',
                                           state, '', 'NEW_SELECTION')

    # Two things to make wetlands conform to the CSI definition
    # Select only palustrine systems that aren't freshwater ponds
    # and make it impossible for wetlands to be inside lakes
    wetland_type_field = arcpy.ListFields("nwi_lyr", "WETLAND_TY*")[0].name
    filter = """"ATTRIBUTE" LIKE 'P%' AND {} <> 'Freshwater Pond'""".format(
        wetland_type_field)
    cu.multi_msg("Selecting only palustrine wetlands...")
    arcpy.SelectLayerByAttribute_management("nwi_lyr", "SUBSET_SELECTION",
                                            filter)
    cu.multi_msg('Erasing lakes from wetlands layer.')
    arcpy.Erase_analysis("nwi_lyr", lakes, outfc)

    # Add two fields we will use, an ID field and the area in hectares
    arcpy.AddField_management(outfc, "WET_ID", "LONG")
    arcpy.CalculateField_management(outfc, "WET_ID", "!OBJECTID!", "PYTHON")

    arcpy.AddField_management(outfc, "AreaHa", "DOUBLE")
    arcpy.CalculateField_management(outfc, "AreaHa", "!shape.area@hectares!",
                                    "PYTHON")
Example #5
 def run(self):
     self.e.load()
     print "Starting Creating Facets processing..."
     if arcpy.Exists(self.i.cmx):    # this forces overwriting the cmx table
         arcpy.Delete_management(self.i.cmx)
     arcpy.gp.ZonalStatisticsAsTable_sa(self.i.catchment, "HydroID", self.i.fac, self.i.cmx, "DATA", "MAXIMUM")
     # --- old code ---
     # arcpy.gp.MakeFeatureLayer(self.i.catchment, "lyr")
     # arcpy.AddJoin_management("lyr", "HydroID", self.i.cmx, "HydroID", "KEEP_ALL")
     # arcpy.SelectLayerByAttribute_management("lyr","NEW_SELECTION","cmx.MAX > cmx.COUNT")
     # arcpy.Clip_analysis(self.i.drl, "lyr", self.i.drl_c, "")
     # arcpy.SelectLayerByAttribute_management("lyr","NEW_SELECTION","cmx.MAX <= cmx.COUNT")     # ESSA PARTE NAO FOI FEITA
     # arcpy.Clip_analysis(self.i.lfp, "lyr", self.i.lfp_c, "")
     # --- old code ---
     self.process_c(self.i.drl, self.i.drl_c)
     # self.process_c(self.i.lfp, self.i.lfp_c)
     arcpy.Erase_analysis(self.i.drl, self.i.drl_c, self.i.lfp_ct)
     arcpy.SpatialJoin_analysis(target_features=self.i.lfp,join_features=self.i.lfp_ct,out_feature_class=self.i.lfp_c,join_operation="JOIN_ONE_TO_ONE",join_type="KEEP_COMMON",match_option="INTERSECT")
     arcpy.Merge_management(self.i.drl_c + ";" + self.i.lfp_c, self.i.fm_vec)
     arcpy.PolylineToRaster_conversion(self.i.fm_vec, "HydroID", self.i.fm_ras, "MAXIMUM_LENGTH", "NONE", self.e.cs)
     arcpy.gp.Divide_sa(self.i.fm_ras, self.i.fm_ras, self.i.fm_ras_d)
     arcpy.gp.Reclassify_sa(self.i.fm_ras_d, "VALUE", "1 NODATA;NODATA 0", self.i.fm_ras_r, "DATA")
     arcpy.gp.Combine_sa(self.i.fm_ras_r + ";" + self.i.cat, self.i.fm_ras_c)
     arcpy.RasterToPolygon_conversion(self.i.fm_ras_c, self.i.facets, "NO_SIMPLIFY", "VALUE")
     # --- old code ---
     # arcpy.RemoveJoin_management("lyr", "")
     # arcpy.SelectLayerByAttribute_management("lyr", "CLEAR_SELECTION", "")
     # arcpy.Delete_management("lyr")
     # --- old code ---
     print "Ending Creating Facets processing..."
Example #6
def _create_random_points(area, buffer_val, points, messages):
    # Process: Buffer
    buffer_name = arcpy.CreateScratchName("temp", data_type="Shapefile", workspace=arcpy.env.scratchFolder)
    arcpy.Buffer_analysis(points, buffer_name, buffer_val, "FULL", "ROUND", "ALL", "", "PLANAR")

    # Process: Erase
    erase_name = arcpy.CreateScratchName("temp", data_type="Shapefile", workspace=arcpy.env.scratchFolder)
    arcpy.Erase_analysis(area, buffer_name, erase_name, "")

    # Process: Create Random Points
    random_name = arcpy.CreateScratchName("temp", data_type="FeatureClass", workspace=arcpy.env.scratchFolder)
    num_points = int(arcpy.GetCount_management(points))
    arcpy.CreateRandomPoints_management(arcpy.env.scratchFolder, random_name, erase_name, "0 0 250 250", num_points,
                                        "1 Meters", "POINT", "0")

    messages.AddMessage(str(arcpy.GetCount_management(random_name)) + " Random points created")

    # array = arcpy.da.FeatureClassToNumPyArray(random_name, "SHAPE@XY")

    # arcpy.Delete_management(buffer_name)
    # arcpy.Delete_management(erase_name)
    # arcpy.Delete_management(random_name)

    # return array
    return random_name
Example #7
def execute_task(args):
    # in_extentDict, data, traj_list = args
    in_extentDict = args

    fc_count = in_extentDict[0]
    print fc_count
    procExt = in_extentDict[1]
    print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]

    #set environments
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)

    #######  BUFFER ##########################################################################################

    # roads_buffer = "D:\\projects\\intact_land\\intact\\refine\\tiles_t2\\roads_buffer_{}.shp".format(fc_count)

    # arcpy.Buffer_analysis("D:\\projects\\intact_land\\intact\\refine\\mask\\roads.gdb\\region_roads", roads_buffer , "25 meters", "FULL", "ROUND", "ALL")
    if fc_count == '27':

        #######  ERASE ##########################################################################################
        in_features = 'D:\\projects\\intact_land\\intact\\main\\years\\2015.gdb\\clu_2015_noncrop_c'
        erase_features = 'D:\\projects\\intact_land\\intact\\refine\\mask\\final.gdb\\region_merged_masks_t2'
        out_feature_class = 'D:\\projects\\intact_land\\intact\\refine\\pp_erase\\clu_2015_noncrop_c_w_masks_{}'.format(
            fc_count)

        arcpy.Erase_analysis(in_features=in_features,
                             erase_features=erase_features,
                             out_feature_class=out_feature_class)
Example #8
def Merger(layer, layerTwo, layerThree):
    arcpy.env.overwriteOutput = True
    mergeLayer = "Temp_{0}".format(arcpy.Describe(layerTwo).name)
    arcpy.Merge_management([layerTwo, layerThree], mergeLayer)
    outLayer = arcpy.Erase_analysis(layer, mergeLayer)
    arcpy.AddWarning(outLayer)
    return outLayer
Example #9
def rasterizeSinglebeam(points,
                        mb,
                        out="__rast",
                        number=6,
                        radius=18,
                        overWrite=False):
    import arcpy
    from arcpy.sa import *
    if overWrite:
        arcpy.env.overwriteOutput = True
    outIDW = Idw(points, "bottom_elevation", 3, 2,
                 RadiusVariable(number, radius))
    if mb:
        conRast = Con(outIDW, 1, '', '')
        poly = arcpy.RasterToPolygon_conversion(conRast, "_poly" + points,
                                                "SIMPLIFY", "")
        buff = arcpy.Buffer_analysis(poly, "buff_" + points, "-15 feet",
                                     "FULL", "", "", "")
        footprint = arcpy.Erase_analysis(buff, mb, "footprint_" + points, "")
        finalRast = arcpy.Clip_management(outIDW, "", "rast_" + points,
                                          footprint, "", "ClippingGeometry")
        arcpy.Delete_management(poly)
        arcpy.Delete_management(buff)
        arcpy.Delete_management(footprint)
    else:
        outIDW.save(arcpy.Describe(points).catalogPath + out)
def create_karabakh(
    in_polygon1, in_line, in_polygon0, out_shp
):  # Arcpy doesn't have a method for cutting polygons with polylines. We use a solution suggested by https://gis.stackexchange.com/a/24757
    print "...Removing Nakhichevan"
    select_shp = "b_temp/select_shp.shp"
    arcpy.Select_analysis(in_polygon1, select_shp, '"ADM2" = \'az3100\'')
    print "...Creating the buffer on the north-east side of the Line of Contact"  # This covers the whole of Azerbaijan proper. Thus, it can be used to erase this part of Azerbaijan to extract Nagorno-Karabakh.
    buffer_shp = "b_temp/temp_buffer.shp"
    arcpy.Buffer_analysis(
        in_line, buffer_shp, "10 DecimalDegrees", "LEFT", "ROUND", "NONE", "",
        "GEODESIC"
    )  # see http://desktop.arcgis.com/en/arcmap/10.3/tools/analysis-toolbox/buffer.htm
    print "...Removing Azerbaijan proper"
    erase_shp = "b_temp/temp_erase.shp"
    arcpy.Erase_analysis(select_shp, buffer_shp, erase_shp)
    print "...Merging Karabakh polygon with the rest of Azerbaijan"
    inFeatures = [in_polygon0, erase_shp]
    outFeatures = out_shp
    arcpy.Union_analysis(inFeatures, outFeatures, "ONLY_FID")
    print "...creating the territory indicator 1/2"
    arcpy.AddField_management(out_shp, "territory", "TEXT")
    print "...creating the territory indicator 2/2"
    arcpy.CalculateField_management(
        out_shp, "territory", "Reclass(!FID_temp_e!)", "PYTHON_9.3",
        "def Reclass(name):\\n    if (name == 0):\\n        return \"NKR\"\\n    else:\\n        return \"AZE\""
    )  # FID_temp_e comes from Erase tool's output (erase_shp), taking the value of 0 for Karabakh and -1 for the rest
    print "Deleting intermediate files"
    files_to_delete = [
        in_polygon1, in_polygon0, select_shp, buffer_shp, erase_shp
    ]
    for file in files_to_delete:
        delete_if_exists(file)
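The buffer-then-erase workaround referenced in the comment above generalizes to any polygon/polyline split: buffer the line on one side only, then erase that buffer from the polygon to keep the other side. A minimal sketch with placeholder paths (not from the original script):

# Sketch only: cut a polygon with a polyline via a one-sided buffer (paths are placeholders)
cut_line = "b_temp/cut_line.shp"
polygon = "b_temp/polygon.shp"
side_buffer = "b_temp/side_buffer.shp"
kept_part = "b_temp/kept_part.shp"

arcpy.Buffer_analysis(cut_line, side_buffer, "10 DecimalDegrees", "RIGHT", "ROUND", "NONE")
arcpy.Erase_analysis(polygon, side_buffer, kept_part)  # keeps the portion on the line's left side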
def CombineShorelinePolygons(bndMTL: str, bndMHW: str, inletLines: str,
    ShorelinePts: str, bndpoly: str, SA_bounds: str='', verbose: bool=True):
    """
    Use MTL and MHW contour polygons to create shoreline polygon.
    'Shoreline' = MHW on oceanside and MTL on bayside
    """
    start = time.clock()
    # Inlet lines must intersect the MHW polygon
    symdiff = os.path.join(arcpy.env.scratchGDB, 'shore_1symdiff')
    split = os.path.join(arcpy.env.scratchGDB, 'shore_2split')
    join = os.path.join(arcpy.env.scratchGDB, 'shore_3_oceanMTL')
    erase = os.path.join(arcpy.env.scratchGDB, 'shore_4_bayMTL')
    union_2 = os.path.join(arcpy.env.scratchGDB, 'shore_5union')

    # Create layer (symdiff) of land between MTL and MHW and split by inlets
    print("...delineating land between MTL and MHW elevations...")
    arcpy.Delete_management(symdiff) # delete if already exists
    arcpy.SymDiff_analysis(bndMTL, bndMHW, symdiff)

    # Split symdiff at inlets (and SA_bounds)
    print("...removing the MHW-MTL areas on the oceanside...")
    if len(SA_bounds) > 0:
        arcpy.FeatureToPolygon_management([symdiff, inletLines, SA_bounds], split) # Split MTL features at inlets and study area bounds
    else:
        arcpy.FeatureToPolygon_management([symdiff, inletLines], split) # Split MTL features at inlets
    # Isolate polygons touching shoreline points and erase from symdiff
    arcpy.SpatialJoin_analysis(split, ShorelinePts, split+'_join', "#","KEEP_COMMON", match_option="COMPLETELY_CONTAINS")
    arcpy.Erase_analysis(symdiff, split+'_join', erase)

    # Merge bayside MHW-MTL with above-MHW polygon
    arcpy.Union_analysis([erase, bndMHW], union_2)
    arcpy.Dissolve_management(union_2, bndpoly, multi_part='SINGLE_PART') # Dissolve all features in union_2 to single part polygons
    print('''User input required! Select extra features in {} for deletion.\nRecommended technique: select the polygon/s to keep and then Switch Selection.\n'''.format(os.path.basename(bndpoly)))
    return(bndpoly)
Example #12
def Erase(in_features, erase_features, out_feature_class):
    try:
        arcpy.Erase_analysis(in_features, erase_features, out_feature_class,
                             '')
        #print 'Erase_analysis successful'
    except Exception:
        print arcpy.GetMessages()
def check_schedule_a_is_within(tfl_basename, input_gdb):
    """Takes input edit database and checks to ensure that all schedule A land is
    within the newly created/updated TFL Boundary. Returns False if Schedule A
    exists outside of boundary and True if all Schedule A is within"""
    arcpy.env.workspace = input_gdb

    if arcpy.Exists(input_gdb + os.sep + 'schedule_a_outside'):
        arcpy.Delete_management(input_gdb + os.sep + 'schedule_a_outside')

    arcpy.AddMessage('Looking for boundary at: ' + input_gdb + os.sep +
                     tfl_basename + '_Boundary')

    tfl_boundary = arcpy.MakeFeatureLayer_management(
        input_gdb + os.sep + tfl_basename + '_Boundary', 'tfl_boundary')
    tfl_schedule_a = arcpy.MakeFeatureLayer_management(
        input_gdb + os.sep + tfl_basename + '_Schedule_A', 'tfl_schedule_a')
    schedule_a_outside = arcpy.Erase_analysis(tfl_schedule_a, tfl_boundary,
                                              'schedule_a_outside')
    arcpy.Delete_management(tfl_boundary)
    arcpy.Delete_management(tfl_schedule_a)
    if int(arcpy.GetCount_management(schedule_a_outside)[0]) > 0:
        arcpy.AddWarning(
            'ERROR: Found Schedule A polygons outside of TFL Boundary - please review the schedule_a_outside feature class and fix before re-submitting'
        )
        return (False)
    else:
        arcpy.AddMessage('Schedule A is all within the boundary')
        arcpy.Delete_management(schedule_a_outside)
        return (True)
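For context, a hedged usage sketch of this check; the geodatabase path and TFL basename below are placeholders, not values from the original tool:

# Hypothetical call (placeholder paths); assumes arcpy and this module are already imported
input_gdb = r'C:\TFL_edits\TFL_48_edit.gdb'
tfl_basename = 'TFL_48'

if check_schedule_a_is_within(tfl_basename, input_gdb):
    arcpy.AddMessage('Schedule A check passed - continue with submission.')
else:
    arcpy.AddMessage('Review the schedule_a_outside feature class before re-submitting.')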
Example #14
def handleCanals(streamNetwork, canal, tempFolder, is_verbose):
    if is_verbose:
        arcpy.AddMessage("Removing canals...")
    if arcpy.GetInstallInfo()['Version'][0:4] == '10.5':
        streamNetworkNoCanals = os.path.join(tempFolder, "NoCanals.shp")
    else:
        streamNetworkNoCanals = os.path.join('in_memory', 'NoCanals')

    arcpy.Erase_analysis(streamNetwork, canal, streamNetworkNoCanals)
    findBraidedReaches(streamNetworkNoCanals, is_verbose)

    with arcpy.da.UpdateCursor(
            streamNetworkNoCanals,
            "IsMultiCh") as cursor:  # delete non-braided reaches
        for row in cursor:
            if row[0] == 0:
                cursor.deleteRow()

    arcpy.MakeFeatureLayer_management(streamNetwork, "lyrBraidedReaches")
    arcpy.MakeFeatureLayer_management(streamNetworkNoCanals, "lyrNoCanals")

    arcpy.SelectLayerByLocation_management("lyrBraidedReaches",
                                           "SHARE_A_LINE_SEGMENT_WITH",
                                           "lyrNoCanals", '', "NEW_SELECTION")

    arcpy.CalculateField_management("lyrBraidedReaches", "IsMultiCh", 1,
                                    "PYTHON")
    arcpy.CalculateField_management("lyrBraidedReaches", "IsMainCh", 0,
                                    "PYTHON")

    arcpy.Delete_management(streamNetworkNoCanals)
Example #15
    def __remove_exclusionary_areas(self, target_features, exclusion_list,
                                    workspace, timestamp):

        exclusive_features = 'tmp_exc_sut_{}'.format(timestamp)

        if exclusion_list:

            arcpy.AddMessage('Removing exclusionary areas...')

            if len(exclusion_list) == 1:
                exclusion_features = exclusion_list[0]
            else:
                exclusion_features = 'tmp_exc_fet_{}'.format(timestamp)
                arcpy.Union_analysis(in_features=exclusion_list,
                                     out_feature_class=os.path.join(
                                         workspace, exclusion_features),
                                     join_attributes='ONLY_FID')

            arcpy.Erase_analysis(in_features=target_features,
                                 erase_features=exclusion_features,
                                 out_feature_class=exclusive_features)
        else:
            exclusive_features = target_features

        return exclusive_features
    def removeFlares(self, object):
        outputLocation = r"C:\Users\lafia\Documents\GitHub\ConceptsOfSpatialInformation\CoreConceptsPy\ArcPy\data\China_noFlares.shp"
        # erase gas flares from country, generates a mask
        arcpy.Erase_analysis(
            r"C:\Users\lafia\Documents\GitHub\ConceptsOfSpatialInformation\CoreConceptsPy\ArcPy\data\China.shp",
            object.filename, outputLocation)

        return outputLocation
Example #17
    def erase_overlaps_from_coverages(self, pid_list_input=None):

        try:

            state_wildcard = self.wildcard

            if pid_list_input is None:

                pid_list = get_path.pathFinder.query_provider_by_FIPS(
                    path_links.num_provider_per_state,
                    str(int(state_wildcard)))
            else:
                pid_list = pid_list_input

            for pid in pid_list:

                fc_wildcard = "Coverage_map_{}_{}_*".format(self.wildcard, pid)
                print(fc_wildcard)
                fc_list = get_path.pathFinder(
                    env_0=self.inputGDB).get_file_path_with_wildcard_from_gdb(
                        fc_wildcard)

                erase_feature_list = get_path.pathFinder(
                    env_0=self.inputGDB2).get_file_path_with_wildcard_from_gdb(
                        "_merged_overlaps_{}".format(self.wildcard))

                output = os.path.join(
                    self.outputGDB,
                    os.path.basename(fc_list[0]) + "_minus_overlaps")

                if arcpy.Exists(output):
                    print("The output already exists, skipping.")
                else:
                    print("Erasing {} from coverage map {}".format(
                        erase_feature_list[0], fc_list[0]))
                    arcpy.Erase_analysis(in_features=fc_list[0],
                                         erase_features=erase_feature_list[0],
                                         out_feature_class=output)
                    print(arcpy.GetMessages(0))
                    logging.info(arcpy.GetMessages(0))

        except arcpy.ExecuteError:
            msgs = arcpy.GetMessages(2)
            arcpy.AddError(msgs)
            print(msgs)
            logging.info(msgs)
        except:
            tb = sys.exc_info()[2]
            tbinfo = traceback.format_tb(tb)[0]
            pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
                sys.exc_info()[1])
            msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"
            arcpy.AddError(pymsg)
            arcpy.AddError(msgs)
            print(pymsg)
            print(msgs)
            logging.info(pymsg)
            logging.info(msgs)
Example #18
def main():
    logging.debug("Starting West Nile Virus Simulation...")
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = f"{config_dict.get('proj_dir')}WestNileOutbreak.gdb"
    aprx = arcpy.mp.ArcGISProject(
        r"C:\Users\Owner\Documents\ArcGIS\Projects\WestNileOutbreak\WestNileOutbreak.aprx"
    )
    for map in aprx.listMaps():
        print("Map: " + map.name)
        for lyr in map.listLayers():
            print("- " + lyr.name)

    # Create container for layers.
    layer_list = [
        "Mosquito_Larval_Sites", "Wetlands_Regulatory", "OSMP_Properties",
        "Lakes_and_Reservoirs", "avoid_points"
    ]

    # Define workspace.
    resultsgeodatabase = r"C:\Users\Owner\Documents\ArcGIS\Projects\WestNileOutbreak\WestNileOutbreak.gdb\\"
    arcpy.env.workspace = resultsgeodatabase

    featureclass = arcpy.ListFeatureClasses()

    logging.debug("Ending West Nile Virus Simulation!")

    for layer in layer_list:
        print(layer)
        # Ask user for buffer distance input.
        dist = input(
            "Please type in a buffer distance between 1000-5000 feet: ")
        bufferlayer = buffer(layer, dist)

    featureclass = arcpy.ListFeatureClasses()

    inter_list = [
        "Mosquito_Larval_Sites_buf", "Wetlands_Regulatory_buf",
        "OSMP_Properties_buf", "Lakes_and_Reservoirs_buf"
    ]

    output_intersectlayer = intersect(inter_list)

    target_features = "Boulder_addresses"
    join_features = output_intersectlayer
    out_feature_class = "spatial_join"

    arcpy.SpatialJoin_analysis(target_features, join_features,
                               out_feature_class)
    print("Spatial join layer created.")

    target_features = r"C:\Users\Owner\Documents\ArcGIS\Projects\WestNileOutbreak\WestNileOutbreak.gdb\spatial_join"
    erase_features = r"C:\Users\Owner\Documents\ArcGIS\Projects\WestNileOutbreak\WestNileOutbreak.gdb\avoid_points_buf"
    out_feature_class = r"C:\Users\Owner\Documents\ArcGIS\Projects\WestNileOutbreak\WestNileOutbreak.gdb" \
                        r"\output_final_analysis "

    arcpy.Erase_analysis(target_features, erase_features, out_feature_class)
    print("Erase layer created.")
def erase_mask_list(tile_list, country_shapefile, datadir):
    clipped_list = []
    for tileid in tile_list:
        mask_tile = os.path.join(r"s:\tcd_masks", tileid + ".shp")
        clipped_mask = tileid + "_clip.shp"
        clipped_mask_path = os.path.join(datadir, clipped_mask)
        arcpy.Erase_analysis(mask_tile, country_shapefile, clipped_mask_path)
        clipped_list.append(clipped_mask_path)
    return clipped_list
Example #20
 def erase(self):
     for i in self.listbuffers:
         out_name = i.replace("buffer_with_inputmap", "donut_buffer")
         arcpy.Erase_analysis(i, self.inputmap, out_name, '')
         #self.lista_erases.append(out_name)
     Listerased = arcpy.ListFeatureClasses()
     self.onelist = Listerased
     self.pattern = "_donut_buffer_"
     self.Listerased = MSBuffer.selecInList(self)
Example #21
def _constrain_from_points(constrain_area, excluding_points,
                           excluding_distance, select_inside):
    """
    _constrain_from_points
        Constrains an area to intersect/exclude zones around given points
         
    :param constrain_area: General area to be constrained 
    :param excluding_points: Seed points to create the areas 
    :param excluding_distance: Radius of the areas 
    :param select_inside: Boolean to select if the area should be intersected (True) or excluded (false)
    
    :return: Area after be intersected/excluded  
    """
    global MESSAGES
    MESSAGES.AddMessage("Constraining Area from points...")
    _verbose_print("Constrain Area: {}".format(constrain_area))
    _verbose_print("Excluding points : {}".format(excluding_points))
    _verbose_print("Excluding distance : {}".format(excluding_distance))
    _verbose_print("select inside : {}".format(select_inside))

    scratch_files = []
    try:
        # Create the buffer area from the points
        buffer_scratch = arcpy.CreateScratchName(
            "buff_sct.shp", workspace=arcpy.env.scratchWorkspace)
        arcpy.Buffer_analysis(excluding_points,
                              buffer_scratch,
                              excluding_distance,
                              dissolve_option="ALL")
        scratch_files.append(buffer_scratch)
        _verbose_print(
            "Scratch file created (buffer): {}".format(buffer_scratch))
        combined_scratch = arcpy.CreateScratchName(
            "comb_sct.shp", workspace=arcpy.env.scratchWorkspace)
        # Intersect/Delete from the original area
        if select_inside:
            _verbose_print("Intersect selected")
            arcpy.Intersect_analysis([buffer_scratch, constrain_area],
                                     combined_scratch)
        else:
            _verbose_print("Erase selected")
            arcpy.Erase_analysis(constrain_area, buffer_scratch,
                                 combined_scratch)
        _verbose_print(
            "Scratch file created (erase): {}".format(combined_scratch))
    except:
        _verbose_print("Error constraining from points")
        raise
    finally:
        # Scratch files created along the way are not needed afterwards; delete them at the end of execution or when an error occurs
        for s_file in scratch_files:
            _verbose_print("Scratch file deleted: {}".format(s_file))
            arcpy.Delete_management(s_file)

    _verbose_print("Constrain from points finished")
    return combined_scratch
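A minimal call sketch, assuming the module-level MESSAGES object has been initialized and a scratch workspace is set; the feature class names and distance are placeholders:

# Hypothetical usage (placeholder names); requires MESSAGES to be set elsewhere in the toolbox
arcpy.env.scratchWorkspace = r'C:\temp\scratch'
inside_zones = _constrain_from_points('study_area', 'sample_points', '500 Meters', True)
outside_zones = _constrain_from_points('study_area', 'sample_points', '500 Meters', False)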
    def erase(self):
        for i in self.listbuffers:
            out_name = i.replace("Buffer", "Erase")
            arcpy.Erase_analysis(i, self.UCs, out_name, '')
            self.lista_erases.append(out_name)
        Listerase = arcpy.ListFeatureClasses()
        self.lista = Listerase

        self.pattern = "_Erase_"
        self.Listerase = MSBuffer.selecInList(self)
Example #23
def create_nonFP_byYear():
    ## Description: merge all the counties for a given year, then use the Erase() function with clu_[year]_crop to get the non-crop dataset

    yearlist = range(2005, 2016)

    ##create an array to hold all files from all state gdbs for a given year
    for year in yearlist:
        print year
        countylist = []

        arcpy.env.workspace = "D:\\projects\\intactland\\intact_clu\\main\\states"

        # List all file geodatabases in the current workspace
        workspaces = arcpy.ListWorkspaces("*", "FileGDB")

        #get each state geodatabase
        for workspace in workspaces:
            print workspace
            arcpy.env.workspace = workspace

            ##list features for a given year in each state geodatabase
            featureclasses = arcpy.ListFeatureClasses("*_acea_{}*".format(
                str(year)))

            for fc in featureclasses:
                print 'fc:', fc
                substring_list = fc.split("_")
                countylist.append("'" + substring_list[2] + "'")

        print "number of counties for {}: {}".format(str(year),
                                                     len(countylist))

        def createWhereString(countylist):
            print countylist
            cntyString = ' OR atlas_stco='.join(countylist)
            cond = 'atlas_stco={}'.format(cntyString)
            return cond

        if len(countylist) > 0:
            arcpy.env.workspace = "G:\\ancillary_storage\\intactland\\intact_clu\\main\\years"
            in_features = 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\vector\\shapefiles.gdb\\counties'
            layer = 'counties_{}'.format(str(year))
            where_clause = createWhereString(countylist)

            # # # Make a layer from the feature class
            arcpy.MakeFeatureLayer_management(in_features, layer, where_clause)

            #create a feature class containing a counties with data for a given year
            # arcpy.FeatureClassToFeatureClass_conversion(layer, "D:\\projects\\intact_land\\years\\{}.gdb".format(str(year)), "clu_{}_counties".format(str(year)))

            ##------------create to noncrop dataset per year ---------------------------------------------------------------------------
            arcpy.Erase_analysis(
                in_features=layer,
                erase_features="{0}.gdb\\fp_{0}".format(str(year)),
                out_feature_class="{0}.gdb\\nfp_{0}".format(str(year)))
def calcArea_erase_arcpy(selectSites_gdf, envData_gdf, areaField, regionField,
                         envDataField, envDataName, selectedSitesField,
                         selectedSitesFilename):

    ## Erase merged env category data
    sp_erased = arcpy.Erase_analysis(selectSites_gdf, envData_gdf,
                                     "in_memory/temp_erased")
    elapsed_time = (time.time() - start_time) / (60)
    print("Total time for completion: " + str(elapsed_time) + " minutes")
    ## Calculate new Area field
    arcpy.AddField_management(sp_erased, "Area_remaining", "FLOAT")
    arcpy.CalculateField_management(in_table = sp_erased, field = "Area_remaining", \
                                    expression = "!Shape.Area@squarekilometers!", \
                                    expression_type = "PYTHON_9.3")

    ## convert erased gdb table to numpy array to Pandas DF:
    sp_erased_df = pd.DataFrame(arcpy.da.FeatureClassToNumPyArray(sp_erased, \
                                                                      [regionField, areaField, 'Area_remaining']))

    area_erased_regionSum_df = sp_erased_df.groupby(
        [regionField])['Area_remaining'].sum().reset_index()

    ##### use the original (unerased selectedSites_gdf) feature/shapefile, convert to pd df and calculate the area_allSelSites_km2
    area_orig_df = pd.DataFrame(
        arcpy.da.FeatureClassToNumPyArray(selectSites_gdf,
                                          [regionField, areaField]))
    ## Calculate the sum by region
    area_orig_regionSum_df = area_orig_df.groupby(
        [regionField])[areaField].sum().reset_index()

    ## merge the erased and original regionSum_df
    area_merged_df = area_erased_regionSum_df.merge(area_orig_regionSum_df,
                                                    how="outer",
                                                    on=regionField)

    ## subtract Area_remaining from Area (original)
    area_merged_df["area_envData_km2"] = area_merged_df[
        "Area"] - area_merged_df["Area_remaining"]

    ## rename fields as needed
    area_merged_df.rename(columns={
        areaField: 'area_allSelSites_km2',
        regionField: 'region'
    },
                          inplace=True)
    ## assign new fields
    area_merged_df[envDataField] = envDataName
    area_merged_df[selectedSitesField] = selectedSitesFilename
    ## remove "Area_remaining" column
    area_merged_df.drop(["Area_remaining"], axis=1)

    area_merged_df["percent_selSites"] = area_merged_df[
        "area_envData_km2"] / area_merged_df['area_allSelSites_km2']

    return area_merged_df
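A hedged call sketch for the function above; all layer, field, and file names are placeholders. The body reads a module-level start_time and indexes the literal column "Area", so areaField is assumed to be "Area" here:

# Hypothetical usage (placeholder inputs); assumes arcpy, pandas as pd, and time are imported
start_time = time.time()
summary_df = calcArea_erase_arcpy("selected_sites", "wetlands", "Area", "Region",
                                  "env_layer", "wetlands", "sites_file",
                                  "selected_sites_2020.shp")
print(summary_df.head())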
Example #25
def table(zones, idFld, output, exterior=True, selfrel=True):
    common.debug('running neighbour table', zones, idFld, output, exterior,
                 selfrel)
    with common.PathManager(output) as pathman:
        if exterior:
            common.progress('mapping zone surroundings')
            buffer = pathman.tmpFC()
            arcpy.Buffer_analysis(zones,
                                  buffer,
                                  '50 Meters',
                                  dissolve_option='ALL')
            common.progress('creating exterior zone')
            erased = pathman.tmpFC()
            arcpy.Erase_analysis(buffer, zones, erased, TOLERANCE)
            common.progress('identifying exterior zone')
            common.calcField(erased, idFld, EXTERIOR_ID,
                             common.pyTypeOfField(zones, idFld))
            # common.progress('eliminating sliver polygons')
            common.progress('merging exterior zone')
            jointo = pathman.tmpFC()
            arcpy.Merge_management([zones, erased], jointo)
        else:
            jointo = zones
        common.progress('finding neighbours')
        joined = pathman.tmpFC()
        fm = arcpy.FieldMappings()
        fm.addFieldMap(
            common.fieldMap(zones, idFld, common.NEIGH_FROM_FLD, 'FIRST'))
        fm.addFieldMap(
            common.fieldMap(jointo, idFld, common.NEIGH_TO_FLD, 'FIRST'))
        arcpy.SpatialJoin_analysis(zones, jointo, joined, 'JOIN_ONE_TO_MANY',
                                   'KEEP_COMMON', fm, 'INTERSECT', TOLERANCE)
        common.progress('converting to neighbour table')
        fm2 = arcpy.FieldMappings()
        fm2.addFieldMap(
            common.fieldMap(joined, common.NEIGH_FROM_FLD,
                            common.NEIGH_FROM_FLD, 'FIRST'))
        fm2.addFieldMap(
            common.fieldMap(joined, common.NEIGH_TO_FLD, common.NEIGH_TO_FLD,
                            'FIRST'))
        if selfrel:
            query = common.safeQuery(
                "[{}] <> '{}'".format(common.NEIGH_FROM_FLD, EXTERIOR_ID),
                joined)
        else:
            query = common.safeQuery(
                "[{0}] <> [{1}] AND [{0}] <> '{2}'".format(
                    common.NEIGH_FROM_FLD, common.NEIGH_TO_FLD, EXTERIOR_ID),
                joined)
        arcpy.TableToTable_conversion(joined, pathman.getLocation(),
                                      pathman.getOutputName(), query, fm2)
        common.clearFields(output,
                           [common.NEIGH_FROM_FLD, common.NEIGH_TO_FLD])
    return output
Example #26
def erase_PAD(state, ras, replace):
    # Process: Erase

    pad_state = constants.pad_dir + 'PAD-US_' + state + '\\PADUS1_3_' + state + '.gdb\\PADUS1_3' + state
    pad_out_dir = constants.pad_dir + 'output\\' + state + os.sep
    bound_out_dir = constants.bound_dir + 'output\\' + state + os.sep
    state_dir = constants.out_dir + os.sep + state + os.sep

    constants.make_dir_if_missing(pad_out_dir)
    constants.make_dir_if_missing(bound_out_dir)
    constants.make_dir_if_missing(state_dir)

    select_state = bound_out_dir + state + '.shp'
    erased_pad = pad_out_dir + state + '.shp'
    extract_comb = state_dir + 'ext_' + state + '_' + str(
        constants.START_YEAR)[2:] + '_' + str(constants.END_YEAR)[2:]

    #
    if arcpy.Exists(select_state) and not (replace):
        pass
    else:
        where = '"STATE_ABBR" = ' + "'%s'" % state.upper()
        try:
            arcpy.Select_analysis(constants.BOUNDARIES, select_state, where)
        except:
            logging.info(arcpy.GetMessages())

    #
    if arcpy.Exists(erased_pad) and not (replace):
        pass
    else:
        try:
            arcpy.Erase_analysis(select_state, pad_state, erased_pad, "")
        except:
            logging.info(arcpy.GetMessages())

    #
    if arcpy.Exists(extract_comb) and not (replace):
        pass
    else:
        try:
            # Create bounding box from polygon (xmin, ymin, xmax, ymax)
            #desc = arcpy.Describe(erased_pad)
            #rectangle = "%s %s %s %s" % (desc.extent.XMin, desc.extent.YMin, desc.extent.XMax,   desc.extent.YMax)

            #arcpy.Clip_management(ras,rectangle,extract_comb,erased_pad,"#","ClippingGeometry")
            arcpy.gp.ExtractByMask_sa(ras, erased_pad, extract_comb)
        except:
            logging.info(arcpy.GetMessages())

    logging.info('\t Erasing PAD from state ' + state)
    return extract_comb
Example #27
def handleCanals(stream_network, canal, perennial_network, temp_folder, is_verbose):
    """
    Finds braided sections of the stream network, not counting canals, if canals are available
    :param stream_network:
    :param canal:
    :param temp_folder:
    :param is_verbose:
    :return:
    """
    if is_verbose:
        arcpy.AddMessage("Removing canals...")
    if arcpy.GetInstallInfo()['Version'][0:4] == '10.5':
        stream_network_no_canals = os.path.join(temp_folder, "NoCanals.shp")
        perennial_no_canals = os.path.join(temp_folder, "PerenNoCanals.shp")
    else:
        stream_network_no_canals = os.path.join('in_memory', 'NoCanals')
        perennial_no_canals = os.path.join('in_memory', "PerenNoCanals")

    arcpy.Erase_analysis(stream_network, canal, stream_network_no_canals)
    if perennial_network is not None:
        arcpy.Erase_analysis(perennial_network, canal, perennial_no_canals)
    else:
        perennial_no_canals = None
    findBraidedReaches(stream_network_no_canals, perennial_no_canals, is_verbose)

    with arcpy.da.UpdateCursor(stream_network_no_canals, "IsMultiCh") as cursor: # delete non-braided reaches
        for row in cursor:
            if row[0] == 0:
                cursor.deleteRow()

    arcpy.MakeFeatureLayer_management(stream_network, "lyrBraidedReaches")
    arcpy.MakeFeatureLayer_management(stream_network_no_canals, "lyrNoCanals")

    arcpy.SelectLayerByLocation_management("lyrBraidedReaches", "SHARE_A_LINE_SEGMENT_WITH",
                                           "lyrNoCanals", '', "NEW_SELECTION")

    arcpy.CalculateField_management("lyrBraidedReaches", "IsMultiCh", 1, "PYTHON")
    arcpy.CalculateField_management("lyrBraidedReaches", "IsMainCh", 0, "PYTHON")

    arcpy.Delete_management(stream_network_no_canals)
Example #28
def erase(inShp, erase_feat, out):
    """
    Difference between two feature classes
    """
    
    import arcpy
        
    arcpy.Erase_analysis(
        in_features=inShp, erase_features=erase_feat, 
        out_feature_class=out
    )
    
    return out
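A short usage sketch of this wrapper; the shapefile paths are placeholders:

# Hypothetical usage (placeholder paths)
import arcpy

arcpy.env.overwriteOutput = True
out_fc = erase(r'C:\data\parcels.shp', r'C:\data\water.shp', r'C:\data\parcels_minus_water.shp')
print(out_fc)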
Example #29
def innerWedgeErase(centerX, centerY, r2, wedge, projOut):

    """Cut out the inner part of the wedge based upon the original point feature
    class's radius2 field.  Returns a string with the location of the output
    wedge.

    Keyword arguments:
    centerX -- The X coordinate of the center of the wedge (int or float)
    centerY -- The Y coordinate of the center of the wedge (int or float)
    r2      -- The inner radius of the wedge (int or float)
    wedge   -- A string specifying the location of the wedge feature class or
    object from which to erase (string)
    projOut      -- The projection of the wedge (arcpy.SpatialReference)

    r2 must be greater than 0 and must be in meters.
    """
    
    try:

        #Create an arcpy.Point() object that will be buffered        
        pt = arcpy.Point()
        pt.X = centerX
        pt.Y = centerY

        #Create a PointGeometry out of the arcpy.Point object, then add the
        #PointGeometry object to the list that will be buffered below.
        pointGeometry = arcpy.PointGeometry(pt,projOut)

        # Make a list holding just the center point geometry; the list is
        # passed directly to Buffer_analysis to create the circle.
        centerList = []
        centerList.append(pointGeometry)

        #Buffer the wedge center by the inner radius distance, then use that
        #buffer to erase from the input wedge
        circle = "in_memory\\circle"
        arcpy.Buffer_analysis(centerList, circle, str(r2) + ' METERS')
        oWedge2 = "in_memory\\oWedge2"
        arcpy.Erase_analysis(wedge, circle, oWedge2)
        arcpy.Delete_management(circle)
        del centerList
        arcpy.Delete_management(wedge)
        return oWedge2

    except Exception as e:
        tb = sys.exc_info()[2]
        arcpy.AddError("An error occured on line %i" % tb.tb_lineno)
        print str(e)
        arcpy.AddError(str(e))
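A hedged usage sketch; the coordinates, radius, spatial reference, and wedge feature class below are placeholders:

#Hypothetical usage (placeholder values); assumes the wedge feature class already exists
projOut = arcpy.SpatialReference(26910)   #assumed projected coordinate system with metre units
wedge = "in_memory\\oWedge"               #assumed wedge built earlier in the workflow
donutWedge = innerWedgeErase(480000.0, 5450000.0, 50.0, wedge, projOut)
#donutWedge now references "in_memory\\oWedge2"; the input wedge is deleted by the function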
Example #30
 def geo_process(self, gbz_merge_lyr, dltb_query_lyr):
     """
     地理逻辑处理
     :param dltb_query_lyr: 定义查询后的地类图斑数据
     :param gbz_merge_lyr: 所有高标注农田的合并图层
     :return:
     """
     out_feature_class = "erase_left"
     arcpy.Erase_analysis(dltb_query_lyr, gbz_merge_lyr, out_feature_class)
     print "Erase success"
     builded_area = "builded_area"
     arcpy.Erase_analysis(dltb_query_lyr, out_feature_class, builded_area)
     print "Erase success"
     
     builded_area_id = "builded_area_id"
     arcpy.Identity_analysis(builded_area, self.input_xzq, builded_area_id)
     print "Identity success"
     fields_o = arcpy.ListFields(builded_area_id)
     f_exist = all([ezarcpy.check_field_exit(fields_o, i) for i in
                    ["XZQDM", "XZQMC", "Shape_Area"]])
     if not f_exist:
         raise RuntimeError("field does not exist")
 
     # Add field XZQ to hold XZQDM & XZQMC
     ezarcpy.add_field(builded_area_id, [XZQ], "TEXT", 60)
     expression = "[XZQDM] & [XZQMC]"
     arcpy.CalculateField_management(builded_area_id, XZQ, expression, "VB")
     cal_rel_area = "cal_rel_area"
     if self.name:
         cal_rel_area = self.name
     arcpy.Dissolve_management(builded_area_id, cal_rel_area, "XZQ")
     print "Dissolve suceess"
     # 添加字段MJM,用于存放面积亩
     ezarcpy.add_field(cal_rel_area, [MJM], "DOUBLE")
     expression2 = "[Shape_Area] * 0.0015"
     arcpy.CalculateField_management(cal_rel_area, MJM, expression2, "VB")
     print u"output {}.shp".format(self.name)
     return cal_rel_area