示例#1
0
# make a lyr
#arcpy.MakeFeatureLayer_management(infile2, infile2 + "_lyr")
'''
**************************************************************************************************
'''
# Iterate over crater centre points; for each crater not yet exported as an
# ASCII grid, select it by CRATER_ID, copy it to a temporary layer, and build
# a crater-centred equirectangular projection string.
# NOTE(review): snippet is truncated here — `ix` is presumably also
# incremented in the else-branch and `str_bef` completed/used further down.
with arcpy.da.UpdateCursor(infile2,
                           ["Shape@", "x_coord", "y_coord"]) as cursor:
    ix = 0
    for row in cursor:
        print(ix)
        # Skip craters whose ASCII grid already exists on disk.
        if os.path.isfile(outASCII + crater_id[ix] + '.asc'):
            ix = ix + 1
        else:
            #query selection CENTER
            query = "CRATER_ID = '" + crater_id[ix] + "'"
            arcpy.SelectLayerByAttribute_management(infile2, "NEW_SELECTION",
                                                    query)

            # make a layer of the selection
            arcpy.CopyFeatures_management(infile2, "CENTER_TMP")

            # old coordinate systems
            desc = arcpy.Describe("CENTER_TMP")
            spatialReference = desc.spatialReference

            # project to the right coordinate systems
            # central meridian should be replaced by the longitude
            # standard parallel_1 by the latitude
            cent_med = np.round(row[1], decimals=0)
            std_parall = np.round(row[2], decimals=0)

            # WKT prefix of a Moon equirectangular projection; the central
            # meridian / standard parallel parameters are appended later.
            str_bef = "PROJCS['Equirectangular_Moon',GEOGCS['GCS_Moon',DATUM['D_Moon',SPHEROID['Moon_localRadius',1737400.0,0.0]],PRIMEM['Reference_Meridian',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Equidistant_Cylindrical'],PARAMETER['false_easting',0.0],PARAMETER['false_northing',0.0],"
# Cropland Data Layer workflow (Python 2: print statements, tab indentation).
# For each fishnet tile FID, project the tile to the CDL projection
# (USA Contiguous Albers, WKID 102039) in preparation for a CDL download.
# NOTE(review): loop body is truncated at the `ext = ...` line — the original
# presumably continues with the CDL request using `cdlUrl` and `ext`.
sr = arcpy.SpatialReference(102039)

# Set up your AOIs, then loop through each and save AOI rotations to some output directory
arcpy.MakeFeatureLayer_management(watershedFile, 'fishnet')

# Hand-picked tile FIDs (a 5x3 window of the fishnet grid).
FIDs = [130, 131, 132, 133, 134, 149, 150, 151, 152, 153, 168, 169, 170 , 171, 172]

# for FID in range(0, 279):
# for FID in range(17, 18):
for FID in FIDs:
	outRotation = outDir + '/tile_' + str(FID)
	print outRotation
	if arcpy.Exists(outRotation):
		continue
	arcpy.SelectLayerByAttribute_management('fishnet', 'NEW_SELECTION', '"FID" = ' + str(FID))

	rid = str(random.randint(10000,99999))
	watershedCdlPrj = tempGdb + '/watershedCdlPrj_' + rid
	samplePts = tempGdb + '/samplePts_' + rid
	outRotation1 = tempGdb + '/outRotation1_' + rid
	outHigh1 = tempGdb + '/outHigh1_' + rid
	outLow1 = tempGdb + '/outLow1_' + rid
	cdlUrl = r'http://nassgeodata.gmu.edu:8080/axis2/services/CDLService/GetCDLFile?'

	arcpy.AddMessage("Projecting Area Of Interest to Cropland Data Layer projection...")
	print watershedCdlPrj
	arcpy.Project_management('fishnet', watershedCdlPrj, sr)

	ext = arcpy.Describe(watershedCdlPrj).extent
# Zoom the active data frame to the features matching each distinct value of a
# field (e.g. each borough name), eventually exporting one image per value.
# Relies on module-level `mxd`, `fc`, and `unique_fieldname` defined elsewhere.
map_df = arcpy.mapping.ListDataFrames(mxd, "*")[0]

# Collect the distinct values of the field, preserving first-seen order.
# BUG FIX: the original assigned `unique_fieldname.unique` (a nonexistent
# attribute of the field-name string) instead of an empty list, and then
# appended every row without deduplicating.
list_of_field_values = []
_seen = set()

with arcpy.da.SearchCursor(fc, unique_fieldname) as cursor:
    for row in cursor:
        if row[0] not in _seen:
            _seen.add(row[0])
            list_of_field_values.append(row[0])

for field_value in list_of_field_values:
    # select feature by attribute from layer - attribute being boro name
    # BUG FIX: the original where-clause contained a stray trailing quote
    # ('{}'') which produced invalid SQL.
    arcpy.SelectLayerByAttribute_management(
        fc,
        "NEW_SELECTION",
        """ "{}" = '{}' """.format(unique_fieldname, field_value),
        )

    map_df.zoomToSelectedFeatures()

#   arcpy.clearselectedfeature

#	arcpy.mapping.ExportToPNG
def feature_class_to_dataframe(in_fc):
    """Load all rows of a feature class into a pandas DataFrame.

    NOTE(review): truncated in this source — `df` has no column names set and
    is never returned here; presumably both happen in the missing tail.
    """
    field_list = get_fc_fields(in_fc)  # field names come from a project helper
    df = pd.DataFrame([row for row in arcpy.da.SearchCursor(in_fc, field_list)])  # converting feature class to df
示例#4
0
# Recompute the geodesic area (square kilometres) of each dissolved
# protected-area polygon using the WGS84 geographic coordinate system.
arcpy.AddGeometryAttributes_management(
    wdpa_all_relevant_shape_simpl_dissolved_iso3_clipped_land, "AREA_GEODESIC",
    "", "SQUARE_KILOMETERS",
    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
)
print("Area_geo re-computed")

# Process: Make Feature Layer over 1 km2 (2)
# Keep only polygons with a geodesic area of at least 1 km².
arcpy.MakeFeatureLayer_management(
    wdpa_all_relevant_shape_simpl_dissolved_iso3_clipped_land,
    dissolved_iso3_clipped_land_over_1_km2, "\"AREA_GEO\" >=1", "",
    "iso3 iso3 VISIBLE NONE;AREA_GEO AREA_GEO VISIBLE NONE;Shape_length Shape_length VISIBLE NONE;Shape_area Shape_area VISIBLE NONE"
)

# Process: Select Layer By Attribute (2)
# Select records whose iso3 is a single code (multi-country records hold
# ';'-separated codes, excluded by the NOT LIKE '%;%' filter).
arcpy.SelectLayerByAttribute_management(dissolved_iso3_clipped_land_over_1_km2,
                                        "NEW_SELECTION", "iso3 NOT LIKE '%;%'")

# Process: Make Feature Layer NO multi iso3
arcpy.MakeFeatureLayer_management(
    select_NO_multi_iso3, wdpa_NO_multi_iso3, "", "",
    "iso3 iso3 VISIBLE NONE;AREA_GEO AREA_GEO VISIBLE NONE;Shape_length Shape_length VISIBLE NONE;Shape_area Shape_area VISIBLE NONE"
)

# Process: Copy Features NO multi iso3
arcpy.CopyFeatures_management(wdpa_NO_multi_iso3, iso3_no_multi, "", "0", "0",
                              "0")
print("Feature class with only No-multi iso3 created")

# Process: Add Field myISO3
# Text field, presumably populated with the country code further down — the
# population step is not visible in this snippet.
arcpy.AddField_management(iso3_no_multi, "myISO3", "TEXT", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
示例#5
0
def convertProposedToRasterCredit(anthroFeaturesRemoved, cellSize):
    """Rasterize each subtype of the proposed surface disturbance.

    For every distinct value in the ``Subtype`` field of
    ``anthroFeaturesRemoved``, selects the matching features, converts them to
    a raster at ``cellSize`` resolution, fills NoData cells with 0, and saves
    the result as ``Proposed_<subtype>``.

    Returns the list of distinct subtype values found.
    """
    arcpy.AddMessage("Preparing Proposed Surface Disturbance for processing")

    # Add a constant helper field used when converting to raster.
    # NOTE(review): the field added is "Weight2" but the conversions below
    # rasterize on "Weight" — confirm "Weight" exists upstream.
    arcpy.AddField_management(anthroFeaturesRemoved, "Weight2", "SHORT")
    arcpy.CalculateField_management(anthroFeaturesRemoved, "Weight2", 1,
                                    "PYTHON_9.3", "")

    # The geometry type decides which conversion tool is used in the loop.
    featureDescription = arcpy.Describe(anthroFeaturesRemoved)

    # Work through a feature layer so per-subtype selections can be applied.
    disturbanceLayer = arcpy.MakeFeatureLayer_management(anthroFeaturesRemoved,
                                                         "lyr")

    # Distinct subtype values present in the proposed disturbance.
    uniqueProposedSubtypes = list({row[0] for row in arcpy.da.SearchCursor(
        anthroFeaturesRemoved, "Subtype")})
    arcpy.AddMessage("Proposed Surface Disturbance contains "
                     + ", ".join(uniqueProposedSubtypes))

    for subtype in uniqueProposedSubtypes:
        # Select only the features of this subtype.
        delimitedField = arcpy.AddFieldDelimiters(disturbanceLayer, "Subtype")
        where_clause = """{} = '{}'""".format(delimitedField, subtype)
        arcpy.SelectLayerByAttribute_management(disturbanceLayer,
                                                "NEW_SELECTION",
                                                where_clause)

        # Skip the conversion entirely when the selection is empty.
        selectionCount = int(
            arcpy.GetCount_management(disturbanceLayer).getOutput(0))

        if selectionCount > 0:
            tempRaster = os.path.join("in_memory",
                                      "Proposed_" + subtype + "Null")

            if featureDescription.shapeType == "Polygon":
                arcpy.PolygonToRaster_conversion(disturbanceLayer,
                                                 "Weight",
                                                 tempRaster,
                                                 "MAXIMUM_AREA",
                                                 "Weight",
                                                 cellSize)
            else:  # Consider changing to buffer of ? meters
                arcpy.FeatureToRaster_conversion(disturbanceLayer,
                                                 "Weight",
                                                 tempRaster,
                                                 cellSize)

            # Replace NoData cells with 0, then persist the subtype raster.
            filledRaster = Con(IsNull(tempRaster), 0, tempRaster)
            filledRaster.save("Proposed_" + subtype)

        # Drop the selection before processing the next subtype.
        arcpy.SelectLayerByAttribute_management(disturbanceLayer,
                                                "CLEAR_SELECTION")

    return uniqueProposedSubtypes
# Build a selection of ZONE1* tree tops from the delivered TreeTops feature
# class and stage it in memory for CSV export.
try:

    inTTs = os.path.join(Final_master_GDB,
                         "BAY_AREA_2016_Delivered_TreeTops_AF")

    # Export every field except the bookkeeping fields in `exclude_fields`.
    fields = [
        field.name for field in arcpy.ListFields(inTTs)
        if field.name not in exclude_fields
    ]

    QUERY = "'ZONE1','ZONE1_OH1','ZONE1_OH2'"

    arcpy.MakeFeatureLayer_management(inTTs, "BAY_AREA_2016_TreeTops_AF")
    arcpy.SelectLayerByAttribute_management("BAY_AREA_2016_TreeTops_AF",
                                            "NEW_SELECTION",
                                            ' "DC_AF" IN (%s) ' % (QUERY))

    tempsublyrOUT = r'in_memory\TTs_Temp'

    arcpy.Select_analysis("BAY_AREA_2016_TreeTops_AF", tempsublyrOUT)

except Exception:
    # BUG FIX: this was `Arcpy.AddMessage(...)` (capital A), which raised a
    # NameError inside the handler and masked the real failure. The bare
    # `except:` is also narrowed to Exception.
    arcpy.AddMessage(
        "Your input GDB Treetops FC needs to be named:  >BAY_AREA_2016_Delivered_TreeTops_AF<"
    )
    # Re-raise: continuing would hit undefined `tempsublyrOUT`/`fields` below.
    raise

with arcpy.da.SearchCursor(tempsublyrOUT,
                           fields) as sCur, open(del_csv, 'wb') as outcsv:
    writer = csv.writer(outcsv)
def prep_data(fdhs):
    """
    This function helps in data preparation.

    Filters the source GDB feature classes down to the supplied FDH
    boundaries, stamps every working feature class with an ``fdhid`` column,
    and derives the final splice-closure / vault / fiber / conduit / trench
    feature classes in the scratch GDB.

    PARAMETERS
    ----------
    fdhs : list
        A list of FDH IDs for which you need the BOMs

    NOTE(review): relies on module-level names defined elsewhere in this
    script: ``scratch``, ``gdb``, ``codeblock`` and ``get_end_points``.
    """
    crs = arcpy.SpatialReference(2231)
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = scratch
    print("FILTERING DATA")
    """
    If there are any Feature classes or Tables present in the scratch GDB,
    remove all of them
    """
    fcs = arcpy.ListFeatureClasses()
    for fc in fcs:
        arcpy.Delete_management(scratch + '/' + fc)
    tables = arcpy.ListTables()
    for table in tables:
        arcpy.Delete_management(scratch + '/' + table)

    # The keys present in the following dictionary are the feature classes
    # Data from these feature classes are gathered to generate BOM
    # and the values are the attributes present in those feature classes.
    # These attributes are later(lines 147 - 166) used in filtering the data
    name_dict = {
        'FiberLine': 'cablename',
        'FC_Structure': 'layer',
        'StructureLine': 'layer',
        'fdhpoint': 'fdhid',
        'SplicePoint': 'locationdescription',
        'FiberSlackLoop': 'designid'
    }

    # The following fdh expression helps in generating a query of below form
    # ("fdhid" = 'DIX101d-F31' or "fdhid" = 'DIX101d-F32' or "fdhid" = 'DIX101d-F33')
    # which can later be used to select only the required FD Boundaries
    fdh_exp = "(" + " or ".join(["fdhid = '{0}'".format(x)
                                 for x in fdhs]) + ")"
    # NOTE(review): the result of this encode/strip is discarded — the line is
    # a no-op as written.
    fdh_exp.encode('utf-8').strip()
    # Select only those FDH Boundaries for which the BOMs needs to be generated
    arcpy.Select_analysis(gdb + "\\fdhboundary", scratch + "\\fdhs", fdh_exp)
    """ Exlanations for Queries used inside select_analysis for the for loop part that comes next

    # Query for Structure and Conduit
    # 		Select only those structures and conduits for which the status is 'Preliminary'
    #       and the ones which are present inside the FDH Boundaries we are working on (This part is
    #       handled using Intersect_analysis)
    # Then for the next elif part, the queries are much similar to the above queries and so are self explanatory
    # Same goes for final else part
    """

    for fc in name_dict.keys(
    ):  # ["FiberOpticCable", "FC_Structure", "FC_Conduit", "fdhpoint", "SplicePoint"]:
        fieldnames = [
            field.name for field in arcpy.ListFields(gdb + "\\" + fc)
        ]

        if fc == "SplicePoint":
            arcpy.Select_analysis(gdb + "\\" + fc, scratch + "\\" + fc)

        elif fc in ['FC_Structure', 'StructureLine']:
            arcpy.Select_analysis(gdb + "\\" + fc, scratch + "\\temp_" + fc,
                                  "inventory_status_code = 'Preliminary'")
            arcpy.Intersect_analysis(
                [scratch + "\\temp_" + fc, scratch + "\\fdhs"],
                scratch + "\\" + fc)
        elif "inventory_status_code" in fieldnames:
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\pre_" + fc, "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x)
                     for x in fdhs]) +
                ") and inventory_status_code = 'Preliminary'")
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\" + fc, "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x)
                     for x in fdhs]) + ")")
            arcpy.AddField_management(scratch + "\\pre_" + fc, "fdhid", "TEXT")
            arcpy.CalculateField_management(
                scratch + "\\pre_" + fc, "fdhid",
                "getfdh(!{0}!)".format(name_dict[fc]), "PYTHON_9.3", codeblock)
        else:
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\" + fc, "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x)
                     for x in fdhs]) + ")")

        # Make sure there is an 'fdhid' column for all of the feature classes.
        # There is no special reason for this. It's just to make some of the other geo-processing operations faster
        fieldnames = [
            field.name for field in arcpy.ListFields(scratch + "\\" + fc)
        ]
        if "fdhid" not in fieldnames:
            arcpy.AddField_management(scratch + "\\" + fc, "fdhid", "TEXT")
            arcpy.CalculateField_management(
                scratch + "\\" + fc, "fdhid",
                "getfdh(!{0}!)".format(name_dict[fc]), "PYTHON_9.3", codeblock)

    # Select only Access Fiber, changed 12/07 to grab all fiber intersecting an FDH, and included 'Lateral' infrastructure class query so that 288 cts are counted.
    arcpy.Intersect_analysis([gdb + "\\FiberLine", scratch + "\\fdhs"],
                             scratch + "\\af_1", '', '', 'LINE')
    arcpy.Select_analysis(
        scratch + "\\af_1", scratch + "\\af",
        "infrastructureclass = 'Access' OR infrastructureclass = 'Lateral'")

    # Get the end points of the Access Fiber
    get_end_points(scratch + "\\af", scratch + "\\af_ends", "BOTH_ENDS")

    # Get those fiber ends which intersects with Splice Point
    arcpy.SpatialJoin_analysis(scratch + "\\SplicePoint",
                               scratch + "\\af_ends", scratch + "\\af_sc_join",
                               "JOIN_ONE_TO_MANY", "KEEP_ALL", "", "INTERSECT",
                               "")

    # We dissolve the output from previous step just to make sure we have only one entry even for the points where multiple fibercable intersect with a splice point
    # We will take into consideration only the fibercable with maximum fiber count. Thats the reason why we use ["fibercount", "MAX"]

    arcpy.Dissolve_management(
        scratch + "\\af_sc_join", scratch + "\\final_scs", [
            "locationdescription", "splice_type", "splice_count", "fdhid",
            "fiber_assignments", "spliceenclosuremodelnumber"
        ], [["fibercount", "MAX"]])  # "cable_size",
    arcpy.AlterField_management(scratch + "\\final_scs", "MAX_fibercount",
                                "fcount", "fcount")
    arcpy.AlterField_management(scratch + "\\final_scs",
                                "spliceenclosuremodelnumber", "sc_size",
                                "sc_size")

    # The below set of lines (220- 227) are used to create a feature class with name final_vaults
    # A new attribute named 'pvault' is added and it's value is either 'Y' or 'N' - Changed 12/07/2020 to only include preliminary structures pvault = 'N'

    # Added prelim_vaults 12/07/2020
    arcpy.Select_analysis(gdb + "\\FC_Structure", scratch + "\\prelim_vaults",
                          "inventory_status_code = 'Preliminary'")

    arcpy.AddField_management(scratch + "\\FC_Structure", "pvault", "TEXT")
    arcpy.MakeFeatureLayer_management(scratch + "\\FC_Structure", "vaults")
    # arcpy.CalculateField_management("vaults", "pvault", "'N'", "PYTHON_9.3", "")
    arcpy.SelectLayerByLocation_management("vaults", "INTERSECT",
                                           scratch + "\\prelim_vaults", "",
                                           "NEW_SELECTION")
    arcpy.CalculateField_management("vaults", "pvault", "'N'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("vaults", "CLEAR_SELECTION")
    arcpy.CopyFeatures_management("vaults", scratch + "\\final_vaults")

    # The following set of lines(234 - 240) are used to find out whether an access fiber cable is an FDH cable.
    # Any Acces Fibercable that intersects FDH point is an 'FDH cable.'
    # So, we add a new field named 'fdhcable' and it's values are 'Y' or 'N'
    # If the value is 'Y' - it means fiber is an FDH Cable else it is not.
    # And the final result is copied into scratch GDB just like vaults

    arcpy.AddField_management(scratch + "\\af", "fdhcable", "TEXT")
    arcpy.MakeFeatureLayer_management(scratch + "\\af", "fiber")
    arcpy.SelectLayerByLocation_management("fiber", "INTERSECT",
                                           scratch + "\\fdhpoint", "",
                                           "NEW_SELECTION")
    arcpy.CalculateField_management("fiber", "fdhcable", "'Y'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("fiber", "CLEAR_SELECTION")
    arcpy.CopyFeatures_management("fiber", scratch + "\\final_fiber")

    # Geodesic length (US feet, EPSG:2231) for the final fiber runs.
    arcpy.AddGeometryAttributes_management(scratch + "\\final_fiber",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)

    # Conduit handling: split the 2"/1.25" conduit into segments, then use
    # midpoint buffers to detect overlapping (shared-trench) segments.
    arcpy.Select_analysis(scratch + "\\StructureLine", scratch + "\\all_con",
                          "diameter = '2inch' or diameter = '1.25inch'")
    arcpy.AddField_management(scratch + "\\all_con", "shared", "TEXT")
    arcpy.CalculateField_management(scratch + "\\all_con", "shared", "'N'",
                                    "PYTHON_9.3", "")
    arcpy.SplitLine_management(scratch + "\\all_con", scratch + "\\con_split")
    get_end_points(scratch + "\\con_split", scratch + "\\con_mids", "MID")
    arcpy.AddField_management(scratch + "\\con_mids", "trench", "SHORT")
    arcpy.CalculateField_management(scratch + "\\con_mids", "trench", "1",
                                    "PYTHON_9.3", "")
    arcpy.Buffer_analysis(scratch + "\\con_mids", scratch + "\\con_mid_buff",
                          "1.5 FEET", "FULL", "ROUND")
    arcpy.Dissolve_management(scratch + "\\con_mid_buff",
                              scratch + "\\con_mid_diss", "", "",
                              "SINGLE_PART", "")
    arcpy.AddField_management(scratch + "\\con_mid_diss", "mid_id", "LONG")
    arcpy.CalculateField_management(scratch + "\\con_mid_diss", "mid_id",
                                    "!objectid!", "PYTHON_9.3", "")
    arcpy.SpatialJoin_analysis(scratch + "\\con_mid_buff",
                               scratch + "\\con_mid_diss",
                               scratch + "\\con_join_temp", "JOIN_ONE_TO_ONE",
                               "KEEP_ALL", "", "INTERSECT", "")
    # Count how many conduit midpoints share each dissolved buffer (trench).
    arcpy.Dissolve_management(scratch + "\\con_join_temp",
                              scratch + "\\con_mid_diss_temp", ["mid_id"],
                              [["trench", "SUM"]], "SINGLE_PART", "")
    arcpy.AlterField_management(scratch + "\\con_mid_diss_temp", "SUM_trench",
                                "trench", "trench")
    arcpy.SpatialJoin_analysis(scratch + "\\con_split",
                               scratch + "\\con_mid_diss_temp",
                               scratch + "\\con_join", "JOIN_ONE_TO_ONE",
                               "KEEP_ALL", "", "INTERSECT", "")

    # Mark 1.25" conduit lying within 2 ft of 2" conduit as shared ('Y').
    arcpy.Select_analysis(scratch + "\\con_join", scratch + "\\con2",
                          "diameter = '2inch'")
    arcpy.Select_analysis(scratch + "\\con_join", scratch + "\\con125",
                          "diameter = '1.25inch'")
    arcpy.Buffer_analysis(scratch + "\\con2", scratch + "\\con2_buff",
                          "2 FEET", "FULL", "ROUND", "ALL")
    arcpy.MakeFeatureLayer_management(scratch + "\\con125", "con125")
    arcpy.SelectLayerByLocation_management("con125", "WITHIN",
                                           scratch + "\\con2_buff", "",
                                           "NEW_SELECTION")
    arcpy.CalculateField_management("con125", "shared", "'Y'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("con125", "CLEAR_SELECTION")
    arcpy.Merge_management([scratch + "\\con2", "con125"],
                           scratch + "\\final_con")
    arcpy.AddGeometryAttributes_management(scratch + "\\final_con",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)

    # Dissolve conduit per FDH to get total trench length per boundary.
    arcpy.Dissolve_management(scratch + "\\final_con", scratch + "\\trench",
                              ["fdhid"])
    arcpy.AddGeometryAttributes_management(scratch + "\\trench",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)

    print("DATA FILTERATION DONE..")
     if nSel == -1:
         addMsgAndPrint('    appears to be no field named ' +
                        mFields[0])
     else:
         addMsgAndPrint('    selected ' + mFields[0] + ' = ' +
                        vals[0] + ', n = ' + str(nSel))
 else:  # reselect rows where dependent values are NULL and assign new value
     if nSel > 0:
         whereClause = selField + ' IS NULL'  # OR '+selField+" = ''"
         if mFieldTypeDict[mFields[i]] == 'String':
             whereClause = whereClause + ' OR ' + selField + " = ''" + ' OR ' + selField + " = ' '"
         elif mFieldTypeDict[mFields[i]] in [
                 'Double', 'Single', 'Integer', 'SmallInteger'
         ]:
             whereClause = whereClause + ' OR ' + selField + ' = 0'
         arcpy.SelectLayerByAttribute_management(
             'tempT', 'NEW_SELECTION', whereClause)
         nResel = int(str(arcpy.GetCount_management(
             'tempT')))  # convert result object to int
         addMsgAndPrint('      reselected ' + mFields[i] +
                        ' = NULL, blank, or 0, n = ' + str(nResel))
         if nResel > 0:
             if mFieldTypeDict[mFields[i]] == 'String':
                 arcpy.CalculateField_management(
                     'tempT', mFields[i], '"' + str(vals[i]) + '"')
             elif mFieldTypeDict[mFields[i]] in [
                     'Double', 'Single', 'Integer', 'SmallInteger'
             ]:
                 arcpy.CalculateField_management(
                     'tempT', mFields[i], vals[i])
             addMsgAndPrint('        calculated ' + mFields[i] +
                            ' = ' + str(vals[i]))
        def run(IFile, CFile, OFile, RFile, Rnum, Cnum):
            """
            Clip raster IFile to the area of interest CFile, overlay an
            Rnum x Cnum fishnet based on RFile's extent, then clip a buffered
            tile per fishnet cell and run SAGA's "compound terrain analysis"
            on each tile, writing results under <OFile>/out.

            NOTE(review): Python 2 syntax (print statements,
            ``except Exception, e``); depends on ``time``, ``os`` and
            ``subprocess`` being imported by the enclosing module.
            """
            import arcpy
            from arcpy import env
            env.overwriteOutput = "True"
            print "--------------------------------------------------------------------"
            print "Program ClipIntoFishnet Starts: ", time.asctime(
                time.localtime(time.time()))
            print "--------------------------------------------------------------------"
            ## Clip the file into the are of interest
            arcpy.env.workspace = OFile
            in_features = IFile
            clip_feature = CFile
            out_feature_class = OFile + "/AOI.tif"
            arcpy.Clip_management(in_features, "#", out_feature_class,
                                  clip_feature, "0", "ClippingGeometry")

            ## Add fishnet, the extent will be based on AOI

            lstR = arcpy.Describe(RFile)

            outFeatureClass = OFile + "/AOI_fishnet.shp"

            # Fishnet origin/orientation come from RFile's extent corners;
            # cell sizes are 0 because row/column counts are given instead.
            originCoordinate = str(lstR.extent.XMin) + " " + str(
                lstR.extent.YMin)
            yAxisCoordinate = str(lstR.extent.XMin) + " " + str(
                lstR.extent.YMax)
            cellSizeWidth = '0'
            cellSizeHeight = '0'
            numRows = str(Rnum)
            numColumns = str(Cnum)
            oppositeCoorner = str(lstR.extent.XMax) + " " + str(
                lstR.extent.YMax)
            labels = 'NO_LABELS'
            templateExtent = RFile
            geometryType = 'POLYGON'

            arcpy.CreateFishnet_management(outFeatureClass, originCoordinate,
                                           yAxisCoordinate, cellSizeWidth,
                                           cellSizeHeight, numRows, numColumns,
                                           oppositeCoorner, labels,
                                           templateExtent, geometryType)

            ## Outputting all the AOI points file according to the fishnet grids.
            env.workspace = OFile
            env.overwriteOutput = "True"

            lyrFishnet = arcpy.MakeFeatureLayer_management(
                "AOI_fishnet.shp", "lyr_poly")

            rowsFishnet = arcpy.SearchCursor(lyrFishnet)

            # check if the directory exists, and create one if needed
            directory = OFile + "/out"
            if not os.path.exists(directory):
                os.makedirs(directory)

            print directory

            # One iteration per fishnet cell: copy the cell, buffer it by
            # 10 km, clip the AOI raster to the buffer, then run SAGA on it.
            for row1 in rowsFishnet:

                lyrSelection = arcpy.SelectLayerByAttribute_management(
                    lyrFishnet, "NEW_SELECTION", "\"FID\" = " + str(row1.FID))

                arcpy.CopyFeatures_management(lyrSelection,
                                              "part" + str(row1.FID))
                print "Value of grid " + str(
                    row1.FID) + " has been successfully generated."

                arcpy.Buffer_analysis(
                    "part" + str(row1.FID) + ".shp",
                    OFile + "/part" + str(row1.FID) + str(row1.FID) + ".shp",
                    "10000 Meters", "FULL", "ROUND", "LIST", "FID")

                in_features_p = OFile + "/AOI.tif"
                clip_feature_p = "part" + str(row1.FID) + str(
                    row1.FID) + ".shp"
                out_feature_class_p = "clippart" + str(row1.FID) + ".tif"
                arcpy.Clip_management(in_features_p, "#", out_feature_class_p,
                                      clip_feature_p, "0", "ClippingGeometry")

                print "Created .tif file contains the grid size information:" + "clippart" + str(
                    row1.FID) + ".tif"

                # out put compound analysis through saga gis

                def runCommand_logged(cmd, logstd, logerr):
                    # Run a shell command, teeing stdout/stderr to log files.
                    p = subprocess.call(cmd, stdout=logstd, stderr=logerr)

                WORKDIR = OFile
                STDLOG = WORKDIR + os.sep + "import.log"
                ERRLOG = WORKDIR + os.sep + "import.error.log"

                logstd = open(STDLOG, "a")
                logerr = open(ERRLOG, "a")
                dem_in = OFile + "/"
                out_path = OFile + "/out/"

                print "input dem forlder is: " + dem_in
                print "output compound analysis files: " + out_path

                def saga_compound(dem_in, out_path):
                    # Build output names for every SAGA ta_compound product
                    # for this tile (closes over row1 from the outer loop).
                    in_ELEVATION = dem_in + "clippart" + str(row1.FID) + ".tif"
                    out_SHADE = out_path + "out_SHADE" + str(row1.FID) + ".tif"
                    out_SLOPE = out_path + "out_SLOPE" + str(row1.FID) + ".tif"
                    out_HCURV = out_path + "out_HCURV" + str(row1.FID) + ".tif"
                    out_VCURV = out_path + "out_VCURV" + str(row1.FID) + ".tif"
                    out_CONVERGENCE = out_path + "out_CONVERGENCE" + str(
                        row1.FID) + ".tif"
                    out_SINKS = out_path + "out_SINKS" + str(row1.FID) + ".tif"
                    out_CAREA = out_path + "out_CAREA" + str(row1.FID) + ".tif"
                    out_WETNESS = out_path + "out_WETNESS" + str(
                        row1.FID) + ".tif"
                    out_LSFACTOR = out_path + "out_LSFACTOR" + str(
                        row1.FID) + ".tif"
                    out_CHANNELS = out_path + "out_CHANNELS" + str(
                        row1.FID) + ".tif"
                    out_BASINS = out_path + "out_BASINS" + str(
                        row1.FID) + ".tif"
                    out_CHNL_BASE = out_path + "out_CHNL_BASE" + str(
                        row1.FID) + ".tif"
                    out_CHNL_DIST = out_path + "out_CHNL_DIST" + str(
                        row1.FID) + ".tif"
                    out_VALL_DEPTH = out_path + "out_VALL_DEPTH" + str(
                        row1.FID) + ".tif"
                    out_RSP = out_path + "out_RSP" + str(row1.FID) + ".tif"

                    cmd = 'saga_cmd ta_compound 0 -ELEVATION ' + in_ELEVATION + ' -SHADE ' + out_SHADE + ' -SLOPE ' + out_SLOPE + ' -HCURV ' + out_HCURV + ' -VCURV ' + out_VCURV + ' -CONVERGENCE ' + out_CONVERGENCE + ' -SINKS ' + out_SINKS + ' -CAREA ' + out_CAREA + ' -WETNESS ' + out_WETNESS + ' -LSFACTOR ' + out_LSFACTOR + ' -CHANNELS ' + out_CHANNELS + ' -BASINS ' + out_BASINS + ' -CHNL_BASE ' + out_CHNL_BASE + ' -CHNL_DIST ' + out_CHNL_DIST + ' -VALL_DEPTH ' + out_VALL_DEPTH + ' -RSP ' + out_RSP + ' -THRESHOLD 5'  #+ out_THRESHOLD

                    try:
                        runCommand_logged(cmd, logstd, logerr)
                    except Exception, e:
                        logerr.write("Exception thrown")
                        logerr.write("ERROR: %s\n" % e)

                saga_compound(dem_in, out_path)

                #def saga_tif(sdat_in, tif_out)

                print "Compound analysis for part" + str(
                    row1.FID) + " has successfully created"
                print "--------------------------------------------------------------------------"
示例#10
0
    myLogDatei)
myLogPrint(" BS ausschneiden", myLogDatei)
myLogPrint(
    "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++",
    myLogDatei)

if doBS == 1:

    # Map-sheet kilometre quadrant (orthophoto): drop any stale "LAYER"
    # layer, then copy the sheets intersecting the AOI to BS_DOB_AOI.
    if arcpy.Exists("LAYER"):
        arcpy.gp.delete("LAYER")

    arcpy.MakeFeatureLayer_management(BS_DOP, "LAYER")
    arcpy.SelectLayerByLocation_management("LAYER", "INTERSECT", AOI)
    arcpy.CopyFeatures_management("LAYER", BS_DOB_AOI, "", "0", "0", "0")
    arcpy.SelectLayerByAttribute_management("LAYER", "CLEAR_SELECTION", "")

else:
    # Step skipped ("uebersprungen" = skipped).
    myLogPrint("\t uebersprungen", myLogDatei)

# --> Select only for chosen themes
#######################################################################################################################
myLogPrint("\n\n", myLogDatei)
myLogPrint(
    "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++",
    myLogDatei)
myLogPrint(" Auswahl der Dateien nach AOI", myLogDatei)
arcpy.AddMessage("Auswahl der Dateien nach AOI")
myLogPrint(
    "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++",
    myLogDatei)
# KML export settings: clamp features to the ground and restrict tree tops
# to the listed "DC_AF" values.
clamped = 'CLAMPED_TO_GROUND'
QUERY = "'ZONE1','ZONE1_OH1','ZONE1_OH2'"

# Export one KMZ of selected tree tops per circuit GDB.
for i, circuit_GDB in enumerate(GDB_SPLIT):

    cirNAME = split_following_num(os.path.basename(circuit_GDB))

    outKML = os.path.join(FINAL_KMZ,
                          "%s_BAY_AREA_2016_TreeTops.kmz" % (cirNAME))
    inTTs = os.path.join(circuit_GDB,
                         "%s_BAY_AREA_2016_TreeTops_AF" % (cirNAME))

    arcpy.MakeFeatureLayer_management(
        inTTs, "%s_BAY_AREA_2016_TreeTops_AF" % (cirNAME))
    arcpy.SelectLayerByAttribute_management(
        "%s_BAY_AREA_2016_TreeTops_AF" % (cirNAME), "NEW_SELECTION",
        ' "DC_AF" IN (%s) ' % (QUERY))

    # Only export circuits that actually have matching tree tops.
    result = arcpy.GetCount_management("%s_BAY_AREA_2016_TreeTops_AF" %
                                       (cirNAME))
    count = int(result.getOutput(0))

    if (count) > 0:

        arcpy.ApplySymbologyFromLayer_management(
            "%s_BAY_AREA_2016_TreeTops_AF" % (cirNAME), Symbology_LYR)

        arcpy.LayerToKML_conversion("%s_BAY_AREA_2016_TreeTops_AF" % (cirNAME),
                                    outKML, '', '', '', pixels, dpi, clamped)

        # NOTE(review): snippet truncated here — the AddMessage call below is
        # incomplete in this source.
        arcpy.AddMessage("CREATING KMZ for {0}".format(
示例#12
0
    "The progress is recalssify disfinal Raster, please wait for several minutes..."
)
# Reclassify the distance raster using the `dismap` remap table and save it.
final = Reclassify(disfinal, "Value", dismap)
arcpy.SetProgressorLabel(
    "The progress is saving final Raster, please wait for several minutes...")
final.save(outPut + "/fin")
#---------------------------------------Pretreatment to extract regions' area----------------------------------------------#
env.workspace = outPut
# Insert the saved raster into the current map document, just above the HWSD layer.
mxd = mapping.MapDocument("CURRENT")
df = mapping.ListDataFrames(mxd, "Layers")[0]
refLayer = mapping.ListLayers(mxd, "HWSD*", df)[0]
fin = mapping.Layer(r"fin")
mapping.InsertLayer(df, refLayer, fin, "BEFORE")
#--------------------------------------To extract suitable regions' area in China----------------------------------------#
# Execute SelectLayerByAttribute (cells with VALUE = 0)
arcpy.SelectLayerByAttribute_management("fin", "NEW_SELECTION", "\"VALUE\" =0")
# Execute RasterToPolygon
arcpy.RasterToPolygon_conversion("fin", "zones.shp", "NO_SIMPLIFY", "value")
# Execute IntersectAnalysis
arcpy.Intersect_analysis(["zones.shp", input1], "Intersect", "ALL", "", "")
# Execute DefineProjection (Aitoff world projection)
arcpy.DefineProjection_management(
    "Intersect.shp",
    "PROJCS['Sphere_Aitoff',GEOGCS['GCS_Sphere',DATUM['D_Sphere',SPHEROID['Sphere',6371000.0,0.0]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Aitoff'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],UNIT['Meter',1.0]]"
)
# Execute CalculateAreas (adds an F_AREA field)
arcpy.CalculateAreas_stats("Intersect.shp", "IntersectAreaField.shp")
# Execute Statistics: sum polygon areas per PYNAME
arcpy.Statistics_analysis("IntersectAreaField.shp", "CountAreas1",
                          [["F_AREA", "SUM"]], "PYNAME")
#-------------------------------------To extract suitable regions' area across the globe--------------------------------#
def CreateFishnetsForFeats(in_polys, out_loc, cell_x=0, cell_y=0):
    '''
    Build one fishnet per input polygon, keep the fishnet label points that
    fall completely within that polygon, and merge the per-polygon point
    sets into a single "samples_hti.shp" with a sequential 'sid' field.

    in_polys: input polygon feature class (must have an 'ID' field)
    out_loc: folder location for new file gdb containing fishnet feature class
             (NOTE(review): currently unused -- all output goes to the
             hard-coded directory below; confirm before relying on it)
    cell_x: cell width; 0 means a single column spanning the polygon extent
    cell_y: cell height; 0 means a single row spanning the polygon extent
    '''

    # Set output directory
    output_dir = os.getcwd() + "/remote/01_data/02_out/samples/"
    arcpy.env.overwriteOutput = True
    # spatial reference: inherit from the input polygons
    arcpy.env.outputCoordinateSystem = arcpy.Describe(
        in_polys).spatialReference
    # Loop thru rows of input polygons
    # (bug fix: the cursor previously iterated an undefined global 'polys'
    # instead of the in_polys parameter)
    with arcpy.da.SearchCursor(in_polys, ['SHAPE@', 'OID@', 'ID']) as rows:
        for row in rows:
            ext = row[0].extent
            origin = '%f %f' % (ext.XMin, ext.YMin)
            y_axis = '%f %f' % (ext.XMin, ext.YMax)
            # Work on per-row copies so that a 0 (auto) cell size is
            # recomputed for every polygon (bug fix: cell_x/cell_y were
            # previously overwritten in-place, locking all later polygons
            # to the first polygon's extent).
            row_cell_y = cell_y
            row_cell_x = cell_x
            if row_cell_y == 0:
                n_rows = 1
                row_cell_y = ext.height
            else:
                n_rows = int(
                    (ext.height - (ext.height % row_cell_y)) / row_cell_y) + 1
            if row_cell_x == 0:
                n_cols = 1
                row_cell_x = ext.width
            else:
                n_cols = int(
                    (ext.width - (ext.width % row_cell_x)) / row_cell_x) + 1

            # create fishnet anchored at the polygon's lower-left corner
            out = os.path.join(output_dir, 'fish_{0}'.format(row[2]))
            arcpy.CreateFishnet_management(out,
                                           origin,
                                           y_axis,
                                           row_cell_x,
                                           row_cell_y,
                                           n_rows,
                                           n_cols,
                                           labels='LABELS')
            # Keep only the label points completely inside this polygon.
            where = '"ID"' + '=' + '\'' + str(row[2]) + '\''
            hti_selected = arcpy.SelectLayerByAttribute_management(
                in_polys, "NEW_SELECTION", where)
            selected_samples = arcpy.SelectLayerByLocation_management(
                output_dir + 'fish_{0}'.format(row[2]) + "_label.shp",
                'COMPLETELY_WITHIN', hti_selected)
            arcpy.FeatureClassToFeatureClass_conversion(
                selected_samples, output_dir,
                'samples_{0}'.format(row[2]) + ".shp")
            arcpy.Delete_management(selected_samples)

    # set workspace to output_dir
    arcpy.env.workspace = output_dir
    samples = arcpy.ListFeatureClasses('samples_*')
    targ = samples[0]
    for i, sample in enumerate(samples):
        # append per-polygon sample shapefiles into the first one
        if i > 0:
            arcpy.Append_management([sample], targ, 'NO_TEST')
            arcpy.Delete_management(sample)
            print('Appended: {0}'.format(sample))
    # deleting intermediate fishnet files one at a time
    # (Delete_management documents a single dataset per call)
    for fish in arcpy.ListFeatureClasses('fish_*'):
        arcpy.Delete_management(fish)
    # rename the merged file
    append_shp = arcpy.ListFeatureClasses('samples_*')
    arcpy.Rename_management(append_shp[0], "samples_hti.shp")
    # adding sample ids (sid mirrors FID)
    arcpy.AddField_management(in_table=output_dir + 'samples_hti.shp',
                              field_name='sid',
                              field_type='LONG')
    arcpy.CalculateField_management(output_dir + 'samples_hti.shp', 'sid',
                                    "!FID!", "PYTHON")
    arcpy.DeleteField_management(output_dir + 'samples_hti.shp', ['Id'])
    print('Done')

    return
def smallFeaturesCheck(inFds, outFds, mapScaleString, outHtml, tooShortArcMM,
                       tooSmallAreaMM2, tooSkinnyWidthMM):
    """Inventory too-short arcs, too-small polygons, and sliver polygons.

    Writes offending features to errors_* feature classes inside outFds and
    appends a summary section to the open HTML report file outHtml.

    inFds            -- input feature dataset containing a ContactsAndFaults fc
    outFds           -- output feature dataset for errors_* feature classes
    mapScaleString   -- map-scale denominator as a string, e.g. '24000'
    outHtml          -- open, writable HTML report file object
    tooShortArcMM    -- arc-length threshold in mm at map scale
    tooSmallAreaMM2  -- polygon-area threshold in mm^2 at map scale
    tooSkinnyWidthMM -- area/length ("width") threshold in mm at map scale
    """
    # get inputs
    inCaf = os.path.basename(getCaf(inFds))
    inMup = inCaf.replace('ContactsAndFaults', 'MapUnitPolys')
    nameToken = inCaf.replace('ContactsAndFaults', '')
    # set mapscale and mapunits
    mapUnit1 = arcpy.Describe(inFds).spatialReference.linearUnitName
    mapUnit1 = mapUnit1.upper()
    if mapUnit1.find('FOOT') > -1:
        mapUnits = 'feet'
    else:
        mapUnits = 'meters'
    mapScale = 1.0 / float(mapScaleString)

    # Convert mm-at-map-scale thresholds to ground units.
    # (bug fix: the body previously referenced undefined name 'tooShortMM';
    # the parameter is tooShortArcMM)
    tooShortArcLength = tooShortArcMM / 1000.0 / mapScale
    tooSmallPolyArea = tooSmallAreaMM2 / 1e6 / mapScale / mapScale
    tooSkinnyWidth = tooSkinnyWidthMM / 1000 / mapScale
    if mapUnits == 'feet':
        # dataset is in feet: convert metric thresholds
        tooShortArcLength = tooShortArcLength * 3.28
        tooSmallPolyArea = tooSmallPolyArea * 3.28 * 3.28
        tooSkinnyWidth = tooSkinnyWidth * 3.28

    tooShortArcs = outFds + '/errors_' + nameToken + 'ShortArcs'
    tooSmallPolys = outFds + '/errors_' + nameToken + 'SmallPolys'
    tooSmallPolyPoints = outFds + '/errors_' + nameToken + 'SmallPolyPoints'
    tooSkinnyPolys = outFds + '/errors_' + nameToken + 'SkinnyPolys'
    # remove stale outputs from a previous run
    testAndDelete(tooShortArcs)
    testAndDelete(tooSmallPolys)
    testAndDelete(tooSmallPolyPoints)
    testAndDelete(tooSkinnyPolys)

    outHtml.write('<h3>Small feature inventory</h3>\n')
    outHtml.write('&nbsp;&nbsp; map scale = 1:' + mapScaleString + '<br>\n')

    # short arcs
    testAndDelete('cafLayer')
    arcpy.MakeFeatureLayer_management(
        inFds + '/' + inCaf, 'cafLayer',
        'Shape_Length < ' + str(tooShortArcLength))
    arcpy.CopyFeatures_management('cafLayer', tooShortArcs)
    outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooShortArcs)) +
                  ' arcs shorter than ' + str(tooShortArcMM) + ' mm<br>\n')
    if numberOfRows(tooShortArcs) == 0:
        testAndDelete(tooShortArcs)
    if arcpy.Exists(inMup):
        # small polys
        addMsgAndPrint('  tooSmallPolyArea = ' + str(tooSmallPolyArea))
        testAndDelete('mupLayer')
        arcpy.MakeFeatureLayer_management(
            inFds + '/' + inMup, 'mupLayer',
            'Shape_Area < ' + str(tooSmallPolyArea))
        arcpy.CopyFeatures_management('mupLayer', tooSmallPolys)
        addMsgAndPrint('  ' + str(numberOfRows(tooSmallPolys)) +
                       ' too-small polygons')
        # mark each small polygon with an interior point
        arcpy.FeatureToPoint_management(tooSmallPolys, tooSmallPolyPoints,
                                        'INSIDE')
        outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooSmallPolys)) +
                      ' polys with area less than ' + str(tooSmallAreaMM2) +
                      ' mm<sup>2</sup><br>\n')
        # sliver polys: copy all polys, then delete those that are wide
        # enough (AreaDivLength >= threshold), leaving only slivers
        arcpy.CopyFeatures_management(inFds + '/' + inMup, tooSkinnyPolys)
        testAndDelete('sliverLayer')
        arcpy.MakeFeatureLayer_management(tooSkinnyPolys, 'sliverLayer')
        arcpy.AddField_management('sliverLayer', 'AreaDivLength', 'FLOAT')
        arcpy.CalculateField_management('sliverLayer', 'AreaDivLength',
                                        "!Shape_Area! / !Shape_Length!",
                                        "PYTHON")
        arcpy.SelectLayerByAttribute_management(
            'sliverLayer', 'NEW_SELECTION',
            "AreaDivLength >= " + str(tooSkinnyWidth))
        arcpy.DeleteFeatures_management('sliverLayer')
        addMsgAndPrint('  tooSkinnyPolyWidth = ' + str(tooSkinnyWidth))
        addMsgAndPrint('  ' + str(numberOfRows(tooSkinnyPolys)) +
                       ' too-skinny polygons')

        outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooSkinnyPolys)) +
                      ' polys with area/length ratio less than ' +
                      str(tooSkinnyWidth) + ' ' + mapUnits + '<br>\n')
        # drop empty error classes
        for fc in (tooSkinnyPolys, tooSmallPolys):
            if numberOfRows(fc) == 0: testAndDelete(fc)
    else:
        outHtml.write('&nbsp;&nbsp; No MapUnitPolys feature class<br>\n')

    # Clean up scratch layers regardless of which branch ran.
    # (bug fix: this loop was previously nested inside the else branch, so
    # cafLayer/mupLayer/sliverLayer leaked on the normal path)
    for xx in 'cafLayer', 'mupLayer', 'sliverLayer':
        testAndDelete(xx)

    return
#This reclassifies the Kriging Output from a raster range of values to
#individual class values 1-5; anything above 5.5 becomes NoData.
outReclass1 = Reclassify(
    Krig_Output, "Value",
    RemapValue([[0, 1, 1], [1, 2, 2], [2, 3, 3], [3, 4, 4], [4, 5.5, 5],
                [5.5, 10000000000, "NoData"]]))
outReclass1.save(Reclass2)

#This turns the raster into a polygon (no boundary simplification).
arcpy.RasterToPolygon_conversion(Reclass2, Quakesim_Final, "NO_SIMPLIFY",
                                 "VALUE")

#This turns the polygon into a layer.
arcpy.MakeFeatureLayer_management(Quakesim_Final, "Quakesim_lyr")

#Select all polygons with GRIDCODE 2 through 5 (built up incrementally with
#ADD_TO_SELECTION); this defines the damage area of interest.
arcpy.SelectLayerByAttribute_management("Quakesim_lyr", "NEW_SELECTION",
                                        """"GRIDCODE"=2""")
arcpy.SelectLayerByAttribute_management("Quakesim_lyr", "ADD_TO_SELECTION",
                                        """"GRIDCODE"=3""")
arcpy.SelectLayerByAttribute_management("Quakesim_lyr", "ADD_TO_SELECTION",
                                        """"GRIDCODE"=4""")
arcpy.SelectLayerByAttribute_management("Quakesim_lyr", "ADD_TO_SELECTION",
                                        """"GRIDCODE"=5""")
# For each optional infrastructure input, copy the features intersecting
# the selected damage area into a "Damaged_Infrastructure_N.shp".
if Infrastructure_1_Merged:
    arcpy.SelectLayerByLocation_management("Infra1_lyr", "INTERSECT",
                                           "Quakesim_lyr")
    arcpy.CopyFeatures_management("Infra1_lyr", "Damaged_Infrastructure_1.shp")
if Infrastructure_2_Merged:
    arcpy.SelectLayerByLocation_management("Infra2_lyr", "INTERSECT",
                                           "Quakesim_lyr")
    arcpy.CopyFeatures_management("Infra2_lyr", "Damaged_Infrastructure_2.shp")
if Infrastructure_3_Merged:
    # NOTE(review): the body of this branch operates on heat-index county
    # tables, not infrastructure -- it looks pasted from another script;
    # confirm the intended third-infrastructure logic.
    msg = "Joining max gridcode table to temporary feature class"
    print(msg)
    logging.info(msg)
    arcpy.JoinField_management(in_data=maxhi_counties_join_tmp + ".shp",
                               in_field="GEOID_dbl",
                               join_table=maxhi_counties_summary,
                               join_field="GEOID_dbl",
                               fields=["MAX_gridco"])

    #Selecting Heat Index values above threshold
    threshold = 100
    msg = "Selecting Heat Index values above threshold of " + str(threshold)
    print(msg)
    logging.info(msg)
    qry = "MAX_gridco >= " + str(threshold)
    heat_index_over_threshold_lyr = arcpy.SelectLayerByAttribute_management(
        maxhi_counties_join_tmp + ".shp", "NEW_SELECTION", qry)

    msg = "Copying to " + max_hi_forecast_interpolated_valid_poly
    print(msg)
    logging.info(msg)
    arcpy.management.CopyFeatures(heat_index_over_threshold_lyr,
                                  maxhi_counties_join_final, '', None, None,
                                  None)

    msg = "zipping shapefiles..."
    print(msg)
    logging.info(msg)

    time.sleep(sleep_interval)
    #all extensions of shapefile sidecar files to include in the zip
    shp_extensions = ["cpg", "dbf", "prj", "sbn", "sbx", "shp", "xml", "shx"]
示例#17
0
# ModelBuilder-exported aliases: every name below refers to the same
# in-memory layer ("RCL_Subset_Layer") at successive workflow stages.
Output_RCL_Subset_Layer = "RCL_Subset_Layer"
Output_RCL_Subset_Layer_with_data_selection = Output_RCL_Subset_Layer
Output_RCL_Subset_Layer_with_selection_set_to_a_value_of_1 = Output_RCL_Subset_Layer_with_data_selection
Output_RCL_Subset_Layer_with_reversed_data_selection = Output_RCL_Subset_Layer_with_selection_set_to_a_value_of_1
Output_Subset_Layer_with_selected_areas_representing_1_and_non_selected_areas_representing_0 = Output_RCL_Subset_Layer_with_reversed_data_selection

# Process: Make Feature Layer (field-info string hides all fields from display)
arcpy.MakeFeatureLayer_management(
    Input_RCL_Subset, Output_RCL_Subset_Layer, "", "",
    "OBJECTID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;RCL_ID RCL_ID VISIBLE NONE;LOCAL_ID LOCAL_ID VISIBLE NONE;VDOT_EDGE_ID VDOT_EDGE_ID VISIBLE NONE;MFIPS MFIPS VISIBLE NONE;GEOMETRY_EDIT_TYPE GEOMETRY_EDIT_TYPE VISIBLE NONE;GEOMETRY_EFFECTIVE_DATE GEOMETRY_EFFECTIVE_DATE VISIBLE NONE;GEOMETRY_SOURCE GEOMETRY_SOURCE VISIBLE NONE;GEOMETRY_EDIT_DATE GEOMETRY_EDIT_DATE VISIBLE NONE;LEFT_FROM_ADDRESS LEFT_FROM_ADDRESS VISIBLE NONE;LEFT_TO_ADDRESS LEFT_TO_ADDRESS VISIBLE NONE;RIGHT_FROM_ADDRESS RIGHT_FROM_ADDRESS VISIBLE NONE;RIGHT_TO_ADDRESS RIGHT_TO_ADDRESS VISIBLE NONE;ADDRESS_RANGE_FORMAT ADDRESS_RANGE_FORMAT VISIBLE NONE;ADDRESS_RANGE_SOURCE ADDRESS_RANGE_SOURCE VISIBLE NONE;ADDRESS_RANGE_EDIT_DATE ADDRESS_RANGE_EDIT_DATE VISIBLE NONE;STREET_PREMODIFIER STREET_PREMODIFIER VISIBLE NONE;STREET_PREFIX_DIRECTION STREET_PREFIX_DIRECTION VISIBLE NONE;STREET_NAME STREET_NAME VISIBLE NONE;STREET_SUFFIX STREET_SUFFIX VISIBLE NONE;STREET_SUFFIX_DIRECTION STREET_SUFFIX_DIRECTION VISIBLE NONE;STREET_POSTMODIFIER STREET_POSTMODIFIER VISIBLE NONE;STREET_NAME_FULL STREET_NAME_FULL VISIBLE NONE;STREET_NAME_SOURCE STREET_NAME_SOURCE VISIBLE NONE;STREET_NAME_EDIT_DATE STREET_NAME_EDIT_DATE VISIBLE NONE;MTFCC MTFCC VISIBLE NONE;SEGMENT_TYPE SEGMENT_TYPE VISIBLE NONE;LOCAL_SPEED_MPH LOCAL_SPEED_MPH VISIBLE NONE;DUAL_CARRIAGEWAY DUAL_CARRIAGEWAY VISIBLE NONE;ONE_WAY ONE_WAY VISIBLE NONE;REVERSIBLE REVERSIBLE VISIBLE NONE;SEGMENT_EXISTS SEGMENT_EXISTS VISIBLE NONE;Shape_Length Shape_Length VISIBLE NONE;NH_BUFF_M NH_BUFF_M VISIBLE NONE;NH_SURFWIDTH_FLAG NH_SURFWIDTH_FLAG VISIBLE NONE;NH_COMMENTS NH_COMMENTS VISIBLE NONE;VDOT_RTE_TYPE_CD VDOT_RTE_TYPE_CD VISIBLE NONE;VDOT_SURFACE_WIDTH_MSR VDOT_SURFACE_WIDTH_MSR VISIBLE NONE;VDOT_TRAFFIC_AADT_NBR VDOT_TRAFFIC_AADT_NBR VISIBLE NONE;NH_BUFF_FT NH_BUFF_FT VISIBLE NONE;NH_CONSITE NH_CONSITE VISIBLE NONE;NH_LINE_EDIT NH_LINE_EDIT VISIBLE NONE;NH_BUFF_EDIT NH_BUFF_EDIT VISIBLE NONE;NH_REV_EDITOR NH_REV_EDITOR VISIBLE NONE;NH_REV_DATE 
NH_REV_DATE VISIBLE NONE"
)

# Process: Select Layer By Location -- segments intersecting the input shapefile
arcpy.SelectLayerByLocation_management(Output_RCL_Subset_Layer, "INTERSECT",
                                       Input_Shapefile, "", "NEW_SELECTION",
                                       "NOT_INVERT")

# Process: Calculate Field -- flag intersecting segments with NH_CONSITE = 1
arcpy.CalculateField_management(Output_RCL_Subset_Layer_with_data_selection,
                                "NH_CONSITE", "1", "VB", "")

# Process: Select Layer By Attribute -- invert the selection
arcpy.SelectLayerByAttribute_management(
    Output_RCL_Subset_Layer_with_selection_set_to_a_value_of_1,
    "SWITCH_SELECTION", "")

# Process: Calculate Field (2) -- flag all remaining segments with NH_CONSITE = 0
arcpy.CalculateField_management(
    Output_RCL_Subset_Layer_with_reversed_data_selection, "NH_CONSITE", "0",
    "VB", "")
# Per-parcel loop: for each point row in cursor `sc`, extract the bare-earth
# elevation under the parcel footprint and join the mean elevation back to
# the points layer. (Cursor `sc` and the layers/paths used below are module
# globals defined earlier in the script; the matching `except` for this
# `try` lies beyond this excerpt.)
for row in sc:

    try:

        #Get Parcel ID value
        value = row.ID
        count = row.FID + 1
        FlrCnt = row.getValue(FloorField)

        #Get bare earth elevation of parcel:
        #select this point, then the footprint polygons it intersects
        arcpy.SetProgressorLabel(
            "Changing elevation footprint to bare earth elevation for point " +
            str(value))
        SQL = "Id =" + str(value) + ""
        arcpy.SelectLayerByAttribute_management(PointsFL, "NEW_SELECTION", SQL)
        arcpy.SelectLayerByLocation_management(footprintFL, "INTERSECT",
                                               PointsFL)

        arcpy.env.workspace = IntermediateFiles  #need to change workspace so that the .save files get saved correctly
        # clip the bare-earth raster to the selected footprint
        outExtractByMask = ExtractByMask(BareElevation, footprintFL)
        outExtractByMask.save(IntermediateFiles + "\\ebm_" + str(value))

        # mean elevation of the masked raster, written to a per-parcel table
        ElevAvg = ElevAvgTables + "\\avgelev_" + str(value) + ".dbf"
        arcpy.Statistics_analysis(outExtractByMask, ElevAvg,
                                  [["VALUE", "MEAN"]])

        # tag the stats table with the parcel id and join it to the points
        arcpy.AddField_management(ElevAvg, "Pt_ID", "SHORT")
        arcpy.CalculateField_management(ElevAvg, "Pt_ID", value)
        arcpy.AddJoin_management(PointsFL, "Id", ElevAvg, "Pt_ID",
                                 "KEEP_COMMON")
 # Find the layer matching Profl so the definition query can be set on it.
 mxd = arcpy.mapping.MapDocument("CURRENT")
 Delyr = ''
 for lyr in arcpy.mapping.ListLayers(mxd):
     if lyr.longName == Profl:
         Delyr = lyr
 # Collect all profile names from the Profl feature class.
 naamLijst = []
 with arcpy.da.SearchCursor(Profl, [PnmKol, Kol]) as cursor2:
     for row2 in cursor2:
         arcpy.AddMessage("Naam: " + str(row2[0]))
         naamLijst.append(row2[0])
 del row2, cursor2, mxd, lyr
 # If a profile was selected on a previous run, clear the Kol column
 # (reset 'Ja' -> 'Nee') when the RemRP option is checked.
 if RemRP == 'true':
     arcpy.AddMessage("oude representatieve profielen verwijderen...")
     arcpy.SelectLayerByAttribute_management(Delyr, "CLEAR_SELECTION")
     waar = Kol + " = 'Ja'"
     # count rows currently flagged 'Ja' before updating them
     no = len(
         list(i for i in arcpy.da.SearchCursor(Profl, [PnmKol, Kol],
                                               where_clause=waar)))
     if no > 0:
         with arcpy.da.UpdateCursor(Profl, [PnmKol, Kol],
                                    where_clause=waar) as cursor3:
             for row3 in cursor3:
                 row3[1] = 'Nee'
                 cursor3.updateRow(row3)
         del row3, cursor3
 else:
     arcpy.AddMessage("oude representatieve profielen blijven behouden...")
     # Keep existing flags: the name list is extended with the
     # already-present representative profiles (continues below).
     #arcpy.AddMessage("naamLijst:  "+str(naamLijst))
def DegViewshed(FLOOR, HEIGHT):
    """Calculates a parcels viewshed, in degrees.

    FLOOR  -- floor number used to label intermediate/output files
    HEIGHT -- observer height written into the points layer's OFFSETA field

    NOTE(review): relies on module globals set by the enclosing script
    (PointsFL, SQL, value, outCon, BufferClip, BufferLine, Year, FloorField,
    IntermediateFiles, Final_Floor_Viewsheds, SummaryTables, ArcLengths,
    outName) -- confirm they are populated before calling.
    """

    #Select Record for the current parcel (SQL/value are globals)
    arcpy.SelectLayerByAttribute_management(PointsFL, "NEW_SELECTION", SQL)

    #Set Observer Height (OffSETA)
    arcpy.CalculateField_management(PointsFL, "OFFSETA", HEIGHT, "PYTHON_9.3")

    #perform viewshed analysis
    arcpy.SetProgressorLabel("Performing Viewshed Analysis for point " +
                             str(value))
    outViewshed = IntermediateFiles + "\\vs_" + str(FLOOR) + "_" + str(
        value).split(".")[0]
    arcpy.Viewshed_3d(outCon, PointsFL, outViewshed)

    #convert viewshed to polygon
    arcpy.SetProgressorLabel("Converting viewshed" + str(value) +
                             " on floor " + str(FLOOR) + " to polygon.")
    OutPoly = IntermediateFiles + "\\" + os.path.basename(outViewshed).split(
        ".")[0] + "_poly.shp"
    arcpy.RasterToPolygon_conversion(outViewshed, OutPoly)

    #Intersect viewshed polygon with buffer clip
    #This will allow the viewshed poly to inherit attribute fields needed for later analysis
    FinalView = Final_Floor_Viewsheds + "\\FinalViewshed_" + str(
        FLOOR) + "_" + str(value) + ".shp"
    arcpy.Intersect_analysis([BufferClip, OutPoly], FinalView)

    #Select features in viewshed polygon with Gridcode = 1
    #If no records with grid = 1 exist, script will skip to setting viewshed in degrees to 0

    #Convert viewshed polygon to layer
    ViewshedLayer = outName(FinalView, "lyr")
    arcpy.MakeFeatureLayer_management(FinalView, ViewshedLayer)

    #Select records with gridcode = 1 (visible areas)
    arcpy.SelectLayerByAttribute_management(ViewshedLayer, "NEW_SELECTION",
                                            "GRIDCODE =" + str(1) + "")

    #Get count of the # of records selected in viewshed poly layer
    VsLyrCount = int(arcpy.GetCount_management(ViewshedLayer).getOutput(0))

    # NOTE(review): NoView and YesView resolve to the same .dbf path; only
    # one of the two branches below writes it per call.
    NoView = SummaryTables + "\\summary_" + str(FLOOR) + "_" + str(
        value) + ".dbf"
    YesView = SummaryTables + "\\summary_" + str(FLOOR) + "_" + str(
        value) + ".dbf"
    StatsField0 = [["GRIDCODE", "SUM"]]
    CaseField0 = ["ID", "SPOT", FloorField]
    StatsField1 = [["LENGTH", "SUM"]]
    CaseField1 = ["GRIDCODE", "ID", "SPOT", FloorField]
    VsArcLengths = ArcLengths + "\\ArcLength_" + str(FLOOR) + "_" + str(
        value) + ".shp"

    if VsLyrCount == 0:  #no viewable areas exist
        arcpy.SelectLayerByAttribute_management(ViewshedLayer,
                                                "CLEAR_SELECTION")
        arcpy.SetProgressorLabel(
            "Calculating viewshed statistics for parcel " + str(value))
        arcpy.Statistics_analysis(ViewshedLayer, NoView, StatsField0,
                                  CaseField0)

        #Add field to summary table to hold viewshed value of 0
        #Add field to note which floor viewshed corresponds to
        arcpy.AddField_management(NoView, "FLR_RAN", "SHORT")
        arcpy.AddField_management(NoView, "VIEW_" + Year, "DOUBLE")
        arcpy.AddField_management(NoView, "OFFSETA", "SHORT")
        arcpy.CalculateField_management(NoView, "FLR_RAN", FLOOR)
        arcpy.CalculateField_management(NoView, "VIEW_" + Year, 0)
        arcpy.CalculateField_management(NoView, "OFFSETA", HEIGHT)

    else:  #Calculate viewshed, in degrees, for selected records
        arcpy.SetProgressorLabel("Getting arc length for parcel" + str(value) +
                                 " at the " + str(FLOOR) + " floor.")
        arcpy.Intersect_analysis(
            [BufferLine, ViewshedLayer], VsArcLengths, "", 10,
            "LINE")  #Intersect with any line within 10 ft.
        arcpy.AddField_management(VsArcLengths, "Length", "DOUBLE")
        arcpy.CalculateField_management(VsArcLengths, "Length",
                                        "!SHAPE.length@miles!", "PYTHON_9.3")
        arcpy.Statistics_analysis(VsArcLengths, YesView, StatsField1,
                                  CaseField1)

        #Add fields to output summary table
        arcpy.AddField_management(YesView, "FLR_RAN", "SHORT")
        arcpy.AddField_management(YesView, "VIEW_" + Year, "DOUBLE")
        arcpy.AddField_management(YesView, "OFFSETA", "SHORT")
        arcpy.CalculateField_management(YesView, "FLR_RAN", FLOOR)
        arcpy.CalculateField_management(YesView, "OFFSETA", HEIGHT)
        # arc length (miles) converted to degrees of view; 3.14 approximates pi
        arcpy.CalculateField_management(YesView, "VIEW_" + Year,
                                        "((!SUM_LENGTH!/3.14)*180)",
                                        "PYTHON_9.3")
        arcpy.SelectLayerByAttribute_management(ViewshedLayer,
                                                "CLEAR_SELECTION")
示例#21
0
def network_disruption_prep():
    """Prompt for configuration, overlay an exposure raster on road/rail
    networks, and write a per-link disruption CSV.

    For each selected mode the function extracts exposure cells over the
    network, converts them to points, spatial-joins the maximum exposure to
    each network segment, derives a binary link_availability (0 = exposed,
    1 = available), and appends disrupted links to disruption.csv.
    """

    start_time = datetime.datetime.now()
    print('\nStart at ' + str(start_time))

    # SETUP
    print('Prompting for configuration ...')

    # Get path to input network
    network = get_network()

    road_y_n, rail_y_n = get_mode_list()

    mode_list = []
    if road_y_n == 'y':
        mode_list.append("road")
    if rail_y_n == 'y':
        mode_list.append("rail")

    input_exposure_grid = get_input_exposure_data()

    input_exposure_grid_field = get_input_exposure_data_field()

    search_tolerance = get_search_tolerance()

    output_dir = get_output_dir()

    # Hard-coding this for now, if more options are added, this can be revisited
    link_availability_approach = 'binary'

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # recreate the output geodatabase from scratch on every run
    output_gdb = 'disruption_analysis.gdb'
    full_path_to_output_gdb = os.path.join(output_dir, output_gdb)

    if arcpy.Exists(full_path_to_output_gdb):
        arcpy.Delete_management(full_path_to_output_gdb)
        print('Deleted existing ' + full_path_to_output_gdb)
    arcpy.CreateFileGDB_management(output_dir, output_gdb)

    arcpy.env.workspace = full_path_to_output_gdb

    # MAIN
    # ---------------------------------------------------------------------------

    txt_output_fields = ['mode', 'unique_link_id', 'link_availability', input_exposure_grid_field]

    # Export to txt file: write the CSV header once, then append per mode
    csv_out = os.path.join(output_dir, "disruption.csv")

    with open(csv_out, "w", newline='') as f:
        wr = csv.writer(f)
        wr.writerow(txt_output_fields)

    for mode in mode_list:

        # Extract raster cells that overlap the modes of interest
        print('Extracting exposure values that overlap network for mode: {} ...'.format(mode))
        arcpy.CheckOutExtension("Spatial")
        output_extract_by_mask = arcpy.sa.ExtractByMask(input_exposure_grid, os.path.join(network, mode))
        output_extract_by_mask.save(mode + "_exposure_grid_extract")
        arcpy.CheckInExtension("Spatial")

        # Export raster to point
        print('Converting raster to point for mode: {} ...'.format(mode))
        arcpy.RasterToPoint_conversion(mode + "_exposure_grid_extract", os.path.join(
            full_path_to_output_gdb, mode + "_exposure_grid_points"), input_exposure_grid_field)

        # Setup field mapping so that maximum exposure at each network segment is captured
        fms = arcpy.FieldMappings()

        fm = arcpy.FieldMap()
        fm.addInputField(mode + "_exposure_grid_points", "grid_code")
        fm.mergeRule = 'Maximum'

        fms.addFieldMap(fm)

        # Spatial join to network, selecting highest exposure value for each network segment
        print('Identifying maximum exposure value for each network segment for mode: {} ...'.format(mode))
        arcpy.SpatialJoin_analysis(os.path.join(network, mode), mode + "_exposure_grid_points",
                                   mode + "_with_exposure",
                                   "JOIN_ONE_TO_ONE", "KEEP_ALL",
                                   fms,
                                   "WITHIN_A_DISTANCE_GEODESIC", search_tolerance)

        # Add new field to store extent of exposure
        print('Calculating exposure levels for mode: {} ...'.format(mode))

        arcpy.AddField_management(mode + "_with_exposure", "link_availability", "Float")
        arcpy.AddField_management(mode + "_with_exposure", "comments", "Text")

        arcpy.MakeFeatureLayer_management(mode + "_with_exposure", mode + "_with_exposure_lyr")

        # Convert NULLS to 0 first (segments with no joined exposure point)
        arcpy.SelectLayerByAttribute_management(mode + "_with_exposure_lyr", "NEW_SELECTION", "grid_code IS NULL")
        arcpy.CalculateField_management(mode + "_with_exposure_lyr", "grid_code", 0, "PYTHON_9.3")
        arcpy.SelectLayerByAttribute_management(mode + "_with_exposure_lyr", "CLEAR_SELECTION")

        if link_availability_approach == 'binary':
            # 0 = full exposure/not traversable. 1 = no exposure/link fully available
            arcpy.SelectLayerByAttribute_management(mode + "_with_exposure_lyr", "NEW_SELECTION", "grid_code > 0")
            arcpy.CalculateField_management(mode + "_with_exposure_lyr", "link_availability", 0, "PYTHON_9.3")
            arcpy.SelectLayerByAttribute_management(mode + "_with_exposure_lyr", "SWITCH_SELECTION")
            arcpy.CalculateField_management(mode + "_with_exposure_lyr", "link_availability", 1, "PYTHON_9.3")

        print('Finalizing outputs ... for mode: {} ...'.format(mode))

        # Rename grid_code back to the original exposure grid field provided in raster dataset.
        arcpy.AlterField_management(mode + "_with_exposure", 'grid_code', input_exposure_grid_field)

        fields = ['OBJECTID', 'link_availability', input_exposure_grid_field]

        # Only worry about disrupted links--
        # anything with a link availability of 1 is not disrupted and doesn't need to be included.
        arcpy.SelectLayerByAttribute_management(mode + "_with_exposure_lyr", "NEW_SELECTION", "link_availability <> 1")

        # append one CSV row per disrupted link: mode, OBJECTID, availability, exposure
        with open(csv_out, "a", newline='') as csv_file:
            with arcpy.da.SearchCursor(mode + "_with_exposure_lyr", fields) as cursor:
                for row in cursor:
                    converted_list = [str(element) for element in row]
                    joined_string = ",".join(converted_list)
                    csv_file.write("{},{}\n".format(mode, joined_string))

    end_time = datetime.datetime.now()
    total_run_time = end_time - start_time
    print("\nEnd at {}.  Total run time {}".format(end_time, total_run_time))
示例#22
0
# Python 2 script: for every feature class in the current workspace, select
# features where SampleName = '1' and run Eliminate to merge them into
# neighboring polygons, writing results to outFeaturePath.
temp = r"D:\0_PROJECTS\3_QingHai_MaDuo_Project\MaDuoNewBorderTask0107\Result\temp"
fcs = arcpy.ListFeatureClasses("*","All","")
print "start"
for fc in fcs:
    print fc
    # Set local variables
    inFeatures = fc
    tempLayer = os.path.join(temp,str(fc[:-4]) + "blocklayer")
    expression = '"SampleName" = \'1\''   # selection condition; note the quotes around 1 are escaped with backslashes

    #exclusionExpression = ''
     
    # Execute MakeFeatureLayer
    arcpy.MakeFeatureLayer_management(inFeatures, tempLayer)

    print "Step 1 finished!"
     
    # Execute SelectLayerByAttribute to define features to be eliminated
    arcpy.SelectLayerByAttribute_management(tempLayer, "NEW_SELECTION", expression)

    print "Step 2 finished!"
     
    # Execute Eliminate (merge selected polygons into the neighbor sharing
    # the largest AREA); output name drops the .shp extension
    outFC_name = str(fc[:-4])
    outFeatureClass = os.path.join(outFeaturePath,outFC_name)
    arcpy.Eliminate_management(tempLayer, outFeatureClass, "AREA",)

    print ""
    
print "All finished"
示例#23
0
    # Collect the distinct overlay-type prefixes (first word of each value)
    # from the cursor opened earlier in the enclosing function.
    for row in specOverlayCursor:
        if row[0].split(' ')[0] not in overlayTypes:
            overlayTypes.append(row[0].split(' ')[0])
    del specOverlayCursor

    #Create a layer based off of each overlay type saved from previous list
    overlayTotal = len(overlayTypes)
    overlayCount = 0
    arcpy.MakeFeatureLayer_management(zoningFC, zoningLayer)
    parcelDict = {}
    for overlayType in overlayTypes:
        overlayCount += 1
        print('Starting ' + overlayType + ' overlay ' + str(overlayCount) +
              ' of ' + str(overlayTotal))
        # select zoning polygons whose OVERLAY_NAME starts with this type
        arcpy.SelectLayerByAttribute_management(
            zoningLayer, 'NEW_SELECTION',
            "OVERLAY_NAME LIKE '" + overlayType + "%'")
        arcpy.MakeFeatureLayer_management(zoningLayer, zoningCurrent)

        currentTract = 'CurrentDistrict'
        tempParcels = 'TempParcels'
        tempZone = 'TempZone'
        districtCount = 0
        districtTotal = int(
            arcpy.GetCount_management(Council_Districts_Local).getOutput(0))
        # trim whitespace from DISTRICT values; the two adjacent string
        # literals concatenate to the expression "!DISTRICT!.strip()"
        arcpy.CalculateField_management(Council_Districts_Local, 'DISTRICT',
                                        '!DISTRICT!.strip('
                                        ')', 'PYTHON_9.3')
        districtTileCursor = arcpy.da.SearchCursor(Council_Districts_Local,
                                                   'DISTRICT')
示例#24
0
def start(parameters):
    """Extract NHDPlus HR tributaries up to a given stream level.

    parameters: dict with keys
        "output_folder"  -- workspace directory for results
        "gdb_location"   -- path to the source NHDPlus HR geodatabase
        "start_features" -- feature class of starting flowline features
        "max_level"      -- maximum stream level to trace (int)

    Creates a result directory, file GDB and feature dataset next to the
    workspace, exports matching tributary segments, fills in VAA/FCode
    fields, and optionally dissolves segments into streams.  Relies on the
    module-level `config` and helper functions (get_location, get_fname,
    find_start_features, ...) defined elsewhere in this project.
    """
    arcpy.AddMessage('Workspace: ' + parameters["output_folder"])
    arcpy.AddMessage('GDB location: ' + parameters["gdb_location"])
    arcpy.AddMessage('Start features: ' + parameters["start_features"])
    arcpy.AddMessage('Stream level: ' + str(parameters["max_level"]))
    ### START PROCESSING ###
    ### Processing Phase 0: Create local parameters
    arcpy.env.workspace = parameters["output_folder"]
    # retrieve path based on parameter_gdb_location and dataset name
    original_dataset_location = get_location(parameters["gdb_location"],
                                             config.original_dataset_name)
    # retrieve path based on parameter_gdb_location and fc name
    original_nhdflowline_location = get_location(
        original_dataset_location, config.original_nhdflowline_name)
    # retrieve path based on + and table name
    original_vaatable_location = get_location(parameters["gdb_location"],
                                              config.original_vaatable_name)
    # takes the rootname of parameter_gdb_location and
    gdb_original_rootname = get_fname(parameters["gdb_location"])
    # get name of result directory
    result_rootname = gdb_original_rootname + config.result_suffix
    # get pathname of result directory
    result_subdir_location = get_location(arcpy.env.workspace, result_rootname)
    # determine path of result directory
    result_gdb_location = get_location(result_subdir_location, result_rootname,
                                       '.gdb')
    # determine name of result geodatabase
    # add new parameter to keep suffix
    result_gdb_filename = get_fname(result_gdb_location)
    # get pathnames of result feature classes
    result_dataset_location = get_location(result_gdb_location,
                                           config.result_dataset_name)
    result_tributaries_segments_location = get_location(
        result_dataset_location, config.result_tributaries_segments_fcname)
    result_tributaries_dissolved_location = get_location(
        result_dataset_location, config.result_tributaries_dissolved_fcname)
    ### Processing Phase 1: Pre-tests
    # do usable start features exist
    arcpy.AddMessage('Cross checking start features...')
    start_nhdplusids = find_start_features(original_nhdflowline_location,
                                           parameters["start_features"])
    # if there are usable start features
    if len(start_nhdplusids) > 0:
        # find tributaries that match parameters, if any
        vaa_lists = get_vaa_lists(config.original_nhdflowline_name,
                                  original_vaatable_location, start_nhdplusids,
                                  config.result_all_levels,
                                  parameters["max_level"])
        # unpack the three parallel lists returned by get_vaa_lists
        start_features_hydroseq_list = vaa_lists[0]
        vaa_path_id_list = vaa_lists[1]
        vaa_hydroseq_list = vaa_lists[2]
        starting_tributaries_paths = get_starting_tributaries(
            start_features_hydroseq_list, vaa_hydroseq_list)
        arcpy.AddMessage('Searching for tributaries up to stream level ' +
                         str(parameters["max_level"]) + "...")
        all_tributaries_paths = get_all_tributaries_from_path(
            starting_tributaries_paths, vaa_path_id_list)
        # if tributaries exist
        if len(all_tributaries_paths) > 0:
            arcpy.AddMessage('Tributaries found.')
            ### Processing Phase 2: Prepare files
            os.mkdir(result_subdir_location)
            # add message
            arcpy.AddMessage(result_rootname + ' directory created.')
            arcpy.CreateFileGDB_management(
                out_folder_path=result_subdir_location,
                out_name=result_gdb_filename,
                out_version="CURRENT")
            arcpy.AddMessage(result_gdb_filename + ' geodatabase created.')
            # NOTE(review): the original dataset path is passed as the
            # spatial reference so the result dataset inherits its SR.
            arcpy.CreateFeatureDataset_management(
                out_dataset_path=result_gdb_location,
                out_name=config.result_dataset_name,
                spatial_reference=original_dataset_location)
            arcpy.AddMessage(config.result_dataset_name + ' dataset created.')
            ### Processing Phase 3: Export Tributaries
            arcpy.AddMessage('Exporting tributaries...')
            export_matching_paths(config.original_nhdflowline_name,
                                  original_vaatable_location,
                                  all_tributaries_paths,
                                  result_tributaries_segments_location)
            # indexing the GetCount Result yields its output as a string,
            # so the '+' concatenations below are string operations
            tributary_segment_count = arcpy.GetCount_management(
                result_tributaries_segments_location)[0]
            arcpy.AddMessage(tributary_segment_count +
                             " tributary segments exported to " +
                             config.result_dataset_name + '/' +
                             config.result_tributaries_segments_fcname + '.')
            ### Processing Phase 4: Add fields
            add_fields(result_tributaries_segments_location,
                       config.vaa_segment_fields)
            add_fields(result_tributaries_segments_location,
                       config.fcode_gnisid_fields)
            add_fields(result_tributaries_segments_location,
                       config.fcode_pathid_fields)
            arcpy.MakeFeatureLayer_management(
                result_tributaries_segments_location,
                "result_tributaries_segments_lyr")
            ### Processing Phase 5: Process fields
            arcpy.AddMessage('Processing fields...')
            copy_vaa_values("result_tributaries_segments_lyr",
                            original_vaatable_location,
                            config.vaa_segment_fields)
            process_gfcode_fields("result_tributaries_segments_lyr",
                                  config.fcode_gnisid_fields)
            process_pfcode_fields("result_tributaries_segments_lyr",
                                  config.fcode_pathid_fields)
            if config.result_dissolve:
                ### Processing Phase 6: Dissolve to streams
                arcpy.AddMessage('Dissolving segments to streams...')
                # clear any selection left behind by the field processing
                # so the dissolve sees every segment
                arcpy.SelectLayerByAttribute_management(
                    in_layer_or_view="result_tributaries_segments_lyr",
                    selection_type="CLEAR_SELECTION")
                arcpy.Dissolve_management(
                    in_features=result_tributaries_segments_location,
                    out_feature_class=result_tributaries_dissolved_location,
                    dissolve_field=config.dissolve_fields)
                # internal check
                tributary_stream_count = arcpy.GetCount_management(
                    result_tributaries_dissolved_location)[0]
                arcpy.AddMessage(tributary_stream_count +
                                 " tributary streams exported to " +
                                 config.result_dataset_name + '/' +
                                 config.result_tributaries_dissolved_fcname +
                                 '.')
        else:
            arcpy.AddMessage('Checks complete - No tributaries found')
    else:
        arcpy.AddMessage('Checks complete - No matching start features found')
    ### END PROCESSING ###


### TEST IN ARCPY ###

# nice_params = {
#     "output_folder": 'C:/Workspace/',
#     "gdb_location": 'D:/GCMRC/NHD Plus HR Process Datasets/00 Original Files/NHDPLUS_H_1501_HU4_GDB/NHDPLUS_H_1501_HU4_GDB.gdb',
#     "start_features": 'D:/GCMRC/NHD Plus HR Process Datasets/Basin 15 Start Features/start_features_1501.shp',
#     "max_level": 7
# }
#
# start(nice_params)
def colorPolygons(in_feature_class, feature_field, out_feature_class):
    """Assign each polygon an OVERLAP_GROUP number so that no two
    overlapping polygons share the same group (greedy graph coloring).

    in_feature_class:  input polygon feature class
    feature_field:     field to dissolve on and join back by (may be the
                       feature class's ObjectID field)
    out_feature_class: output copy of the input with OVERLAP_GROUP added

    Raises SystemExit(1) when PolygonNeighbors reports that the input has
    no overlapping features (arcpy WARNING 000117).
    """
    arcpy.env.overwriteOutput = True

    # Dissolve on the key field unless it already is the ObjectID field,
    # in which case the input can be used directly.
    temp_features = 'in_memory/dissolve'
    bldissolved = False
    desc = arcpy.Describe(in_feature_class)
    arcpy.AddMessage("Dissolving features.")
    if hasattr(desc, "OIDFieldName") and feature_field == desc.OIDFieldName:
        temp_features = in_feature_class
    else:
        arcpy.Dissolve_management(in_feature_class, temp_features,
                                  feature_field)
        bldissolved = True
    # Key field used for neighbor analysis: the dissolved OID, or the
    # caller's field when no dissolve happened.
    if bldissolved:
        oid_field = arcpy.Describe(temp_features).OIDFieldName
    else:
        oid_field = feature_field

    # Find pairs of polygons that overlap in area.
    arcpy.AddMessage("Identifying overlapping polygons...")
    arcpy.env.outputMFlag = "Disabled"
    result = arcpy.PolygonNeighbors_analysis(temp_features,
                                             'in_memory/neighbors', oid_field,
                                             "AREA_OVERLAP", "BOTH_SIDES")
    if 'WARNING 000117:' in result.getMessages(1):
        arcpy.AddError("Input feature zone data: {} does not contain "
                       "overlapping features.".format(temp_features))
        sys.exit(1)

    arcpy.AddMessage("Identified overlapping polygons.")
    arcpy.AddMessage("Calculating feature subsets without overlaps...")

    # Neighbor table as an (n, 2) array of [src_FID, nbr_FID] pairs.
    arr = arcpy.da.TableToNumPyArray(
        'in_memory/neighbors',
        ['src_%s' % oid_field, 'nbr_%s' % oid_field])
    arr = numpy.array(arr.tolist())

    # Colors already assigned to a node's neighbors (0 = not yet colored).
    def get_colors(nodes, neighbors):
        colors = set()
        for neighbor in neighbors:
            colors.add(nodes[neighbor][0])
        # BUGFIX: the original called colors.difference([0]) and discarded
        # the result (a no-op).  discard() removes the "uncolored" marker
        # in place as intended.
        colors.discard(0)
        return colors

    # Mint a color one greater than any used so far (colors start at 1).
    def get_new_color(colors):
        return max(colors) + 1 if len(colors) > 0 else 1

    # Pick an arbitrary color from the available ones.
    def choose_color(colors):
        return random.choice(list(colors))

    # Order source FIDs by descending neighbor count so the most
    # constrained nodes are colored first.
    arr_uniq = numpy.unique(arr[:, 0])
    arr_count = numpy.zeros_like(arr_uniq)
    for index in range(arr_uniq.size):
        arr_count[index] = numpy.count_nonzero(arr[:, 0] == arr_uniq[index])
    arr_ind = numpy.argsort(arr_count)[::-1]

    # Node dictionary --
    #   key        := FID of feature (integer)
    #   value[0]   := color of feature (integer, 0 = unassigned)
    #   value[1]   := FIDs of neighboring features (set)
    nodes = collections.OrderedDict()
    for item in arr_uniq[arr_ind]:
        nodes[item] = [0, set()]
    for index in range(arr.shape[0]):
        nodes[arr[index, 0]][1].add(arr[index, 1])

    # Greedy coloring: reuse an existing color not taken by any neighbor,
    # otherwise create a new one.
    colors = set()
    for node in nodes:
        nbr_colors = get_colors(nodes, nodes[node][1])
        choices = colors.difference(nbr_colors)
        if len(choices) == 0:
            new_color = get_new_color(colors)
            colors.add(new_color)
            nodes[node][0] = new_color
        else:
            nodes[node][0] = choose_color(choices)

    # Group node FIDs by their assigned color.
    classes = {}
    for node in nodes:
        color = nodes[node][0]
        classes.setdefault(color, set()).add(node)

    # FIDs with no overlaps never appear in the neighbor table; put them
    # all into one extra class so every feature gets a group.
    all_fids = set()
    with arcpy.da.SearchCursor(temp_features, oid_field) as cursor:
        for row in cursor:
            all_fids.add(row[0])
    disjoint_fids = all_fids.difference(set(nodes.keys()))
    if len(disjoint_fids) > 0:
        new_color = get_new_color(colors)
        classes[new_color] = disjoint_fids

    num_classes = len(classes)

    # Write the group number into a new field--modified by Nicole Smith
    arcpy.AddField_management(temp_features, "OVERLAP_GROUP", "SHORT")
    for index, cl in enumerate(classes):
        test = tuple(map(int, classes[cl]))

        where_clause = '\"%s\" IN %s' % (oid_field, test)
        # NOTE(review): no output layer name is given; relies on the tool
        # returning a usable layer object (ArcGIS Pro behavior).
        feature_lyr = arcpy.MakeFeatureLayer_management(temp_features)
        arcpy.SelectLayerByAttribute_management(feature_lyr, "NEW_SELECTION",
                                                where_clause)
        arcpy.CalculateField_management(feature_lyr, "OVERLAP_GROUP", cl,
                                        "PYTHON")
        arcpy.SelectLayerByAttribute_management(feature_lyr, "CLEAR_SELECTION")

    # Join the groups back onto an unmodified copy of the input.
    arcpy.CopyFeatures_management(in_feature_class, out_feature_class)
    arcpy.JoinField_management(out_feature_class, feature_field, temp_features,
                               feature_field, "OVERLAP_GROUP")
示例#26
0
else:
    # Paired `if` (above, outside this excerpt) presumably handles the
    # field-already-exists case; here the SHORT field is created.
    arcpy.AddField_management("mask", "vegetation", "SHORT", "", "", 10)
# Accumulate a selection of mask cells within `dist` of any co-host
# feature class.
for fc in fclist:
    dsc_fc = arcpy.Describe(fc)
    arcpy.MakeFeatureLayer_management(fc, "{0}".format(dsc_fc.basename))
    arcpy.SelectLayerByLocation_management("mask", "WITHIN_A_DISTANCE",
                                           "{0}".format(dsc_fc.basename), dist,
                                           "ADD_TO_SELECTION")
# Selected cells (near vegetation) get vegetation = 1; the cursor honors
# the layer's current selection.
with arcpy.da.UpdateCursor("mask", ['vegetation']) as cursor:
    for row in cursor:
        row[0] = 1
        cursor.updateRow(row)
del row
del cursor
print('vegetation update finished!')
# Invert the selection and zero out everything else.
arcpy.SelectLayerByAttribute_management("mask", "SWITCH_SELECTION")
with arcpy.da.UpdateCursor("mask", ['vegetation']) as cursor:
    for row in cursor:
        row[0] = 0
        cursor.updateRow(row)
del row
del cursor
arcpy.SelectLayerByAttribute_management("mask", "CLEAR_SELECTION")
# Persist the fully attributed mask to a shapefile.
arcpy.CopyFeatures_management("mask",
                              outfolder + "na_presence_beetle_cohosts_5km.shp")
layer = "mask"
del layer

# Report elapsed wall-clock time (Python 2 print statement).
stop = timeit.default_timer()
print stop - start
示例#27
0
    # Food: apply ALC multiplier
    # --------------------------
    if food_scores:
        # Add new field and copy over basic food score (this is the default for habitats not used for intensive food production)
        print("Setting up food multiplier field")
        MyFunctions.check_and_add_field(NatCap_scores,"FoodxALC", "Float", 0)
        arcpy.CalculateField_management(NatCap_scores,"FoodxALC","!Food!", "PYTHON_9.3")

        # Select intensive food production habitats and multiply food score by ALC multiplier (ignore 'Arable field margins')
        print("Multiplying by ALC multiplier")
        expression = "(" + hab_field + " = 'Arable' OR " + hab_field + " LIKE 'Arable and%' " \
                     "OR " + hab_field + " LIKE 'Cultivated%' OR " + hab_field + " LIKE 'Improved grass%' " \
                     "OR " + hab_field + " LIKE 'Agric%' OR " + hab_field + " ='Orchard') AND ALC_mult IS NOT NULL"
        # Temporary layer filtered to intensive farmland; deleted once the
        # field calculation is done.
        arcpy.MakeFeatureLayer_management(NatCap_scores, "Intensive_farmland")
        arcpy.SelectLayerByAttribute_management("Intensive_farmland", where_clause=expression)
        arcpy.CalculateField_management("Intensive_farmland", "FoodxALC", "!Food! * !ALC_mult!", "PYTHON_9.3")
        arcpy.Delete_management("Intensive_farmland")

        # Add new field and calculate normalised food score
        # Normalise by the maximum possible food x multiplier product.
        print("Calculating normalised food score")
        MyFunctions.check_and_add_field(NatCap_scores,"Food_ALC_norm","Float", 0)
        arcpy.CalculateField_management(NatCap_scores, "Food_ALC_norm", "!FoodxALC!  / " + str(Max_food_mult), "PYTHON_9.3")

    # Aesthetic value: apply AONB multiplier
    #--------------------------------------
    if aesthetic_scores:
        # Add new field and populate with aesthetic value score (default for habitats not in AONB)
        print("Setting up new field for adjusted aesthetic value")
        MyFunctions.check_and_add_field(NatCap_scores, "Aesthetic_AONB", "Float", 0)
        arcpy.CalculateField_management(NatCap_scores,"Aesthetic_AONB", "!Aesthetic!", "PYTHON_9.3")
示例#28
0
def ReduceSites(self, parameters, messages):
    """Reduce training sites by thinning and/or random subsetting.

    parameters[0]: training points layer (TrainPts)
    parameters[1]: 'true' to thin points to a minimum spacing
    parameters[2]: unit area (sq km) from which the minimum spacing radius
                   is derived (sqrt(area / pi))
    parameters[3]: 'true' to randomly subsample the remaining points
    parameters[4]: random cutoff percentage (1-100)
    parameters[5]: output feature class for the reduced selection
    messages:      geoprocessing messages object for progress/errors

    Requires arcpy.env.mask to be set to a FeatureLayer; points outside
    the mask are dropped before any reduction.
    """
    import sys, string, os, math, traceback
    import arcgisscripting
    import arcpy
    import math
    messages.addMessage("Starting sites reduction")
    messages.addMessage("------------------------------")

    # Process: Missing Data Variance...
    try:
        #gp.AddMessage("Point 1");

        TrainPts = parameters[0].valueAsText
        OutputPts = parameters[5].valueAsText

        messages.addMessage("%-20s %s" % ("Training points:", TrainPts))
        # No selection_type/where_clause given: appears intended to reset
        # the layer selection to all features before masking.
        arcpy.SelectLayerByAttribute_management(TrainPts)
        #gp.AddMessage("%s All Selected = %s"%(TrainPts,str(gp.GetCount_management(TrainPts))))
        #Get initial selection within mask
        maskpolygon = arcpy.env.mask
        if maskpolygon is None:
            #messages.addErrorMessage("Mask doesn't exist! Set Mask under Analysis/Environments.");
            raise arcpy.ExecuteError(
                "Mask doesn't exist! Set Mask under Analysis/Environments.")

        if not arcpy.Exists(arcpy.env.mask):
            #messages.addErrorMessage("Mask doesn't exist! Set Mask under Analysis/Environments.");
            raise arcpy.ExecuteError(
                "Mask doesn't exist! Set Mask under Analysis/Environments.")
        maskname = arcpy.Describe(arcpy.env.mask).Name

        messages.AddMessage("%-20s %s" %
                            ("Scratch workspace:", arcpy.env.scratchWorkspace))
        maskpolygon = arcpy.env.mask
        messages.AddMessage("%-20s %s" % ("Using mask:", maskname))
        messages.AddMessage(
            "%-20s %s" %
            ("Mask type:", arcpy.Describe(arcpy.env.mask).dataType))
        messages.AddMessage("%-20s %s" % ("Selection layer:", OutputPts))
        if not arcpy.Exists(maskpolygon):
            raise arcpy.ExecuteError("Mask doesn't exist - aborting")

        datatype = arcpy.Describe(arcpy.env.mask).dataType
        if (datatype != "FeatureLayer"):
            raise arcpy.ExecuteError(
                "Reduce training points tool requires mask of type 'FeatureLayer!'"
            )

        #This fails with raster mask:
        arcpy.MakeFeatureLayer_management(maskpolygon, maskname)
        #maskname = maskname + "_tmp";
        #arcpy.MakeRasterLayer_management(maskpolygon, maskname)

        # Keep only training points that fall inside the mask polygon.
        arcpy.SelectLayerByLocation_management(TrainPts, 'CONTAINED_BY',
                                               maskname, "#",
                                               'SUBSET_SELECTION')
        tpcount = arcpy.GetCount_management(TrainPts)
        #messages.AddMessage("debug: Selected by mask = "+str(tpcount))

        thin = parameters[1].valueAsText == 'true'
        #messages.addMessage("debug: thin: = " + str(thin)) ;
        if thin:
            UnitArea = parameters[2].value
        random = parameters[3].valueAsText == 'true'

        #SDMValues.appendSDMValues(gp, UnitArea, TrainPts)
        # This is used to test if OID is OBJECTID or FID
        field_names = [f.name for f in arcpy.ListFields(TrainPts)]

        if ('OBJECTID' in field_names):
            messages.AddMessage(
                "Object contains OBJECTID and is geodatabase feature")
        else:
            messages.AddMessage("Object contains FID and is of type shape")

        if thin:
            #Get minimum allowable distance in meters based on Unit Area
            # radius of a circle of UnitArea square kilometers, in meters
            minDist = math.sqrt(UnitArea * 1000000.0 / math.pi)

            #Make list of points from mask-selected featureclass
            listPnts = []
            feats = arcpy.SearchCursor(TrainPts)
            #messages.addMessage(dir(feats));
            feat = feats.next()

            while feat:
                pnt = feat.Shape  # .GetPart(0)
                # Geodababase
                if ("OBJECTID" in field_names):
                    listPnts.append((pnt, feat.OBJECTID))
                else:
                    listPnts.append((pnt, feat.FID))
                feat = feats.next()
            #gp.AddMessage("%s = %s"%('Num listPnts',listPnts[0]))

            #Make list of selected points by making a new list of points
            #not within minimum allowable distance of other points.
            """
                Test point n against list of points saved as having no neighbors within 
                allowable distance of all points.
            """
            # Only NewAlg == 2 is live; the other two branches are kept for
            # reference and are NOT maintained (see NOTEs below).
            NewAlg = 2
            if NewAlg == 2:
                """ Faster processing of table, but same brute-force algorithm as NEwAlg == 1 """
                class POINT(object):
                    # Lightweight (x, y, fid) record for distance testing.
                    def __init__(self, Pnt, FID):
                        """ Pnt is an ESRI geoprocessing point object """
                        self.x = Pnt.X
                        self.y = Pnt.Y
                        self.fid = FID

                    def __eq__(self, otherpnt):
                        return self.x == otherpnt.x and self.y == otherpnt.y

                    def __cmp___(self, otherpnt):
                        # NOTE(review): misspelled dunder (__cmp___) -- never
                        # invoked by Python; harmless dead code.
                        if self.x == otherpnt.x and self.y == otherpnt.y:
                            return 0
                        else:
                            return 1

                    def __repr__(self):
                        return "%s, %s, %s" % (self.x, self.y, self.fid)

                def rowgen(searchcursor_rows):
                    """ Convert gp searchcursor to a generator function """
                    rows = searchcursor_rows
                    row = rows.next()
                    while row:
                        yield row
                        row = rows.next()

                def distance(pnt1, pnt0):
                    # Euclidean distance between two POINT records.
                    return math.hypot(pnt1.x - pnt0.x, pnt1.y - pnt0.y)

                def brute_force(savedPnts, unitRadius, point):
                    """
                        1. Add first point to saved list.
                        2. Check if next point is within Unit radius of saved points.
                        3. If not, add it to saved list.
                        4. Go to 2.
                        
                        The number tried is n/2 on average, because saved list grows from 1.
                        nTrials = Sigma(x,x=(1,n)) = n/2 + n*n/4 or O(n*n)
                        nTrials(10) = 5 + 25 = 30
                        nTrials(100) = 50 + 2500 = 2550
                        nTrials(1000) = 500 + 250,000 = 250,500
                        nTrials(10,000) = 5000 + 25,000,000 = 25,050,000
                    """
                    for pnt in savedPnts:
                        d = distance(pnt, point)
                        if d < unitRadius: return False
                    return True

                #Make list of points from mask-selected featureclass
                savedPnts = []
                feats = rowgen(arcpy.SearchCursor(TrainPts))
                # This is python 3 specific:
                feat = next(feats)
                if not feat:
                    raise Exception('No feature rows selected')
                pnt = feat.Shape.getPart(0)
                if ("OBJECTID" in field_names):
                    point = POINT(pnt, feat.OBJECTID)
                else:
                    point = POINT(pnt, feat.FID)

                savedPnts.append(point)

                unitRadius = minDist
                for feat in feats:
                    pnt = feat.Shape.getPart(0)
                    if ("OBJECTID" in field_names):
                        point = POINT(pnt, feat.OBJECTID)
                    else:
                        point = POINT(pnt, feat.FID)
                    if brute_force(savedPnts, unitRadius, point):
                        savedPnts.append(point)
                fidl = savedPnts
                if len(fidl) > 0:
                    #Compose SQL where clause like:  "FID" IN (11, 233, 3010,...)
                    if ("OBJECTID" in field_names):
                        fids = '"OBJECTID" IN (%d' % fidl[0].fid
                    else:
                        fids = '"FID" IN (%d' % fidl[0].fid
                    for pnt in fidl[1:]:
                        fids += ', %d' % pnt.fid
                    fids += ')'

            elif NewAlg == 1:
                """ This algorithm does not have correct FIDs with points """
                # NOTE(review): dead branch (NewAlg is hard-coded to 2) and
                # broken -- `gp` is undefined here (NameError if reached),
                # and the fids concatenation below mixes int + str.
                #Make list of points from mask-selected featureclass
                listPnts = []
                feats = gp.SearchCursor(TrainPts)
                feat = feats.Next()
                while feat:
                    pnt = feat.Shape.GetPart(0)
                    listPnts.append((pnt, feat.FID))
                    feat = feats.Next()
                #gp.AddMessage("%s = %s"%('Num listPnts',listPnts[0]))
                fidl = []
                fidl.append(listPnts[0])
                #gp.AddMessage (str(fidl2))
                for (p0, FID) in listPnts[1:]:
                    OK = 1
                    for (p1, _) in fidl:
                        dst = math.hypot(p1.X - p0.X, p1.Y - p0.Y)
                        #dst = math.sqrt((p1.X-p0.X)**2 + (p1.Y-p0.Y)**2)
                        if dst < minDist:
                            OK = 0
                            break
                    if OK: fidl.append((p0, FID))
                #gp.AddMessage('fidl: %s'%fidl)
                #gp.AddMessage('fidl:'+str(len(fidl))+","+str(fidl))
                #Form selected set from FID list
                fids = 'FID = '
                for (p, fid) in fidl:
                    fids += (fid + ' or FID = ')
                fids = fids[:len(fids) - 9]  #TODO: Fix this ...
            else:
                ''' Legacy Algorithm
                    This algorithm does not have correct FIDs with points
                '''
                # NOTE(review): also a dead branch; relies on the undefined
                # `gp` geoprocessor object.
                #Make list of points from mask-selected featureclass
                listPnts = []
                feats = gp.SearchCursor(TrainPts)
                feat = feats.Next()
                while feat:
                    pnt = feat.Shape.GetPart(0)
                    listPnts.append(pnt)
                    feat = feats.Next()
                #gp.AddMessage("%s = %s"%('Num listPnts',listPnts[0]))
                bmSize = tpcount
                fidl = []  #list of indeces of points not close to other points
                first = 1
                s = 1  #number of tests
                fid = 0  #index of tested point and for message at fid mod 100
                for p0 in listPnts:
                    if (fid % 100) == 99: gp.AddMessage("No. tested: %s" % fid)
                    if first:
                        fidl.append(fid)
                        first = 0
                        fid += 1
                        continue
                    else:
                        OK = 1
                        #j is index of tested points.
                        #ith point in points list is tested against jth point in points list
                        #listPnts is ordered on FID
                        for j in range(0, fid):
                            p1 = listPnts[j]
                            s += 1
                            #if jth point index saved in fidl, test ith point against it
                            if j in fidl:
                                dst = math.sqrt((p1.X - p0.X)**2 +
                                                (p1.Y - p0.Y)**2)
                                if dst < minDist:
                                    OK = 0
                                    break
                        if OK and (
                                fid < bmSize
                        ):  # fid is always less than no. of selected points
                            fidl.append(fid)
                    fid += 1
                    s += 1

                #gp.AddMessage('fidl:'+str(len(fidl))+","+str(fidl))
                if ('OBJECTID' in field_names):
                    fids = 'OBJECTID = ('
                else:
                    fids = 'FID = ('
                for fid in fidl:
                    fids += "%d, " % fid
                fids += ')'
            #gp.AddMessage('fids:'+str(fids))
            total_amount_of_points = (arcpy.GetCount_management(TrainPts))

            # Narrow the current selection to the thinned FID set.
            arcpy.SelectLayerByAttribute_management(TrainPts,
                                                    'SUBSET_SELECTION', fids)
            messages.AddMessage("Selected by thinning = " +
                                str(arcpy.GetCount_management(TrainPts)) +
                                "/" + str(total_amount_of_points))

    #Random site reduction can take place after thinning
        if random:
            from random import Random
            randomcutoff = float(parameters[4].valueAsText)
            #Make this validator thing...
            if randomcutoff >= 1 and randomcutoff <= 100:
                randomcutoff = randomcutoff / 100.0
            else:
                messages.AddError("Random cutoff value not in range 1%-100%")
                raise Exception('User error')
            feats = arcpy.SearchCursor(TrainPts)
            feat = feats.next()
            randnums = []
            import random
            rand = random.Random(None)
            # Notice: We go through the items twice - to make sure we actually pick exabtly the percentage asked - not random
            while feat:
                randnums.append(rand.random())
                feat = feats.next()
            #gp.AddMessage("randnums: " + str(randnums))
            # list * 1 makes a shallow copy so sorting leaves randnums'
            # per-feature order intact
            sorted_randnums = randnums * 1
            #gp.AddMessage("sorted_randnums: " + str(sorted_randnums))
            sorted_randnums.sort()
            #messages.addMessage("sorted_randnums: " + str(sorted_randnums))
            #gp.AddMessage("randnums: " + str(randnums))
            # The value at the cutoff percentile: features whose random
            # draw is below it are kept.
            cutoff = sorted_randnums[int(
                randomcutoff *
                int(arcpy.GetCount_management(TrainPts).getOutput(0)))]
            #gp.AddMessage("cutoff: " + str(cutoff))
            if ('OBJECTID' in field_names):
                fids = 'OBJECTID = '
            else:
                fids = 'FID = '
            feats = arcpy.SearchCursor(TrainPts)
            i = 0
            feat = feats.next()
            #Is this first?
            first = 0

            while feat:
                if randnums[i] < cutoff:
                    if ('OBJECTID' in field_names):
                        if (first > 0):
                            fids += ' or OBJECTID = '
                        else:
                            first = 1
                        fids += (str(feat.OBJECTID))
                    else:
                        if (first > 0):
                            fids += ' or FID = '
                        else:
                            first = 1
                        fids += (str(feat.fid))
                i += 1
                feat = feats.next()
            del feats
            #Removing is not done this way... huoh...T
            #fids = fids[:len(fids)-9]
            #messages.AddMessage("Fids: " + fids)
            arcpy.SelectLayerByAttribute_management(TrainPts,
                                                    'SUBSET_SELECTION', fids)
            messages.AddMessage("Selected by random = " +
                                str(arcpy.GetCount_management(TrainPts)))

        if not thin and not random:
            # NOTE(review): `gp` is undefined here, and raising a string
            # is a TypeError in Python 3 -- this error path is broken.
            gp.AddError("No training sites reduction method selected.")
            raise 'User Error'

        if OutputPts:  # save as a layer
            arcpy.CopyFeatures_management(TrainPts, OutputPts)
            arcpy.SetParameterAsText(5, OutputPts)

    except arcpy.ExecuteError as e:
        #TODO: Clean up all these execute errors in final version
        arcpy.AddError("\n")
        arcpy.AddMessage(
            "Training sites reduction caught arcpy.ExecuteError: ")
        args = e.args[0]
        args.split('\n')
        arcpy.AddError(args)

        arcpy.AddMessage("-------------- END EXECUTION ---------------")
        raise arcpy.ExecuteError
    except Exception as Msg:
        # get the traceback object
        tb = sys.exc_info()[2]
        # tbinfo contains the line number that the code failed on and the code from that line
        tbinfo = traceback.format_tb(tb)[0]
        # concatenate information together concerning the error into a message string
        # NOTE(review): sys.exc_type / sys.exc_value were removed in
        # Python 3; this handler itself raises AttributeError there.
        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n    " + \
            str(sys.exc_type)+ ": " + str(sys.exc_value) + "\n"
        # generate a message string for any geoprocessing tool errors
        #msgs = "GP ERRORS:\n" + gp.GetMessages(2) + "\n"
        messages.addErrorMessage(pymsg)
        #msgs)

        # return gp messages for use with a script tool
        #messages.AddErrorMessage(pymsg)

        # print messages for use in Python/PythonWin
        print(pymsg)
        raise
示例#29
0
fcs = arcpy.ListFeatureClasses()

arcpy.AddMessage(" Executing: subset layer by attribute ...")
# Export one shapefile per (species, tissue, year) row found in each
# feature class of the workspace.  Error trapping keeps one bad feature
# class from killing the whole run.
try:

    for fc in fcs:
        rows = arcpy.SearchCursor(fc)
        # base output name: feature class name without its extension
        fileN = str.split(str(fc), '.')[0]
        for row in rows:
            species = row.Species
            tissue = row.Tissue
            year = row.Year
            expressionS = "\"Species\" = '%s'" % species
            expressionY = '"Year" = %d' % year
            expressionT = "\"Tissue\" = '%s'" % tissue
            # Layer is pre-filtered to the species via the where clause;
            # then narrow to the year and the tissue.
            arcpy.MakeFeatureLayer_management(fc, "fc_Lyr", expressionS)
            arcpy.SelectLayerByAttribute_management("fc_Lyr", "NEW_SELECTION",
                                                    expressionY)
            # BUGFIX: was NEW_SELECTION, which discarded the year filter
            # applied just above; SUBSET_SELECTION intersects with it so
            # the export really is species AND year AND tissue.
            arcpy.SelectLayerByAttribute_management("fc_Lyr", "SUBSET_SELECTION",
                                                    expressionT)
            speciesN = ''.join(e for e in species if e.isalnum())
            yearN = str(int(year))
            tissueN = str(tissue)
            arcpy.CopyFeatures_management(
                "fc_Lyr", outWorkspace + fileN + "_" + speciesN + "_" +
                tissueN + "_" + yearN)

except Exception:
    # Narrowed from a bare except (which also swallowed KeyboardInterrupt);
    # report the geoprocessing messages and continue.
    print(arcpy.GetMessages())
示例#30
0
文件: spli.PY 项目: CDPZ/EHI-Cal
#coding=utf-8
# Split script: for every subfolder of the current directory, export the
# midnight ("... 00:00") records of each shapefile into a parallel
# 'select<folder>' directory.
import arcpy
import os
# Get the current working directory.
currentPath=os.getcwd()
print currentPath
# List the entries in the current directory.
files=os.listdir(currentPath)
for file in files:
    # Entries without a '.' are treated as folders.
    if(len(file.split('.'))<2):
        # Point the arcpy workspace at this folder.
        arcpy.env.workspace=currentPath+"\\"+file
        print file
        # Create the 'select<folder>' output directory if it is missing.
        if not os.path.exists(currentPath+'\\'+'select'+file):
            os.mkdir(currentPath+'\\'+'select'+file)
        # Walk the files inside the folder.
        shpFiles=os.listdir(file)
        for shpFile in shpFiles:
            # Only process shapefiles.
            if(shpFile.split('.')[-1]=="shp"):
                inPoint=shpFile
                # Output name reuses characters 1-10 of the input name.
                outPoint=currentPath+'\\'+'select'+file+'\\'+shpFile[1:11]+".shp"
                print inPoint
                lyr=shpFile[1:11]
                arcpy.MakeFeatureLayer_management(inPoint, lyr)
                # Keep only rows whose date string ends in "00:00".
                arcpy.SelectLayerByAttribute_management(lyr, "NEW_SELECTION", '"date" LIKE \'%00:00\'')
                arcpy.CopyFeatures_management(lyr, outPoint)