コード例 #1
0
    
    
    # NOTE(review): fragment of a larger per-feature-class loop -- `fc`, `output`,
    # `codeblock`, and the 'FrontLyr'/'BackLyr' layers are created above this view.
    # Join back-yard records onto the front-yard layer by APN (fc[:-4] strips a
    # 4-character suffix from the feature-class name -- TODO confirm which one).
    arcpy.AddJoin_management('FrontLyr', fc[:-4] + 'APN', 'BackLyr', fc[:-4] + 'APN')
    # Only parcels that have a processed back-yard record get BackYard_Area set.
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'NEW_SELECTION', fc + "_Back.Proc IS NOT NULL")
    
    arcpy.CalculateField_management('FrontLyr', fc + '_Front.BackYard_Area', 'getVal(!' + fc + '_Back.Shape_Area!)', 'PYTHON_9.3', codeblock)
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'CLEAR_SELECTION')
    arcpy.RemoveJoin_management('FrontLyr')
    
    # Back-yard polygons without a processed flag are exported to their own FC...
    arcpy.SelectLayerByAttribute_management('BackLyr', 'NEW_SELECTION', "Proc IS NULL")
    arcpy.FeatureClassToFeatureClass_conversion('BackLyr', output,  fc + '_noFYBack')
    arcpy.SelectLayerByAttribute_management('BackLyr', 'CLEAR_SELECTION')


    # ...then appended into the front layer, where rows still missing a
    # BackYard_Area get one computed from their own Shape_Area.
    arcpy.MakeFeatureLayer_management(output + '\\' + fc + '_noFYBack', 'NoFYBack_Lyr')    
    arcpy.Append_management('NoFYBack_Lyr', 'FrontLyr', 'NO_TEST')
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'NEW_SELECTION', 'BackYard_Area IS NULL')
    arcpy.CalculateField_management('FrontLyr', 'BackYard_Area', 'getVal(!Shape_Area!)', 'PYTHON_9.3', codeblock)
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'CLEAR_SELECTION')

    # NOTE(review): 'NoFYBY_Lyr' is not created anywhere in this fragment --
    # presumably made above this view; verify it is not a typo for 'NoFYBack_Lyr'.
    arcpy.Append_management('NoFYBY_Lyr', 'FrontLyr', 'NO_TEST')

    # Parcels with neither front- nor back-yard area are tallied as NA_Area.
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'NEW_SELECTION', "FrontYard_Area IS NULL AND BackYard_Area IS NULL")
    arcpy.CalculateField_management('FrontLyr', 'NA_Area', 'getVal(!Shape_Area!)', 'PYTHON_9.3', codeblock)
    arcpy.SelectLayerByAttribute_management('FrontLyr', 'CLEAR_SELECTION')

    # Summary fields on the final output table (filled elsewhere).
    arcpy.AddField_management('FinalOut', 'SUM_SUM_FRONT_YD', 'DOUBLE')
    arcpy.AddField_management('FinalOut', 'SUM_SUM_BACK_YD', 'DOUBLE')
    arcpy.AddField_management('FinalOut', 'SUM_SUM_NA_YD', 'DOUBLE')
    
コード例 #2
0
    def parcelFlag(reviewName, reviewType, sourceFC, fieldCalculation,
                   whereClause, outputFC, workspace, parcels, parcelDict):
        """Create a review layer for one review type and publish it.

        Builds a local feature class from sourceFC (a 500-foot buffer when the
        review name contains '_Buffer', otherwise a filtered copy), optionally
        stamps it with review metadata and replaces the contents of outputFC,
        then deletes the working layer and returns parcelDict.

        NOTE(review): the parcel-intersection bookkeeping that used to update
        parcelDict is disabled (the triple-quoted blocks below are no-op string
        statements), so parcelDict is currently returned unchanged.  `zonB`,
        `zonO`, and `parcels` are defined/used elsewhere in the module.
        """
        reviewLayer = reviewType + '_' + reviewName
        # Base zoning publishes CODE, overlay zoning OVERLAY_NAME; every other
        # review type gets a '<name>ReviewReason' text field.
        reviewField = 'CODE' if reviewType == zonB else 'OVERLAY_NAME' if reviewType == zonO else reviewName + 'ReviewReason'
        print reviewField

        if '_Buffer' in reviewName:
            print('Buffering')
            # Buffer reviews are proximity-based: 500 feet around the source.
            arcpy.Buffer_analysis(sourceFC, reviewLayer, '500 Feet')
        else:
            print('Creating Local FC for ' + reviewName)
            arcpy.FeatureClassToFeatureClass_conversion(
                sourceFC, workspace, reviewLayer, whereClause)

        # All FCs except for Zoning are copied to LIGISDB
        if outputFC:
            print('Copying FC to GISLNI')
            arcpy.AddField_management(reviewLayer, reviewField, 'TEXT')
            arcpy.CalculateField_management(reviewLayer, reviewField,
                                            fieldCalculation)
            arcpy.AddField_management(reviewLayer,
                                      'REVIEW_TYPE',
                                      'TEXT',
                                      field_length=2000)
            arcpy.CalculateField_management(reviewLayer, 'REVIEW_TYPE',
                                            '"' + reviewName + '"')
            # Replace (not merge) the published copy: clear all rows, re-append.
            arcpy.DeleteRows_management(outputFC)
            arcpy.Append_management(reviewLayer, outputFC, 'NO_TEST')
        # Disabled: intersect review polygons with parcels.
        """
        print('Performing Intersect')
        #Create polygons where review polygons overlap with parcels
        arcpy.Intersect_analysis([parcels]+[reviewLayer], IntersectOutput, 'ALL')
        print('Intersect Complete')
        """
        arcpy.Delete_management(reviewLayer)
        # Disabled: sliver filtering and per-parcel dictionary updates.
        """
        #To ensure no slivers are included a thiness ratio and shape area are calculated for intersecting polygons
        actualFields = [f.name for f in arcpy.ListFields(IntersectOutput)]
        print actualFields
        arcpy.AddField_management(IntersectOutput, 'ThinessRatio', 'FLOAT')
        arcpy.AddGeometryAttributes_management(IntersectOutput, 'AREA', Area_Unit='SQUARE_FEET_US')
        arcpy.AddGeometryAttributes_management(IntersectOutput, 'PERIMETER_LENGTH', 'FEET_US')
        arcpy.CalculateField_management(IntersectOutput, 'ThinessRatio',
                                        "4 * 3.14 * !POLY_AREA! / (!PERIMETER! * !PERIMETER!)", 'PYTHON_9.3')

        fieldList = ['PARCELID', 'ADDRESS', 'DISTRICT', 'Corner', 'GROSS_AREA', 'POLY_AREA',
                     'ThinessRatio', reviewField]
        IntersectCursor = arcpy.da.SearchCursor(IntersectOutput, fieldList)
        countin = int(arcpy.GetCount_management(IntersectOutput).getOutput(0))
        count = 0
        print('Found ' + str(countin) + ' records in input table')
        breaks = [int(float(countin) * float(b) / 100.0) for b in range(10, 100, 10)]
        for row in IntersectCursor:
            count += 1
            if count in breaks:
                print('Parsing Intersect FC ' + str(int(round(count * 100.0 / countin))) + '% complete...')
            #Only polygons with a thiness ratio of over 0.3 and a parcel coverage of more than 3% are included in analysis
            if row[1] != '' and row[1] is not None and row[7] != '': #and row[6] > 0.3 and (row[5] / float(row[4])) > 0.03 :
                #If parcel has not made it into dictionary, parcel gets a new entry added
                if row[0] not in parcelDict and reviewType != zonB and reviewType != zonO:
                    tempDict = {'Address': row[1], 'PAC': [], 'PCPC': [], 'TempPCPC': [], 'PHC': [], 'PWD': [], 'SteepSlope': 0, 'Floodplain': 0, 'CornerProp': row[3], 'ParcelArea': row[4], 'BaseZoning': [], 'OverlayZoning': [], 'District': row[2]}
                    tempDict[reviewType] = [row[7]]
                    if fieldCalculation == '"100 Year Flood Plain"':
                        tempDict['Floodplain'] = 1
                    if fieldCalculation == '"Steep Slope"':
                        tempDict['SteepSlope'] = 1
                    parcelDict[row[0]] = tempDict
                #If parcel already exists, its current dictionary entry is appended with new review information
                elif row[0] in parcelDict:
                    tempDict = parcelDict.get(row[0])
                    oldList = tempDict.get(reviewType)
                    #The set function removes all duplicates from list
                    tempDict[reviewType] = list(set([row[7]] + oldList))
                    if fieldCalculation == '"100 Year Flood Plain"':
                        tempDict['Floodplain'] = 1
                    if fieldCalculation == '"Steep Slope"':
                        tempDict['SteepSlope'] = 1
                    parcelDict[row[0]] = tempDict

        arcpy.Delete_management(IntersectOutput)
        """
        return parcelDict
コード例 #3
0
# Script-tool parameters: folder of .kml/.kmz files, and output folder for the
# merged result shapefile.
in_file = arcpy.GetParameterAsText(0)
out_file = arcpy.GetParameterAsText(1)

##in_file = 'g:/beijing/summer'
##out_file = 'g:/beijing/summer/convert'

if not os.path.exists(out_file):
    os.makedirs(out_file)

# Collect every KML/KMZ file in the input folder.
# BUG FIX: the original second test was `os.path.splitext[f][1]` -- it
# subscripted the function instead of calling it, raising TypeError for any
# file whose extension was not '.kml'.
gg = os.listdir(in_file)
ff = [f for f in gg if os.path.splitext(f)[1] in ('.kml', '.kmz')]
arcpy.AddMessage(ff)

# Empty polygon shapefile that every converted KML is appended into.
arcpy.CreateFeatureclass_management(out_file, 'result.shp', 'POLYGON')
arcpy.AddField_management(os.path.join(out_file, 'result.shp'), 'Name', 'TEXT')
out_feature = os.path.join(out_file, 'result.shp')

for f in ff:
    in_file1 = os.path.join(in_file, f)
    # KMLToLayer writes a file GDB named after the KML into out_file.
    arcpy.KMLToLayer_conversion(in_file1, out_file)
    # BUG FIX: original messages lacked the separator space after the filename.
    arcpy.AddMessage(f + ' has been succesfully converted')
    # Pull the polygon placemarks out of the generated GDB into the result.
    # NO_TEST: schemas are assumed compatible -- unmatched fields are dropped.
    arcpy.Append_management(
        os.path.join(
            os.path.join(out_file,
                         os.path.splitext(f)[0] + '.gdb'),
            'Placemarks_polygon'), out_feature, 'NO_TEST')
    arcpy.AddMessage(f + ' has been succesfully added to the result feature')
コード例 #4
0
def main(config_file, *args):
    """Load local address points into the community addresses schema.

    Reads dataset paths and local field names from an INI file, truncates the
    community copy, builds an Append field map that routes each configured
    local field into the community schema, appends the local addresses, and
    stamps the audit fields (LASTUPDATE, LASTEDITOR, localfips).

    config_file -- path to the INI file with [LOCAL_DATA] and [FIELD_MAPPER]
                   sections.  Extra positional args are accepted but unused.
    Exits via sys.exit() when the INI or required datasets are missing.
    """
    # Set overwrite output option to True
    arcpy.env.overwriteOutput = True

    if isfile(config_file):
        config = ConfigParser.ConfigParser()
        config.read(config_file)
    else:
        print("INI file not found.")
        sys.exit()

    # Config File
    localaddresses = config.get('LOCAL_DATA', 'localaddresses')
    communityaddresseslocalcopy = config.get('LOCAL_DATA',
                                             'communityaddresseslocalcopy')
    localfips = config.get('LOCAL_DATA', 'localfips')

    # One Append field-map entry per community field.  An empty local field
    # name leaves the target field unmapped (template default); otherwise the
    # local field is routed in.  The literal grammar matches what the Append
    # tool expects; the 300-char Text template is shared by every field.
    common_vars = "true true false 300 Text 0 0, First, #"

    def _entry(prefix, local_field):
        """Return one field-map fragment for `prefix`, mapping `local_field`."""
        if local_field == "":
            return "{} {}".format(prefix, common_vars)
        return "{} {}, {}, {}, -1, -1".format(prefix, common_vars,
                                              localaddresses, local_field)

    # (community field definition, FIELD_MAPPER option) pairs, in schema
    # order.  The double spaces inside some definitions are preserved
    # verbatim from the original field map.
    specs = [
        ("SITEADDID 'Site Address ID'", 'siteaddressid'),
        ("ADDPTKEY 'Address Point ID'", 'addresspointid'),
        ("PREADDRNUM 'Address Number Prefix'", 'addressnumberprefix'),
        ("ADDRNUMSUF  'Address Number Suffix'", 'addressnumbersuffix'),
        ("ADDRNUM  'Full Address Number'", 'fulladdressnumber'),
        ("ADDRRANGE 'Address Range'", 'addressrange'),
        ("UNITTYPE 'Address Unit Type'", 'addressunittype'),
        ("UNITID 'Address Unit Number'", 'addressunitnumber'),
        ("ALTUNITTYPE 'Alternate Address Unit Type'", 'alternateaddressunittype'),
        ("ALTUNITID 'Alternate Address Unit Number'", 'alternateaddressunitnumber'),
        ("FULLNAME 'Full Road Name'", 'fullroadname'),
        ("FULLADDR 'Full Address'", 'fulladdress'),
        ("PLACENAME 'Place Name'", 'placename'),
        ("MUNICIPALITY  'Municipality Name'", 'municipalityname'),
        ("USNGCOORD 'USNG Coordinate'", 'usngcoordinate'),
        ("ADDRCLASS 'Description'", 'description'),
        # NOTE(review): the original read option 'siteaddressid' (not
        # 'location') for POINTTYPE -- looks like a copy/paste slip, kept
        # as-is for behavioral compatibility; confirm against the INI.
        ("POINTTYPE 'Location'", 'siteaddressid'),
        ("CAPTUREMETH 'Capture Method'", 'capturemethod'),
        ("STATUS 'Status'", 'status'),
    ]
    entries = [_entry(prefix, config.get('FIELD_MAPPER', option))
               for prefix, option in specs]
    # localfips comes from [LOCAL_DATA], not [FIELD_MAPPER].
    entries.append(_entry("localfips 'Local FIPS code'", localfips))

    print("Loading Configuration File")
    arcpy.AddMessage("Loading Configuration File")

    if not arcpy.Exists(localaddresses):
        print("Please specify a input address feature class (localaddresses=) in the configuration file, exiting")
        arcpy.AddMessage(
            "Please specify a input parcel layer in the configuration file, exiting"
        )
        sys.exit()

    if communityaddresseslocalcopy == "":
        print("Please specify a input community parcel layer (communityaddresslocalcopy=) in the configuration file, exiting")
        arcpy.AddMessage(
            "Please specify a input parcel layer in the configuration file, exiting"
        )
        sys.exit()

    # Delete existing dataset that matches the community parcel schema
    arcpy.management.TruncateTable(communityaddresseslocalcopy)
    print("Cleaning up local address data")

    # Append new parcels into the community addresses schema; entries are
    # joined with '; ' exactly as the Append tool's field_mapping expects.
    field_map = "; ".join(entries)

    arcpy.Append_management(localaddresses, communityaddresseslocalcopy,
                            "NO_TEST", field_map)

    print("Mapping Local Address data to Community Address Schema")
    print("Loading Addresses to Community Addresses dataset")
    arcpy.AddMessage("Mapping Local Address data to Community Address Schema")
    arcpy.AddMessage("Loading Addresses to Community Addresses dataset")

    # Calculate the Last Update field (today's date, evaluated per-row by the
    # geoprocessor -- the strftime call is a PYTHON expression string).
    arcpy.CalculateField_management(communityaddresseslocalcopy, "LASTUPDATE",
                                    "time.strftime(\"%m/%d/%Y\")", "PYTHON",
                                    "")
    print("Calculating Last Update")
    arcpy.AddMessage("Calculating Last Update")

    # Calculate Last Editor field (stamped with the local FIPS code)
    calc0 = '"{0}"'.format(localfips)
    arcpy.CalculateField_management(communityaddresseslocalcopy, "LASTEDITOR",
                                    calc0)
    print("Calculating Last Editor")
    arcpy.AddMessage("Calculating Last Editor")

    # Calculate the LOCALFIPS to the County/City Name
    calc = '"{0}"'.format(localfips)
    arcpy.CalculateField_management(communityaddresseslocalcopy, "localfips",
                                    calc, "VB", "")
    print("Set FIPS Code information")
    arcpy.AddMessage("Calculating 'FIPS' Code Information")
コード例 #5
0
def insertar_registros(data):
    """Reload segmentation layers in the SDE geodatabase per (ubigeo, zona).

    For each (ubigeo, zona) pair in `data`, deletes the existing rows from the
    SEGM_* tables via direct SQL, then appends the staging datasets (the tb_*
    module globals) into their SDE targets with Append NO_TEST.

    NOTE(review): relies on module globals `conx` and the tb_* datasets
    defined elsewhere in the file.
    """
    arcpy.env.overwriteOutput = True
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(4326)
    # Connection constants; ip/usuario/password are unused below -- the .sde
    # connection file already carries the credentials.
    db = 'CPV_SEGMENTACION_GDB'
    ip = '172.18.1.93'
    usuario = 'sde'
    password = '******'
    path_conexion = "Database Connections/{}.sde".format(db)
    #path_conexion = "d:/conexion/{}.sde".format(db)
    #path_conexion=conx.conexion_arcgis(db,ip,usuario,password)
    arcpy.env.workspace = path_conexion

    # Fully-qualified paths of the SDE target tables / feature classes.
    segm_ruta = path_conexion + "/{db}.SDE.SEGM_U_RUTA".format(db=db)
    segm_aeu = path_conexion + "/{db}.SDE.SEGM_U_AEU".format(db=db)
    segm_subzona = path_conexion + "/{db}.SDE.SEGM_U_SUBZONA".format(db=db)
    segm_seccion = path_conexion + "/{db}.SDE.URBANO/{db}.SDE.SEGM_U_SECCION".format(
        db=db)
    segm_rutas_lineas = path_conexion + "/{db}.SDE.URBANO/{db}.SDE.SEGM_RUTAS_LINEAS".format(
        db=db)
    segm_rutas_lineas_multi = path_conexion + "/{db}.SDE.URBANO/{db}.SDE.SEGM_RUTAS_LINEAS_MULTIFAMILIAR".format(
        db=db)
    segm_rutas_puntos = path_conexion + "/{db}.SDE.URBANO/{db}.SDE.SEGM_RUTAS_PUNTOS".format(
        db=db)
    segm_frentes_1 = path_conexion + "/{db}.SDE.URBANO/{db}.SDE.SEGM_FRENTES_1".format(
        db=db)
    segm_frentes_2 = path_conexion + "/{db}.SDE.URBANO/{db}.SDE.SEGM_FRENTES_2".format(
        db=db)
    segm_frentes_3 = path_conexion + "/{db}.SDE.URBANO/{db}.SDE.SEGM_FRENTES_3".format(
        db=db)
    segm_sitios_interes = path_conexion + "/{db}.SDE.URBANO/{db}.SDE.SEGM_SITIOS_INTERES".format(
        db=db)
    segm_vivienda_u = path_conexion + "/{db}.SDE.SEGM_U_VIV".format(db=db)
    #list_capas_ini = [tb_viviendas_ordenadas_dbf, tb_rutas, tb_aeus, tb_secciones, tb_subzonas, tb_rutas_lineas,tb_rutas_lineas_multifamiliar]

    #list_capas_fin = [segm_vivienda_u, segm_ruta, segm_aeu, segm_seccion, segm_subzona, segm_rutas_lineas,segm_rutas_lineas_multi]

    # [source staging dataset, SDE target, kind] where kind 2 is handled as a
    # table (MakeTableView) and 1 as a feature class (MakeFeatureLayer) below.
    list_capas = [
        [tb_viviendas_ordenadas_dbf, segm_vivienda_u, 2],
        [tb_rutas, segm_ruta, 2],
        [tb_aeus, segm_aeu, 2],
        [tb_subzonas, segm_subzona, 2],
        [tb_secciones, segm_seccion, 1],
        [tb_rutas_lineas, segm_rutas_lineas, 1],
        [tb_rutas_lineas_multifamiliar, segm_rutas_lineas_multi, 1],
        [tb_frentes_1, segm_frentes_1, 1],
        [tb_frentes_2, segm_frentes_2, 1],
        [tb_frentes_3, segm_frentes_3, 1],
        [tb_sitios_interes, segm_sitios_interes, 1],
        [tb_rutas_puntos, segm_rutas_puntos, 1],
    ]

    # NOTE(review): tb_rutas_lineas_multifamiliar is already in list_capas
    # above, so when it exists it gets appended to the target twice --
    # confirm this duplication is intentional.
    if arcpy.Exists(tb_rutas_lineas_multifamiliar):
        list_capas.append(
            [tb_rutas_lineas_multifamiliar, segm_rutas_lineas_multi, 1])

    #tb_rutas, tb_aeus, tb_secciones, tb_subzonas, tb_rutas_lineas]

    #for i,el in enumerate(list_capas_ini):
    #dir = os.path.split(el)
    #formato = dir[1].split(".")[1]

    #dir_copia = os.path.join(dir[0], "final_{}".format(dir[1]))
    #print dir_copia

    #for i, el in enumerate(list_capas_ini):
    #    dir = os.path.split(el)
    #    dir_copia = os.path.join(dir[0], "final_{}".format(dir[1]))
    #    print dir_copia


#
#    formato = dir[1].split(".")[1]
#
#    if el == tb_rutas_lineas_multifamiliar:
#        if arcpy.Exists(tb_rutas_lineas_multifamiliar):
#            list_capas.append([dir_copia, segm_rutas_lineas_multi, 1])
#
#
#    else:
#
#        if formato == 'shp':
#            list_capas.append([dir_copia, list_capas_fin[i], 1])
#        else:
#            list_capas.append([dir_copia, list_capas_fin[i], 2])
#

    conn = conx.Conexion2()
    cursor = conn.cursor()

    # Purge previously-loaded rows for each (ubigeo, zona) pair.
    # NOTE(review): SQL is built by string interpolation -- acceptable only
    # while ubigeo/zona come from trusted internal data; parameterize the
    # query if that ever changes.
    for el in data:
        ubigeo = el[0]
        zona = el[1]
        sql_query = """
                DELETE {db}.SDE.SEGM_U_RUTA where ubigeo='{ubigeo}' and zona='{zona}'
                DELETE {db}.SDE.SEGM_U_AEU where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_U_VIV  where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_U_SECCION where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_U_SUBZONA  where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_RUTAS_LINEAS  where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_RUTAS_LINEAS_MULTIFAMILIAR  where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_FRENTES_1  where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_FRENTES_2  where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_FRENTES_3  where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_SITIOS_INTERES  where ubigeo='{ubigeo}' and zona='{zona}'
                delete {db}.SDE.SEGM_RUTAS_PUNTOS  where ubigeo='{ubigeo}' and zona='{zona}'
                """.format(ubigeo=ubigeo, zona=zona, db=db)
        print sql_query
        cursor.execute(sql_query)
        conn.commit()

    conn.close()
    print list_capas

    # Append each staging dataset into its SDE target via a view/layer.
    i = 0
    for el in list_capas:
        print el[0]
        i = i + 1
        if (int(el[2]) > 1):
            # Kind 2: plain table.
            a = arcpy.MakeTableView_management(
                el[0],
                "a{}".format(i),
            )

        else:
            # Kind 1: feature class; the count is printed for progress only.
            a = arcpy.MakeFeatureLayer_management(el[0], "b{}".format(i))
            cant_el = int(arcpy.GetCount_management(a).getOutput(0))
            print 'cantidad de elementos: ', cant_el

        arcpy.Append_management(a, el[1], "NO_TEST")
コード例 #6
0
def stream_crosswalk(in_fc, HUC2_Lowr48, LYR_dir, DBF_dir, HUCdict, NoNHD,
                     csvpath):
    """Cross-walk each species in `in_fc` to the streams it intersects.

    For every row (species) of `in_fc`: save a per-species selection layer to
    LYR_dir, find the HUC2 units the species intersects, then select the
    intersecting streams from each HUC's feature class (HUCdict) and collect
    them into one DBF table per species in DBF_dir.  Species that select no
    streams are tracked in NoNHD, which is written to csvpath and returned.

    Existing per-species tables are skipped, so the function can be re-run to
    resume after interruption (a partially-built table is deleted on error).

    NOTE(review): depends on module globals `file_type`, `species_to_run`,
    `HUC2Field`, and `create_outtable` defined elsewhere in the file.
    """
    arcpy.MakeFeatureLayer_management(HUC2_Lowr48, "HUC48_lyr")
    # each row is a species
    # for each species first the HUC 2s are identified then the stream in each HUC are select and export to one master
    # table for the species
    # output is all streams for a species across all HUC2
    for row in arcpy.SearchCursor(in_fc):
        lyr = file_type + "_{0}_lyr".format(
            row.EntityID)  # layer name for selected features
        out_layer = LYR_dir + os.sep + lyr + ".lyr"
        table = "{1}_Streams_{0}.dbf".format(row.EntityID, file_type)
        temptable = os.path.join(DBF_dir, table)
        # Resume support: a finished table means this species is done.
        if arcpy.Exists(temptable):
            continue

        # NOTE NOTE HARD CODE TO ENTITYID
        entid = "{0}".format(row.EntityID)  # Extract entity ID number
        if entid not in species_to_run:  # skips species not in list
            continue
        if not arcpy.Exists(out_layer):
            whereclause = "EntityID = '%s'" % entid
            print whereclause
            # Makes a feature layer that will only include current entid using whereclause
            arcpy.MakeFeatureLayer_management(in_fc, lyr, whereclause)
            print "Creating layer {0}".format(lyr)
            arcpy.SaveToLayerFile_management(lyr, out_layer, "ABSOLUTE")
            print "Saved layer file"
        if not arcpy.Exists(temptable):
            spec_location = str(out_layer)
            # check for HUC a species occurs in
            arcpy.SelectLayerByLocation_management("HUC48_lyr", "INTERSECT",
                                                   spec_location)
            arcpy.Delete_management("slt_lyr")
            arcpy.MakeFeatureLayer_management("HUC48_lyr", "slt_lyr")
            # saves all HUC2 to a list this will have duplicates they are removed using the set below
            with arcpy.da.SearchCursor("slt_lyr", HUC2Field) as cursor:
                HUC2list = sorted({row[0] for row in cursor})

            print HUC2list

            # for each value in the HUC2 set will select all stream that are with species file, and save to a master
            # species table, one table per species will include all values
            counter = 0
            try:
                for z in HUC2list:
                    print z
                    huc_fc = HUCdict[z]
                    print huc_fc

                    arcpy.Delete_management("huc_lyr")
                    arcpy.MakeFeatureLayer_management(huc_fc, "huc_lyr")

                    # NOTE NOTE would a different selection type be better or should multople be used?
                    arcpy.SelectLayerByLocation_management(
                        "huc_lyr", "INTERSECT", out_layer, "", "NEW_SELECTION")
                    count = arcpy.GetCount_management("huc_lyr")
                    print str(count) + " selected features"
                    tableview = "tbl_view_" + str(counter)
                    arcpy.Delete_management(tableview)
                    arcpy.MakeTableView_management("huc_lyr", tableview)
                    count = int(
                        arcpy.GetCount_management(tableview).getOutput(0))
                    print str(count) + " Tableview"
                    # No selected streams in this HUC: record the species as
                    # having no NHD match (so far) and move to the next HUC.
                    if count < 1:
                        print 'Zero'
                        filename = str(lyr)
                        if filename not in NoNHD:
                            NoNHD.append(filename)
                        continue

                        # NOTE NOTE if the script is stop and a table was start but not completed for a species it
                        # must be deleted before starting the script again.If a table has been created the script will
                        # move to the next species
                    if counter == 0:  # This will be for first HUC (counter =0) for the species the table is  created
                        if count != 0:
                            filename = str(lyr)
                            arcpy.TableToTable_conversion(
                                tableview, DBF_dir, table)
                            print "created table: " + str(temptable)
                            counter += 1
                            # A later HUC produced matches, so drop the
                            # provisional no-match flag for this species.
                            if filename in NoNHD:
                                NoNHD.remove(filename)
                    else:  # remaining results for additional HUC selection values will be appened to table
                        arcpy.Append_management(tableview, temptable,
                                                "NO_TEST", "", "")
                        counter += 1
                print "table {0} completed. Located at {1}".format(
                    temptable, DBF_dir)
            except Exception as err:
                # Remove the half-built table so the next run rebuilds it.
                print(err.args[0])
                arcpy.Delete_management(temptable)
                print 'Deleted partial table {0}'.format(temptable)

        else:
            print "Stream Crosswalk for {0}".format(
                lyr) + " previously populated"

    arcpy.Delete_management("HUC48_lyr")

    create_outtable(NoNHD, csvpath)
    return NoNHD
コード例 #7
0
def append_results():
    """Copy the latest cluster ellipses into the main publishing GDB.

    Appends rows from the local 'Waze_Ellipses' layer whose CLUSTER_ID is
    positive into the WazeClusters feature class, using schema-checked
    ('TEST') append semantics.
    """
    print("Copying latest results into Main GDB")
    source_layer = "Waze_Ellipses"
    target_fc = r"..\..\wazeForPublish.gdb\WazeClusters"
    cluster_filter = "CLUSTER_ID > 0"
    arcpy.Append_management(source_layer, target_fc, "TEST",
                            None, None, cluster_filter)
コード例 #8
0
ファイル: UpdateGeoDatabase.py プロジェクト: robgf/AMAPPS
                                "[ImputedDis]", "VB")

# Process: Delete Field -- drop the helper/duplicate columns created by the
# preceding field-calculation steps.
arcpy.DeleteField_management(
    TransInfoTempOut,
    "GIS_ID2;SUM_DistFl;StartDt2;EndDt2;DistFlown2;AvgConditi;ACWSD2;ACWSDrepor;WindArea2;MissingTra;ImputedDis"
)

# Process: Sort -- order the temp transect-info rows by survey/transect keys.
arcpy.Sort_management(
    TransInfoTempOut, TransInfoTempOut2,
    "SurveyNbr ASCENDING;Transect ASCENDING;Replicate ASCENDING;Crew ASCENDING;Seat ASCENDING;Obs ASCENDING",
    "UR")

# Process: Append -- add the sorted rows to the master table; 'TEST' requires
# matching schemas.
arcpy.Append_management(TransInfoTempOut2, TransInfoOut, "TEST", "", "")

# Process: Sort -- re-sort the master table, round-tripping through the temp
# table (presumably because Sort cannot write onto its own input -- confirm).
arcpy.Sort_management(
    TransInfoOut, TransInfoTempOut,
    "SurveyNbr ASCENDING;Transect ASCENDING;Replicate ASCENDING;Crew ASCENDING;Seat ASCENDING;Obs ASCENDING",
    "UR")

# Process: Sort -- write the sorted rows back over the master table.
arcpy.Sort_management(
    TransInfoTempOut, TransInfoOut,
    "SurveyNbr ASCENDING;Transect ASCENDING;Replicate ASCENDING;Crew ASCENDING;Seat ASCENDING;Obs ASCENDING",
    "UR")

# Process: Delete -- remove the intermediate observation table.
arcpy.Delete_management(ObsFileTempOut)
コード例 #9
0
ファイル: db_data_io.py プロジェクト: PDXBES/SurveyCatalog
 def append_table_to_db(self, input_table, target_table):
     # type: (str, str) -> None
     """Append input_table into target_table, then delete the staging table.

     Field mapping for the SDE target comes from
     _create_field_map_for_sde_db; 'NO_TEST' skips schema checking.
     """
     mapping = self._create_field_map_for_sde_db(input_table)
     arcpy.Append_management(input_table, target_table, "NO_TEST", mapping)
     arcpy.Delete_management(input_table)
コード例 #10
0
	# NOTE(review): fragment of a larger branch -- PhotoFeatureClass2/3, NEAR,
	# TemplateFC, Geodatabase, SRHelper, ParcelPIN, ParcelsFeatureClass are
	# defined above this view.
	arcpy.AddMessage("Step 7:  Reporting non-matched driver photos to table")

	# Cleanup matched Photos (intermediate data)

	arcpy.DeleteField_management(PhotoFeatureClass3, "IN_FID;NEAR_FID;NEAR_DIST")
	arcpy.Delete_management(NEAR)
	# Rename 'Path' to 'Path2' on both photo FCs by copy-then-drop (AddField +
	# CalculateField + DeleteField), since fields cannot be renamed in place.
	arcpy.AddField_management(PhotoFeatureClass2, "Path2", "TEXT", "", "", "150", "", "NULLABLE", "NON_REQUIRED", "")
	arcpy.CalculateField_management(PhotoFeatureClass2, "Path2", "!Path!", "PYTHON", "")
	arcpy.AddField_management(PhotoFeatureClass3, "Path2", "TEXT", "", "", "150", "", "NULLABLE", "NON_REQUIRED", "")
	arcpy.CalculateField_management(PhotoFeatureClass3, "Path2", "!Path!", "PYTHON", "")
	arcpy.DeleteField_management(PhotoFeatureClass2, "Path")
	arcpy.DeleteField_management(PhotoFeatureClass3, "Path")
	# Mark all rows in FC3 as not reversed before merging in FC2.
	arcpy.AddField_management(PhotoFeatureClass3, "REVERSE", "TEXT", "", "", "5", "", "NULLABLE", "NON_REQUIRED", "")
	arcpy.CalculateField_management(PhotoFeatureClass3, "REVERSE", "\"NO\"", "PYTHON", "")

	arcpy.Append_management(PhotoFeatureClass2, PhotoFeatureClass3, "NO_TEST", "", "")

	#Create Photo Attachments

	ParcelPointClassHelper = Geodatabase + "\\PointsTemp"
	ParcelPointHelper = Geodatabase + "\\PhotoPoints"
	# PhotoPoints is seeded from the template FC, projected, and given the
	# parcel PIN field; parcel centroids land in the temp points FC.
	arcpy.FeatureClassToFeatureClass_conversion(TemplateFC,Geodatabase,"PhotoPoints")
	arcpy.DefineProjection_management(ParcelPointHelper, SRHelper)
	arcpy.AddField_management(ParcelPointHelper, ParcelPIN, "TEXT", "", "", "50", ParcelPIN, "NULLABLE", "NON_REQUIRED")
	arcpy.FeatureToPoint_management(ParcelsFeatureClass, ParcelPointClassHelper, "INSIDE")
else:
	pass

if CameraInput == 'Associate Geotagged Photo with Point (photo has location)':

	# ______________________________________________________________________________#
コード例 #11
0
    # NOTE(review): fragment of a larger routine -- `cleveImport` (folder of
    # input GDBs) and `cle3GIS` (target GDB) are defined above this view.
    ### For the append, set the workspace environment and list the feature classes/tables in the Input GDB ###
    dirName = os.listdir(cleveImport)

    for file in dirName:
        # (`file` shadows the Python 2 builtin of the same name -- harmless here.)

        gdb = cleveImport + '\\' + file
        arcpy.env.workspace = gdb

        ### Append tables into GDB ###
        appendTableList = arcpy.ListTables()
        print('\nAppending Tables:')

        for table in appendTableList:

            try:
                # Target table of the same name must already exist in cle3GIS.
                arcpy.Append_management(table, cle3GIS + '\\' + table,
                                        'NO_TEST')
                print(table)

            except:
                # Bare except keeps the batch running; the GP messages carry
                # the failure reason.
                print('***UNABLE TO APPEND ' + str(table))
                print(arcpy.GetMessages())

        ### Append feature classes into GDB ###
        appendFeatureList = arcpy.ListFeatureClasses()
        print('\nAppending Feature Classes:')

        for fc in appendFeatureList:
            try:
                arcpy.Append_management(fc, cle3GIS + "\\" + fc, 'NO_TEST')
                print(fc)
コード例 #12
0
def sbdd_ProcessAddress (myFD, myFL):
   """Build broadband coverage polygons named "Address" from address points.

   For each distinct combination of provider / technology / speed tier found
   in the point layer (via a Frequency table), select the matching points and
   turn them into a coverage polygon: dense selections (> 50 points) go
   through a point->raster->polygon conversion plus a 100 ft buffer, sparse
   selections get a plain 500 ft buffer. The first polygon set is renamed to
   "Address"; later ones are appended to it. Relies on module-level globals
   theST / thePGDB and helper sbdd_ExportToShape — assumed defined elsewhere
   in this script (Python 2 only: uses the `<>` operator).

   myFD -- feature dataset path; myFL -- point feature layer name inside it.
   """
   arcpy.AddMessage("     Begining Address Processing")
   theFields = ["FRN","PROVNAME","DBANAME","TRANSTECH","MAXADDOWN","MAXADUP",
                "TYPICDOWN","TYPICUP","Provider_Type","ENDUSERCAT"]
   chkFC = ["Address_frq","Address"]
   # Clear any leftovers from a previous run.
   for cFC in chkFC:
       if arcpy.Exists(cFC):
           arcpy.Delete_management(cFC)
   if int(arcpy.GetCount_management(myFD + "/" + myFL).getOutput(0)) > 1:
       arcpy.Frequency_analysis(myFD + "/" + myFL, "Address" + "_frq", theFields, "")
       #open a cursor loop to get all the distinct values
       myCnt = 1
       # Only rows whose speed codes fall in the qualifying ranges are processed
       # (codes are stored as strings, hence the OR chains rather than ranges).
       theQ = "(MAXADDOWN = '3' OR MAXADDOWN = '4' OR MAXADDOWN = '5' OR MAXADDOWN = '6' OR " + \
              " MAXADDOWN = '7' OR MAXADDOWN = '8' OR MAXADDOWN = '9' OR MAXADDOWN = '10' OR MAXADDOWN = '11') AND " + \
              "(MAXADUP = '2' OR MAXADUP = '3' OR MAXADUP = '4' OR MAXADUP = '5' OR MAXADUP = '6' OR " + \
              " MAXADUP = '7' OR MAXADUP = '8' OR MAXADUP = '9' OR MAXADUP = '10' OR MAXADUP = '11' )"
       for row in arcpy.SearchCursor("Address" + "_frq", theQ):
           theProviderType=row.getValue("Provider_Type")
           theEndUserCat=row.getValue("ENDUSERCAT")

           # Strip apostrophes so the values can be embedded in SQL below.
           theProvName = row.getValue("PROVNAME").replace("'","")
           theDBA = row.getValue("DBANAME").replace("'","")
           theFRN = row.getValue("FRN")
           theTransTech = row.getValue("TRANSTECH")
           theAdUp = row.getValue("MAXADUP")
           theAdDown = row.getValue("MAXADDOWN")
           theTyUp = row.getValue("TYPICUP")
           theTyDown = row.getValue("TYPICDOWN")
           theTyUpQ = ""
           theTyDownQ = ""
           # Normalize the "typical" speed values: ZZ / null / blank collapse
           # to sentinel names used both for output naming and the where clause.
           if theTyUp == "ZZ":
               theTyUp = "ZZ"  #used for naming / logic on calculating
               theTyUpQ = "TYPICUP = 'ZZ'"  #used as a selection set
           elif theTyUp == None:
               theTyUp = "IsNull"  #used for naming / logic on calculating
               theTyUpQ = "TYPICUP Is Null"  #used as a selection set
           elif theTyUp == " ":
               theTyUp = "IsNull"
               theTyUpQ = "TYPICUP = ' '"
           else:
               theTyUp = str(abs(int(theTyUp)))
               theTyUpQ = "TYPICUP = '" + theTyUp + "'"
           if theTyDown == "ZZ":
               theTyDown = "ZZ"  #used for naming / logic on calculating
               theTyDownQ = "TYPICDOWN = 'ZZ'"  #used as a selection set
           elif theTyDown == None:
               theTyDown = "IsNull"
               theTyDownQ = "TYPICDOWN Is Null"
           elif theTyDown == " ":
               theTyDown = "IsNull"
               theTyDownQ = "TYPICDOWN = ' '"
           else:
               theTyDown = str(abs(int(theTyDown)))
               theTyDownQ = "TYPICDOWN = '" + theTyDown + "'"
           # Select exactly the points belonging to this frequency combination.
           theQry = "FRN = '" + theFRN + "'"
           theQry = theQry + " AND TRANSTECH = " + str(theTransTech)
           theQry = theQry + " AND MAXADDOWN = '" + theAdDown + "' AND MAXADUP = '"
           theQry = theQry + theAdUp + "' AND " + theTyUpQ + " AND " + theTyDownQ
           myFLName = theFRN + str(theTransTech) + theAdUp + theAdDown + theTyUp + theTyDown
           arcpy.MakeFeatureLayer_management(myFD + "/" + myFL, myFLName, theQry)
           if int(arcpy.GetCount_management(myFLName).getOutput(0)) > 0 :  #originally 1 for the raster case
               # Intermediate dataset names: _x points, _g raster, _p polygons,
               # _pb buffered polygons.
               outPT = theST + theFRN + "_" + str(theTransTech) + "_" + theAdDown + "_" + \
                       theAdUp + "_" + theTyDown + "_" + theTyUp + "_x" #the selection of points
               outRT = theST + theFRN + "_" + str(theTransTech) + "_" + theAdDown + "_" + \
                       theAdUp + "_" + theTyDown + "_" + theTyUp + "_g" #the raster grid
               inPly = theST + theFRN + "_" + str(theTransTech) + "_" + theAdDown + "_" + \
                       theAdUp + "_" + theTyDown + "_" + theTyUp + "_p" #the output of grid poly
               bfPly = theST + theFRN + "_" + str(theTransTech) + "_" + theAdDown + "_" + \
                       theAdUp + "_" + theTyDown + "_" + theTyUp + "_pb" #the output of buffer
               chkFC = [outPT, outRT, inPly, bfPly]
               for cFC in chkFC:
                   if arcpy.Exists(cFC):
                       arcpy.Delete_management(cFC)
               del cFC, chkFC
               #first create a feature class of the selected points
               arcpy.FeatureClassToFeatureClass_conversion(myFLName, thePGDB, outPT)
               arcpy.RepairGeometry_management(outPT)
               arcpy.Delete_management(myFLName)
               if int(arcpy.GetCount_management(outPT).getOutput(0)) > 50:
                   #arcpy.AddMessage("          processing by raster point: " + outPT)
                   #second covert the selection to a grid data set (e.g. raster)
                   arcpy.PointToRaster_conversion(outPT, "FRN", outRT, "", "", 0.0028)
                   theH = arcpy.Describe(outRT).Height
                   theW = arcpy.Describe(outRT).Width

                   # NOTE(review): rasters of 2 cells or fewer in either
                   # dimension are silently skipped — confirm intentional.
                   if int(theH) > 2 and int(theW) > 2:
                       #third convert the rasters back to a polygon
                       arcpy.RasterToPolygon_conversion(outRT, inPly, "NO_SIMPLIFY", "")
                       # Re-attach the attribute combination to the polygons.
                       arcpy.AddField_management (inPly, "FRN", "TEXT", "", "", 10)
                       arcpy.AddField_management (inPly, "PROVNAME", "TEXT", "", "", 200)
                       arcpy.AddField_management (inPly, "DBANAME", "TEXT", "", "", 200)
                       arcpy.AddField_management (inPly, "TRANSTECH", "SHORT", "", "", "")
                       arcpy.AddField_management (inPly, "MAXADDOWN", "TEXT", "", "", 2)
                       arcpy.AddField_management (inPly, "MAXADUP", "TEXT", "", "", 2)
                       arcpy.AddField_management (inPly, "TYPICDOWN", "TEXT", "", "", 2)
                       arcpy.AddField_management (inPly, "TYPICUP", "TEXT", "", "", 2)
                       arcpy.AddField_management (inPly, "State", "TEXT", "", "", 2)
                       arcpy.AddField_management (inPly, "Provider_Type", "SHORT", "", "", "")
                       arcpy.AddField_management (inPly, "ENDUSERCAT", "TEXT", "", "", 2)



                       arcpy.CalculateField_management(inPly, "FRN", "'" + theFRN + "'" ,"PYTHON")
                       arcpy.CalculateField_management(inPly, "PROVNAME", r"'" + theProvName + "'" ,"PYTHON")
                       arcpy.CalculateField_management(inPly, "DBANAME", r"'" + theDBA + "'" ,"PYTHON")
                       arcpy.CalculateField_management(inPly, "TRANSTECH", theTransTech, "PYTHON")
                       arcpy.CalculateField_management(inPly, "MAXADDOWN", "'" + theAdDown + "'" ,"PYTHON")
                       arcpy.CalculateField_management(inPly, "MAXADUP", "'" + theAdUp + "'" ,"PYTHON")
                       #arcpy.AddMessage("theProvider_type: " + str(theProviderType))
                       if theTyDown <> "IsNull":
                           arcpy.CalculateField_management(inPly, "TYPICDOWN", "'" + theTyDown + "'" ,"PYTHON")
                       if theTyUp <> "IsNull":
                           arcpy.CalculateField_management(inPly, "TYPICUP", "'" + theTyUp + "'" ,"PYTHON")
                       arcpy.CalculateField_management(inPly, "State", "'" + theST + "'" ,"PYTHON")
                       arcpy.CalculateField_management(inPly, "Provider_Type", theProviderType,"PYTHON")
                       arcpy.CalculateField_management(inPly, "ENDUSERCAT", "'" + theEndUserCat + "'" ,"PYTHON")
                       #arcpy.AddMessage("theProvider_type: " + str(theProviderType))
                       arcpy.Buffer_analysis(inPly, bfPly, "100 Feet", "FULL", "ROUND", "LIST", theFields)
                       if myCnt == 1:  #this is the first time through, rename the bfPly to Address
                           arcpy.Rename_management(bfPly,"Address")
                       else: #otherwise append it to the first one through
                           arcpy.Append_management([bfPly], "Address")
                   del theH, theW
               #then buffer them
               else:
                   arcpy.AddMessage("          processing by buffering: " + outPT)
                   arcpy.Buffer_analysis(outPT, bfPly, "500 Feet", "FULL", "ROUND", "LIST", theFields)
                   if myCnt == 1:  #this is the first time through, rename the bfPly to Address
                       arcpy.Rename_management(bfPly,"Address")
                   else: #otherwise append it to the first one through
                       arcpy.Append_management([bfPly], "Address")
               # Clean up the per-combination intermediates.
               chkFC = [outPT, outRT, inPly, bfPly]
               for cFC in chkFC:
                   if arcpy.Exists(cFC):
                       arcpy.Delete_management(cFC)
               del outPT, outRT, inPly, bfPly, cFC, chkFC
               myCnt = myCnt + 1
           del theProvName, theDBA, theFRN, theTransTech, theAdUp, theAdDown, theTyUp, \
               theTyUpQ, theTyDown, theTyDownQ, theQry, myFLName, theProviderType,theEndUserCat
       sbdd_ExportToShape("Address")
       arcpy.Delete_management("Address_frq")
       del row, myCnt, theFields, theQ, myFL, myFD
   return ()
コード例 #13
0
            'Extruded'
        ]
        for item in del_fields:
            arcpy.DeleteField_management(results_FC, item)

# Remove temporary processing layers from the data frame: any layer whose
# final '_'-separated name token is "Projected" was created during processing.
for layer in arcpy.mapping.ListLayers(mxd, "", df):
    name_suffix = layer.name.split('_')[-1]
    if name_suffix == "Projected":
        arcpy.mapping.RemoveLayer(df, layer)

# Find and remove problematic feature class names (names containing '/') by
# appending their features to an already-existing corrected feature class, or
# renaming them to the corrected name derived from the KML FolderPath.
arcpy.env.workspace = results_dataset
for FC in arcpy.ListFeatureClasses():
    append_needed = False
    rename_needed = False
    FC_correct_name = None
    with arcpy.da.SearchCursor(FC, ['Name', 'FolderPath']) as SC:
        # next(SC, None) instead of next(SC): an empty feature class no
        # longer raises StopIteration, it is simply skipped.
        row = next(SC, None)
        if row is not None:
            slash_count = row[0].count('/')
            # `!=` replaces the Python-2-only `<>` operator (same semantics).
            if slash_count != 0:
                # Rebuild a safe name from the folder path; spaces become '_'.
                FC_correct_name = results_dataset + '\\' + row[1].split("/")[
                    -1 - slash_count].replace(' ', '_')
                if arcpy.Exists(FC_correct_name):
                    append_needed = True
                else:
                    rename_needed = True
    if append_needed:
        # Merge into the existing corrected FC, then drop the bad-named one.
        arcpy.Append_management(FC, FC_correct_name)
        arcpy.Delete_management(FC)
    if rename_needed:
        arcpy.Rename_management(FC, FC_correct_name)
コード例 #14
0
def arcAppendFeatures(wd, inList, outName, template="template.shp"):
    """Create *outName* in workspace *wd* and append all of *inList* into it.

    The geometry type and spatial reference come from *template*; the field
    schema is taken from the first feature class in *inList*. M and Z values
    are disabled on the new feature class.
    """
    import arcpy
    arcpy.env.workspace = wd
    template_desc = arcpy.Describe(template)
    arcpy.CreateFeatureclass_management(wd, outName, template_desc.shapeType,
                                        inList[0], 'DISABLED', 'DISABLED',
                                        template_desc.spatialReference)
    arcpy.Append_management(inList, outName)
コード例 #15
0
    def run(self):
        """Fetch OSM markets in a buffer around the selected communities and
        merge them into the markets table.

        Workflow: copy communities flagged 'Auswahl' to a scratch FC, buffer
        them, query OSM shops inside the buffer polygons, clip the results to
        the buffer, drop markets that lie inside the selected communities
        themselves (already covered), append the remainder to the markets
        table and remove near-duplicates.
        """
        communities = self.folders.get_table('Zentren')
        ws_tmp = arcpy.env.scratchGDB
        sel_comm = os.path.join(ws_tmp, 'sel_comm')
        feat_buffered = os.path.join(ws_tmp, 'feat_buffered')
        markets_table = self.folders.get_table(self._markets_table,
                                               check=False)
        markets_tmp = self.folders.get_table('markets_tmp', check=False)
        markets_buffer = self.folders.get_table('markets_buffer', check=False)
        markets_buffer_output = self.folders.get_table(self._markets_buffer,
                                                       check=False)
        markets_com = os.path.join(ws_tmp, 'markets_com')

        def del_tmp():
            # Delete every intermediate dataset; safe to call repeatedly.
            for f in [
                    sel_comm, feat_buffered, markets_buffer, markets_tmp,
                    markets_com
            ]:
                arcpy.Delete_management(f)

        del_tmp()

        # Only communities flagged for selection take part in the analysis.
        arcpy.FeatureClassToFeatureClass_conversion(communities,
                                                    ws_tmp,
                                                    os.path.split(sel_comm)[1],
                                                    where_clause='Auswahl<>0')

        # ToDo: buffer -> multi_poly -> markets -> markets in selected communities -> remove those from markets in multi_poly -> to db

        arcpy.AddMessage('Analysiere Pufferbereich...')
        # create buffer area
        arcpy.Buffer_analysis(sel_comm,
                              feat_buffered,
                              self.par.radius_markets.value,
                              dissolve_option='NONE')
        try:
            self.output.remove_layer(self._out_layer_name)
        except Exception:
            # Layer may simply not exist yet (was a bare except, which would
            # also have swallowed KeyboardInterrupt/SystemExit).
            pass

        arcpy.Delete_management(markets_buffer_output)
        arcpy.Dissolve_management(feat_buffered, markets_buffer_output, "", "",
                                  "SINGLE_PART", "DISSOLVE_LINES")

        # Approximate the buffer area with simple polygons for the OSM query.
        multi_poly = minimal_bounding_poly(feat_buffered)
        epsg = self.parent_tbx.config.epsg
        multi_poly = [[Point(p.X, p.Y, epsg=epsg) for p in poly]
                      for poly in multi_poly]
        reader = OSMShopsReader(epsg=epsg)
        markets = []
        arcpy.AddMessage('Ermittle Märkte im Randgebiet...')
        count = 1000  # overall cap on fetched shops across all polygons
        for poly in multi_poly:
            m = reader.get_shops(poly, count=count - len(markets))
            markets += m

        if len(markets) > 0:
            # pro license only
            #arcpy.SymDiff_analysis(feat_buffered, sel_comm, buffer_diff)

            #self.parent_tbx.delete_rows_in_table(self._markets_table,
            #where='is_buffer=1')
            arcpy.CreateFeatureclass_management(os.path.split(markets_tmp)[0],
                                                os.path.split(markets_tmp)[1],
                                                template=markets_table)
            # Continue numbering after the highest existing market id.
            ids = [
                id
                for id, in self.parent_tbx.query_table(markets_table, ['id'])
            ]
            start_id = max(ids) + 1 if ids else 0
            markets = self.parse_meta(markets)
            self.markets_to_db(markets,
                               tablename=os.path.split(markets_tmp)[1],
                               is_buffer=True,
                               start_id=start_id,
                               is_osm=True)
            arcpy.Clip_analysis(markets_tmp, feat_buffered, markets_buffer)
            arcpy.Clip_analysis(markets_buffer, sel_comm, markets_com)
            # Markets inside the selected communities are already covered by
            # the main table — remove them from the buffer set. The `with`
            # block guarantees the cursor is released.
            with arcpy.da.SearchCursor(markets_com, ['id']) as cursor:
                in_com_ids = [str(id) for id, in cursor]
            if in_com_ids:
                # Guard: 'id IN ()' with an empty list is invalid SQL.
                where = 'id IN ({})'.format(','.join(in_com_ids))
                self.parent_tbx.delete_rows_in_table(markets_buffer, where)
            arcpy.Append_management(markets_buffer, markets_table)

            arcpy.AddMessage('Entferne Duplikate...')
            n = remove_duplicates(markets_table,
                                  'id',
                                  match_field='id_kette',
                                  where='is_buffer=1',
                                  distance=50)
            arcpy.AddMessage('{} Duplikate entfernt...'.format(n))

            self.set_ags()
        del_tmp()
コード例 #16
0
                arcpy.DeleteField_management(inputfc, i.name)

    ###########################################################################
    # Refresh the "final" feature classes from staging: strip editor-tracking
    # fields, create the finals on first run, empty them, then re-append the
    # staging rows using an explicit field mapping built from the target.
    deleteeditfields(facilitiesstaging)
    clearWSLocks(StagingGDB)
    deleteeditfields(violationsstaging)
    if not arcpy.Exists(facilitiesfinal):
        arcpy.Copy_management(facilitiesstaging, facilitiesfinal)
    if not arcpy.Exists(violationsfinal):
        arcpy.Copy_management(violationsstaging, violationsfinal)
    # Truncate-and-reload pattern: finals always mirror staging after this.
    arcpy.DeleteRows_management(facilitiesfinal)
    arcpy.DeleteRows_management(violationsfinal)
    ffieldmappings = arcpy.FieldMappings()
    ffieldmappings.addTable(facilitiesfinal)
    arcpy.Append_management(facilitiesstaging,
                            facilitiesfinal,
                            schema_type="NO_TEST",
                            field_mapping=ffieldmappings)
    vfieldmappings = arcpy.FieldMappings()
    vfieldmappings.addTable(violationsfinal)
    arcpy.Append_management(violationsstaging,
                            violationsfinal,
                            schema_type="NO_TEST",
                            field_mapping=vfieldmappings)

    ###############################################################################
    # CREATE RELATIONSHIP CLASS IF NOT EXISTS
    if not arcpy.Exists(facilitiesviolations_relationship):
        arcpy.CreateRelationshipClass_management(
            origin_table=facilitiesfinal,
            destination_table=violationsfinal,
            out_relationship_class=facilitiesviolations_relationship,
コード例 #17
0
# Rename the output field of each field map (input1..input4 and the outfield*
# names are defined earlier in the script), then register the maps and run
# the append. Python 2 script (print statements).
output2.name = (outfield2)
input2.outputField = output2
# Set the Name of the Field output from this field map.
#
output3 = input3.outputField
output3.name = (outfield3)
input3.outputField = output3
# Set the Name of the Field output from this field map.
#
output4 = input4.outputField
output4.name = (outfield4)
input4.outputField = output4

# Add the custom fieldmaps into the fieldmappings object.
#
fieldmappings.addFieldMap(input1)
fieldmappings.addFieldMap(input2)
fieldmappings.addFieldMap(input3)
fieldmappings.addFieldMap(input4)

try:
    print "Appending data. . ."
    # Process: Append the feature classes into the empty feature class
    arcpy.Append_management(inFC, outFC, schemaType, fieldmappings, subtype)

# NOTE(review): bare except — narrowing to arcpy.ExecuteError would avoid
# masking unrelated errors.
except:
    # If an error occurred while running a tool print the messages
    print arcpy.GetMessages()

print "Append data to " + featureclassout + " " + " complete. . ."
コード例 #18
0
def main_func():
    """Copy Connect_Property data from labels.gdb into the SPOWN pre-prod SDE.

    Backs up the existing target to *_o, truncates it, builds a field mapping
    from label fields to SPOWN fields and appends the data. Progress goes to
    a log file; the outcome is optionally emailed with the log attached.
    """
    start = datetime.datetime.now()
    err_msg = None

    global emailAttachments

    # Pre-initialize so log_close below cannot hit an unbound name if
    # create_log itself raises inside the try block.
    log = None
    try:
        log_path = os.path.join(sys.path[0], 'logs')
        log, logfile = etgLib.create_log(log_path, log_name)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if log is None:
            exit_sys(log, "can't create a log file", start)

        emailAttachments = [logfile]

        lbl_gdb = os.path.join(wkgFolder, labelGDBname)
        # set workspace
        arcpy.env.workspace = lbl_gdb

        etgLib.log_start(log)
        etgLib.log_info(log, "script parameters:")
        etgLib.log_info(log, "------------------------")
        etgLib.log_info(log, "working folder: {0}".format(wkgFolder))
        etgLib.log_info(log, "prepSPOWNpath: {0}".format(prepSPOWNpath))
        etgLib.log_info(log, "label gdb: {0}".format(lbl_gdb))

        ## ========================================
        ## Process: copy data from labels.gdb to SPOWN in pre-prod
        ## ========================================
        etgLib.log_info(
            log, 'copy Connect_Property from labels.gdb to SPOWN in pre-prod',
            True)
        # copy Connect_Property to *_o in SPOWN sde
        etgLib.log_info(log, 'copy Connect_Property to *_o in SPOWN sde ...')

        inFCpath = os.path.join(prepSPOWNpath, spown_property_connect)
        if arcpy.Exists(inFCpath):
            # Keep a backup (*_o) before emptying the live feature class.
            out_fc_name = spown_property_connect + "_o"
            outFCpath = os.path.join(prepSPOWNpath, out_fc_name)
            etgLib.log_info(
                log, 'Copying: {0} to {1}'.format(spown_property_connect,
                                                  out_fc_name))
            arcpy.Copy_management(inFCpath, outFCpath, "FeatureClass")

            #  truncate and append
            etgLib.log_info(log, 'truncate Connect_Property in SPOWN sde ...')
            arcpy.TruncateTable_management(inFCpath)

        # build the FieldMappings: (target field, source field) pairs
        etgLib.log_info(log, 'build the FieldMappings ...')
        list_fields_to_map = []

        list_fields_to_map.append(
            ('PROPERTY_ID', 'tempDsslvID_TP_PROPERTY_LINK_PROPERTY_ID'))
        list_fields_to_map.append(
            ('LEGAL_DESCRIPTION', 'TP_PROPERTY_LEGAL_DESCRIPTION'))
        list_fields_to_map.append(('TITLE_NO', 'TP_PROPERTY_TITLE_NO'))
        list_fields_to_map.append(
            ('SUBADDRESS_ID', 'TP_PROPERTY_SUBADDRESS_ID'))
        list_fields_to_map.append(
            ('ADDRESS_SOURCE', 'TP_PROPERTY_ADDRESS_SOURCE'))
        list_fields_to_map.append(
            ('COUNT_PARCEL_ID',
             'tempDsslvID_COUNT_TP_PROPERTY_LINK_PARCEL_ID'))

        inFCpath = os.path.join(lbl_gdb, labels_property_connect)
        outFCpath = os.path.join(prepSPOWNpath, spown_property_connect)

        fieldmappings = etgLib.get_field_mapping(inFCpath, outFCpath,
                                                 list_fields_to_map)

        # appendinng data from labels.gdb to SPOWN preprod sde
        etgLib.log_info(
            log, 'appendinng data from labels.gdb to SPOWN preprod sde  ...')
        arcpy.Append_management([inFCpath],
                                outFCpath,
                                "NO_TEST",
                                field_mapping=fieldmappings)

    except Exception as e:
        err_msg = "ERROR while running {0}: {1}".format(script_name, e)

    # Only close the log if it was actually created.
    if log is not None:
        etgLib.log_close(log, start)
    print("Finished!!!  Please check the result in ArcMap or ArcCatalog")

    if sendMail:
        # `is not None` instead of `!= None` (PEP 8 identity comparison).
        if err_msg is not None:
            emailSubject = 'Run {} - Failed'.format(script_name)
        else:
            emailSubject = 'Run {} - Successful'.format(script_name)

        etgLib.send_email(emailFrom, emailTo, emailSubject, emailText,
                          emailAttachments, smtpServer)
コード例 #19
0
                # define field names
                # Source and target share the same schema (German address
                # fields), so the mapping is one-to-one.
                targetFields = [
                    "Strasse", "Hausnummer", "Zusatz", "Stadt", "PLZ",
                    "addressID", "geoSource"
                ]
                sourceFields = [
                    "Strasse", "Hausnummer", "Zusatz", "Stadt", "PLZ",
                    "addressID", "geoSource"
                ]

                # build field mappings
                useFieldmappings = createFieldMappings(targetFields, interm,
                                                       sourceFields)

                # append point coordinates
                arcpy.Append_management(interm, addressShp, "NO_TEST",
                                        useFieldmappings)

                # Intermediate FC is consumed; remove it per address.
                if arcpy.Exists(interm):
                    arcpy.Delete_management(interm)

            # NOTE(review): bare except — any failure for this address is
            # reduced to a console line; consider logging the exception.
            except:
                print("Unable to add " + str(thisAdd))

    # NOTE(review): bare except at the batch level likewise hides the cause.
    except:
        arcpy.AddMessage(
            "Unable to append additional points to house address data.")

# delete intermediate data
if arcpy.Exists(interm):
    arcpy.Delete_management(interm)
コード例 #20
0
#-------------------------------------------------------------------------------


def main():
    """Placeholder entry point; the real work runs at module level below."""
    return None


if __name__ == '__main__':
    main()

# Import arcpy module

# Merge every polygon shapefile in the temp2 workspace into allpolys.shp.
# Python 2 script (print statements).
import arcpy
from arcpy import env
arcpy.env.workspace = "D:\\projects\\ak_fire\\gis\\data\\temp2"

# Set to overwrite
arcpy.env.overwriteOutput = True
file_list = arcpy.ListFeatureClasses()
target_shp = "allpolys.shp"
##min_n = 3501
##max_n = 4179
##target_shp = "allpolys" + str(min_n) + "_" + str(max_n) + ".shp"

# Seed the target with poly1.shp, then append everything in the workspace.
# NOTE(review): file_list likely includes poly1.shp itself, which would then
# be appended on top of the seeded copy (duplicate features) — confirm.
arcpy.CopyFeatures_management("poly1.shp", target_shp)
print "list of files:" + str(file_list)
for f in file_list:
    print f
    arcpy.Append_management([f], target_shp, "NO_TEST")
print "Done!"
コード例 #21
0
                # Build a 1x2 fishnet over the current buffer's extent so the
                # buffer can be clipped to its top half (OID=2 selects the
                # upper cell — TODO confirm the OID/orientation assumption).
                desc = arcpy.Describe(current_buffer)
                current_fishnet=os.path.join(env.workspace,naming+"current_fishnet")
                arcpy.CreateFishnet_management(current_fishnet,str(desc.extent.lowerLeft),str(desc.extent.XMin) + " " + str(desc.extent.YMax + 10),"","","2", "1","","NO_LABELS", current_buffer,'POLYGON')

                #clip current end point buffer by top half of fishnet
                #select top half of fishnet
                top_net=arcpy.MakeFeatureLayer_management(current_fishnet,"top_net.lyr","OID=2")
                clip_buffer=os.path.join(env.workspace,naming+"clipped_buffer")
                arcpy.Clip_analysis(current_buffer, top_net, clip_buffer)

                #if this is first buffer
                if row[0]==1:
                        #create the clipped buffer feature class
                        clipped_buffer_fc=arcpy.CreateFeatureclass_management(env.workspace, naming+"_clipped_buffer_fc", "POLYGON",clip_buffer,"DISABLED", "DISABLED", clip_buffer)
                #append clipped buffer to clipped buffer fc
                arcpy.Append_management(clip_buffer,clipped_buffer_fc)

del search

#mask the DEM by the clipped buffers
##arcpy.AddMessage("\t\tMasking DEM...")
masked_dem = ExtractByMask(dem, clipped_buffer_fc)
masked_dem.save(os.path.join(env.workspace,naming+"_maskedraster"))

#calculate the zonal minimum elevation in each clipped buffer
# Minimum elevation per clipped-buffer zone, keyed by OBJECTID.
outZonalStats = ZonalStatistics(clipped_buffer_fc, "OBJECTID", masked_dem,"MINIMUM","DATA")
outZonalStats.save(os.path.join(env.workspace,naming+"endpoint_min_by_zones"))

#vectorize min zone rasters
min_points=os.path.join(env.workspace,naming+"min_points")
arcpy.RasterToPoint_conversion(outZonalStats, min_points, "Value")
コード例 #22
0
def perennialNetwork(nhd_orig_flowline_path, nhd_area_path, nhd_waterbody_path, outpath):
    """Derive a perennial stream network from NHD high-resolution data.

    Starts with flowlines explicitly coded perennial (FCODE 46006), then runs a
    series of spatial-selection steps that add artificial-path and
    intermittent/connector segments filling gaps in the network (major rivers in
    perennial area polygons, marshes, small lakes/ponds, named and unnamed
    intermittent reaches).  Each addition is tagged in a 'Source' text field with
    the workflow step that added it.  'False perennial tribs' and short isolated
    segments are removed before the final network is written to `outpath`.

    Parameters:
        nhd_orig_flowline_path -- input NHDFlowline feature class
        nhd_area_path          -- NHDArea polygon feature class
        nhd_waterbody_path     -- NHDWaterbody polygon feature class
        outpath                -- output path for the perennial network

    Depends on a module-level helper `findGaps(lines, network[, flag])`
    defined elsewhere in this file, plus the arcpy environment.
    """

    #  environment settings
    # NOTE(review): overwriteOutput expects a boolean; the string 'TRUE' is
    # truthy so this works, but True would be clearer.
    arcpy.env.overwriteOutput = 'TRUE'

    # -- make copy of original nhd flowlines
    nhd_flowlines = arcpy.CopyFeatures_management(nhd_orig_flowline_path, 'in_memory/nhd_flowlines')

    # add source field to track which part of workflow perennial network flowline was added
    arcpy.AddField_management(nhd_flowlines, 'Source', 'TEXT', '', '', 75)

    # --perennial coded lines--

    # select lines from original nhd that are coded as perennial
    arcpy.MakeFeatureLayer_management(nhd_flowlines, 'nhd_flowlines_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46006 """)
    flowline_per = arcpy.CopyFeatures_management('nhd_flowlines_lyr', 'in_memory/flowline_per')

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            row[0] = "1. Perennial Code"
            cursor.updateRow(row)

    # --add missing major rivers--
    # --subsetted artificial coded lines that are in perennial nhd area polygons--

    # select perennial coded nhd area polygons
    arcpy.MakeFeatureLayer_management(nhd_area_path, 'nhd_area_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_area_lyr', 'NEW_SELECTION', """ "FCODE" = 46006 """)

    # select and dissolve artificial coded nhd lines that are within perennial nhd area polygons
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN', 'nhd_area_lyr', '', 'SUBSET_SELECTION')
    flowline_art_code = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/flowline_art_code', 'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')

    # remove short lines (< 50 m) that act as artificial connectors to flowlines outside perennial nhd area polygons
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'INTERSECT', 'nhd_area_lyr', '1 Meters', 'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'SUBSET_SELECTION', """ "FCODE" <> 55800 """)
    arcpy.MakeFeatureLayer_management(flowline_art_code, 'flowline_art_code_lyr')
    arcpy.SelectLayerByLocation_management('flowline_art_code_lyr', 'INTERSECT', 'nhd_flowlines_lyr', '', 'NEW_SELECTION')
    with arcpy.da.UpdateCursor('flowline_art_code_lyr', ['SHAPE@Length']) as cursor:
        for row in cursor:
            if row[0] < 50:
                cursor.deleteRow()

    # remove lines that end where canal starts
    mr_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_art_code, 'in_memory/mr_end_pt', "END")
    arcpy.MakeFeatureLayer_management(mr_end_pt, 'mr_end_pt_lyr')
    # NOTE(review): FCODEs 3601 and 3603 look like truncated canal/ditch codes
    # (NHD uses 33601/33603) -- confirm against the NHD FCode table.
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 33600 OR "FCODE" = 3601 OR "FCODE" = 3603""")
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'INTERSECT', flowline_art_code, '1 Meters', 'SUBSET_SELECTION')

    canal_start_pt = arcpy.FeatureVerticesToPoints_management('nhd_flowlines_lyr', 'in_memory/canal_start_pt', "START")
    arcpy.SelectLayerByLocation_management('mr_end_pt_lyr', 'INTERSECT', canal_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('flowline_art_code_lyr', 'INTERSECT', 'mr_end_pt_lyr', '', 'NEW_SELECTION')

    arcpy.DeleteFeatures_management('flowline_art_code_lyr')
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', flowline_art_code, '', 'NEW_SELECTION')

    # add selected flowlines to the perennial stream shp
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "2. Major Artifical in Perennial Area Polygon"
            cursor.updateRow(row)

    # --add missing flowlines in marshes--
    # --artificial coded lines that are perennial gaps in marsh waterbody polygons--

    #  select nhd waterbodys that:
    #   - are coded as marshes (ftype 466)
    #   - intersect perennial stream start and end (i.e., are perennial stream inlet AND outlet)
    arcpy.MakeFeatureLayer_management(nhd_waterbody_path, 'nhd_waterbody_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_waterbody_lyr', 'NEW_SELECTION', """ "FTYPE" = 466 """)
    marshes = arcpy.CopyFeatures_management('nhd_waterbody_lyr', 'in_memory/marshes')
    arcpy.MakeFeatureLayer_management(marshes, 'marshes_lyr')
    per_start_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_end_pt', "END")
    arcpy.SelectLayerByLocation_management('marshes_lyr', 'INTERSECT', per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('marshes_lyr', 'INTERSECT', per_end_pt, '', 'SUBSET_SELECTION')

    #  select and dissolve nhd flowlines that:
    #   - are coded as artificial
    #   - fall within selected marsh waterbodies
    #   - are not already part of perennial stream network
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN', 'marshes_lyr', '', 'SUBSET_SELECTION')
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', flowline_per, '', 'REMOVE_FROM_SELECTION')
    marsh_lines = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/marsh_lines', 'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')

    marsh_gap_lines = findGaps(marsh_lines, flowline_per)

    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', marsh_gap_lines, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "3. Artificial Network Gap in Marsh Waterbody"
            cursor.updateRow(row)

    # --add missing flowlines in smaller lakes and ponds--

    #  select nhd waterbodys that:
    #   - are coded as lakes/ponds (ftype 390)
    #   - area <= .03 sq km
    #   - are not named
    #   - intersect perennial stream start and end (i.e., are perennial stream inlet AND outlet)
    arcpy.SelectLayerByLocation_management('nhd_waterbody_lyr', 'INTERSECT', flowline_per, '', 'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management('nhd_waterbody_lyr', 'SUBSET_SELECTION', """ "FTYPE" = 390 AND "AREASQKM" <= 0.03 AND "GNIS_NAME" = '' """)
    sm_lakes_ponds = arcpy.CopyFeatures_management('nhd_waterbody_lyr', 'in_memory/sm_lakes_ponds')
    arcpy.MakeFeatureLayer_management(sm_lakes_ponds, 'sm_lakes_ponds_lyr')
    per_start_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_end_pt', "END")
    arcpy.SelectLayerByLocation_management('sm_lakes_ponds_lyr', 'INTERSECT', per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('sm_lakes_ponds_lyr', 'INTERSECT', per_end_pt, '', 'SUBSET_SELECTION')

    #  select nhd flowlines that:
    #   - fall within selected waterbodies
    #   - intersect a perennial streams (i.e., are gaps on perennial network)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN', 'sm_lakes_ponds_lyr', '', 'NEW_SELECTION')
    flowline_wbody_dissolve = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/flowline_wbody_dissolve', 'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    arcpy.MakeFeatureLayer_management(flowline_wbody_dissolve, 'flowline_wbody_dissolve_lyr')
    arcpy.SelectLayerByLocation_management('flowline_wbody_dissolve_lyr', 'INTERSECT', flowline_per, '', 'NEW_SELECTION')

    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', 'flowline_wbody_dissolve_lyr', '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "4. Network Gap in Small Lake/Pond Waterbody"
            cursor.updateRow(row)

    # --remove flowlines where 2 lines end but none start (indicate 'false perennial tribs')--

    per_start_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_end_pt', "END")
    # Self spatial join counts coincident end points ('Join_Count' field).
    per_end_pt_join = arcpy.SpatialJoin_analysis(per_end_pt, per_end_pt, 'in_memory/per_end_pt_join', 'JOIN_ONE_TO_ONE', 'KEEP_ALL', '', 'INTERSECT')
    arcpy.MakeFeatureLayer_management(per_end_pt_join, 'per_end_pt_join_lyr')
    arcpy.SelectLayerByLocation_management('per_end_pt_join_lyr', 'INTERSECT', per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management('per_end_pt_join_lyr', 'SWITCH_SELECTION')
    arcpy.SelectLayerByAttribute_management('per_end_pt_join_lyr', 'SUBSET_SELECTION', """ "Join_Count" >= 2 """)
    arcpy.MakeFeatureLayer_management(flowline_per, 'flowline_per_lyr')
    arcpy.SelectLayerByLocation_management('flowline_per_lyr', 'INTERSECT', 'per_end_pt_join_lyr', '', 'NEW_SELECTION')
    arcpy.DeleteFeatures_management('flowline_per_lyr')

    # --add named intermittent and connector flowlines that are directly downstream of perennial stream--

    # create perennial end pts shp (use to find intermittent that starts where perennial ends)
    flowline_per_dissolve = arcpy.Dissolve_management(flowline_per, 'in_memory/flowline_per_dissolve', '', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    per_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_per_dissolve, 'in_memory/per_end_pt', "END")

    # select named intermitent and connector flowlines
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'SUBSET_SELECTION', """ "GNIS_NAME" <> '' """)

    # dissolve selected flowlines by name
    flowline_int_dissolve = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/flowline_int_dissolve', 'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')

    # create points at start of dissolved intermittent and connector flowlines
    int_start_pts = arcpy.FeatureVerticesToPoints_management(flowline_int_dissolve, 'in_memory/int_start_pts', "START")

    # select perennial end points that overlap intermittent/connector start points
    arcpy.MakeFeatureLayer_management(per_end_pt, 'per_end_pt_lyr')
    arcpy.SelectLayerByLocation_management('per_end_pt_lyr', 'INTERSECT', int_start_pts, '', 'NEW_SELECTION')

    # select dissolved intermitent and connector flowlines that intersect selected perennial end points
    # (these lines are directly downstream of perennial stream)
    arcpy.MakeFeatureLayer_management(flowline_int_dissolve, 'flowline_int_dissolve_lyr')
    arcpy.SelectLayerByLocation_management('flowline_int_dissolve_lyr', 'INTERSECT', 'per_end_pt_lyr', '', 'NEW_SELECTION')

    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', 'flowline_int_dissolve_lyr', '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "5. Named Intermittent/Connector Directly Downstream of Network Line"
            cursor.updateRow(row)

    # --add named intermittent flowlines that fall on gaps in the perennial network--

    # select intermittent flowlines that aren't part of perennial network up to this point
    # these are potential network gap lines
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'SUBSET_SELECTION', """ "GNIS_NAME" <> '' """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', flowline_per, '', 'REMOVE_FROM_SELECTION')
    int_lines = arcpy.CopyFeatures_management('nhd_flowlines_lyr', 'in_memory/int_lines')

    # find gaps on all selected lines
    int_gap_lines = findGaps(int_lines, flowline_per)

    # add itermittent gap to the perennial stream shp
    # (geometry-only insert: 'Source' stays NULL and is labeled below)
    with arcpy.da.InsertCursor(flowline_per, ["SHAPE@"]) as iCursor:
        with arcpy.da.SearchCursor(int_gap_lines, ["SHAPE@"]) as sCursor:
            for row in sCursor:
                iCursor.insertRow([row[0]])

    # find gaps on dissolved lines (grabs lines that may be split by trib and otherwise wouldn't be selected)
    int_lines_dissolve = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/int_lines_dissolve', 'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    int_gap_lines_dissolve = findGaps(int_lines_dissolve, flowline_per)

    # add itermittent gap to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', int_gap_lines_dissolve, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "6. Named Intermittent/Connector Network Gap"
            cursor.updateRow(row)

    # --add intermittent flowlines that fall on gaps in the perennial network--

    # select intermittent flowlines that aren't part of perennial network up to this point
    # these are potential network gap lines
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'ARE_IDENTICAL_TO', flowline_per, '', 'REMOVE_FROM_SELECTION')
    int_lines_all = arcpy.CopyFeatures_management('nhd_flowlines_lyr', 'in_memory/int_lines_all')

    int_gap_lines_all = findGaps(int_lines_all, flowline_per)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', int_gap_lines_all, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'ARE_IDENTICAL_TO', flowline_per, '', 'REMOVE_FROM_SELECTION')
    int_lines_all_dissolve = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/int_lines_all_dissolve', 'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    int_gap_lines_all_dissolve = findGaps(int_lines_all_dissolve, flowline_per)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', int_gap_lines_all_dissolve, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "7. Unnamed Intermittent/Connector Network Gap"
            cursor.updateRow(row)

    # --add artifical flowlines that fall on gaps in the perennial network--
    # --these are potential network gap lines--

    # select artificial coded flowlines that aren't part of perennial network up to this point
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', flowline_per, '', 'REMOVE_FROM_SELECTION')

    # create search aoi from perennial area polygons and marsh waterbody polygons
    arcpy.SelectLayerByAttribute_management('nhd_waterbody_lyr', 'NEW_SELECTION', """ "FTYPE" = 466 """)
    marshes = arcpy.CopyFeatures_management('nhd_waterbody_lyr', 'in_memory/marshes')
    arcpy.SelectLayerByAttribute_management('nhd_area_lyr', 'NEW_SELECTION', """ "FCODE" = 46006 """)
    per_area = arcpy.CopyFeatures_management('nhd_area_lyr', 'in_memory/per_area')
    art_gap_aoi = arcpy.Merge_management([marshes, per_area], 'in_memory/art_gap_aoi')

    # subset selection to flowlines that flow throw search aoi
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN', art_gap_aoi, '', 'SUBSET_SELECTION')
    art_lines = arcpy.CopyFeatures_management('nhd_flowlines_lyr', 'in_memory/art_lines')

    # NOTE(review): third argument is the string 'True', not the boolean True --
    # confirm what findGaps expects.
    art_gap_lines = findGaps(art_lines, flowline_per, 'True')

    # add artificial gap to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', art_gap_lines, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "8. Artificial Network Gap"
            cursor.updateRow(row)

    # --remove isolated (i.e., only intersect themselves), short (< 300 m) line segments--
    flowline_per_dissolve2 = arcpy.Dissolve_management(flowline_per, 'in_memory/flowline_per_dissolve2', '', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    flowline_per_join = arcpy.SpatialJoin_analysis(flowline_per_dissolve2, flowline_per_dissolve2, 'in_memory/flowline_per_join', 'JOIN_ONE_TO_ONE', 'KEEP_ALL', '', 'INTERSECT')
    arcpy.AddField_management(flowline_per_join, 'Length', 'DOUBLE')
    arcpy.CalculateField_management(flowline_per_join, 'Length', "!SHAPE.LENGTH@Meters!", 'PYTHON_9.3')
    arcpy.MakeFeatureLayer_management(flowline_per_join, 'flowline_per_join_lyr')
    # Join_Count <= 1 means the segment only intersects itself (isolated).
    arcpy.SelectLayerByAttribute_management('flowline_per_join_lyr', 'NEW_SELECTION', """ "Length" < 300 AND "Join_Count" <= 1 """)
    arcpy.SelectLayerByLocation_management('flowline_per_lyr', 'SHARE_A_LINE_SEGMENT_WITH', 'flowline_per_join_lyr', '', 'NEW_SELECTION')
    arcpy.DeleteFeatures_management('flowline_per_lyr')

    # --select and save final perennial shp--
    arcpy.SelectLayerByAttribute_management('flowline_per_lyr', 'CLEAR_SELECTION')
    arcpy.CopyFeatures_management(flowline_per, outpath)
    arcpy.DeleteIdentical_management(outpath, ['Shape'])
コード例 #23
0
def createUN(jsonFile, outGDB):
    """Create and configure an ArcGIS Utility Network from a JSON config file.

    Reads a GBK-encoded JSON description, stages a file GDB with a utility
    network, then applies domains, domain networks, terminal configurations,
    categories, network attributes, subtypes, fields, asset-level settings
    (from a companion CSV), tiers/subnetwork definitions, rules, and finally
    loads data per subtype.

    Parameters:
        jsonFile -- path to the GBK-encoded JSON configuration
        outGDB   -- path of the file geodatabase to create
    """
    cfgStr = open(jsonFile, 'r', encoding='gbk').read()
    unObj = json.loads(cfgStr)
    unName = unObj["unName"]
    # Create the UN and its structure network
    arcpy.env.preserveGlobalIds = True
    arcpy.env.overwriteOutput = True
    arcpy.CreateFileGDB_management(os.path.dirname(outGDB),
                                   os.path.basename(outGDB))
    arcpy.pt.StageUtilityNetwork(outGDB, unObj["territoryFeaCls"],
                                 unObj["feaDS"], unName)
    # Tip: prefer relative paths; fall back to absolute paths only for tools
    # that require them.
    arcpy.env.workspace = os.path.join(outGDB, unObj["feaDS"])

    # Import field domains; domains are physical settings at the GDB level.
    for domain in unObj["fieldDomains"]:
        dName = domain["name"]
        if domain.get("dtype") == "RANGE":
            arcpy.CreateDomain_management(outGDB, dName,
                                          domain.get("desc", dName),
                                          domain.get("ftype", "SHORT"),
                                          "RANGE")
            arcpy.SetValueForRangeDomain_management(outGDB, dName,
                                                    domain['min'],
                                                    domain['max'])
            continue
        # Coded-value domain: stage code/name pairs in a scratch table, then
        # convert it to a domain.  [0] extracts the table path from the Result.
        table = arcpy.management.CreateTable('in_memory', dName)[0]
        arcpy.AddField_management(table, 'code', domain.get("ftype", "SHORT"))
        arcpy.AddField_management(table, 'name', 'TEXT', field_length=254)
        with arcpy.da.InsertCursor(table, ('code', 'name')) as cur:
            for v in domain["values"]:
                cur.insertRow((v["code"], v["name"]))
        arcpy.TableToDomain_management(table,
                                       'code',
                                       'name',
                                       outGDB,
                                       dName,
                                       domain.get("desc", dName),
                                       update_option='REPLACE')
        arcpy.Delete_management(table)

    # Create every domain network except 'structure'
    for dnObj in unObj["domainNetworks"]:
        if dnObj["name"].lower() != "structure":
            arcpy.AddDomainNetwork_un(unName, dnObj["name"], dnObj["tierDef"],
                                      dnObj["controllerType"],
                                      dnObj.get("alias"))

    # Add terminal configurations, categories, and network attributes --
    # logical settings that apply to the whole UN.
    # Tip: at least one domain network must exist before adding a terminal
    # configuration.
    terminalConfigs = unObj.get("terminalConfigs")
    if terminalConfigs:
        for terminalCfg in terminalConfigs:
            if terminalCfg["dir"] == "DIRECTIONAL":
                arcpy.AddTerminalConfiguration_un(
                    unName,
                    terminalCfg["name"],
                    "DIRECTIONAL",
                    terminals_directional=terminalCfg["terminals"],
                    valid_paths=terminalCfg["paths"],
                    default_path=terminalCfg.get("default"))
            else:
                arcpy.AddTerminalConfiguration_un(
                    unName,
                    terminalCfg["name"],
                    "BIDIRECTIONAL",
                    terminals_bidirectional=terminalCfg["terminals"],
                    valid_paths=terminalCfg["paths"],
                    default_path=terminalCfg.get("default"))
    # TODO: clarify the difference between network grouping and tiering.
    categories = unObj.get("categories")
    if categories:  # guard: key may be absent from the JSON config
        for category in categories:
            arcpy.AddNetworkCategory_un(unName, category)
    # TODO: document what the optional network-attribute settings control.
    netAttributes = unObj.get("netAttributes")
    if netAttributes:
        for attrib in netAttributes:
            arcpy.AddNetworkAttribute_un(unName, attrib["name"],
                                         attrib["type"], attrib.get("inline"),
                                         attrib.get("apportionable"),
                                         attrib.get("domain"),
                                         attrib.get("overridable"),
                                         attrib.get("nullable"),
                                         attrib.get("substitution"),
                                         attrib.get("attrToSubstitution"))

    # Add subtypes, create new fields, assign domains and network attributes --
    # physical settings that apply at the table level.
    for dnObj in unObj["domainNetworks"]:
        # The subtype field is automatically set to ASSETGROUP; add custom values.
        subtypes = dnObj.get("subtypes")
        if subtypes:
            for subtype in subtypes:
                for v in subtype["values"]:
                    arcpy.AddSubtype_management(subtype["feaCls"], v["code"],
                                                v["name"])
                if subtype.get("default"):
                    arcpy.SetDefaultSubtype_management(subtype["feaCls"],
                                                       subtype.get("default"))
        # Add user-defined fields.
        newFields = dnObj.get("newFields")
        if newFields:
            for field in newFields:
                # field_length only applies to TEXT fields.
                length = field.get(
                    "length") if field["type"].upper() == "TEXT" else None
                arcpy.AddField_management(field["feaCls"],
                                          field["name"],
                                          field["type"],
                                          field_length=length,
                                          field_alias=field.get("alias"))
        # Assign domains to fields.
        fDomains = dnObj.get("fieldDomains")
        if fDomains:
            for fd in fDomains:
                arcpy.AssignDomainToField_management(fd["feaCls"],
                                                     fd["fieldName"],
                                                     fd["domainName"],
                                                     fd.get("subtypeCodes"))
                if fd.get("default"):
                    arcpy.AssignDefaultToField_management(
                        fd["feaCls"], fd["fieldName"], fd["default"],
                        fd.get("subtypeCodes"))
        # Assign network attributes to fields.
        netAttributes = dnObj.get("netAttributes")
        if netAttributes:
            for attribute in netAttributes:
                for field in attribute["fields"]:
                    fc, fName = field.split("/")
                    # Only assign if the field actually exists on the class.
                    fObj = arcpy.ListFields(fc, fName)
                    if fObj:
                        arcpy.SetNetworkAttribute_un(unName, attribute["name"],
                                                     dnObj["name"], fc, fName)

    # Apply asset-level settings from CSV: terminal configuration, categories,
    # edge connectivity, and association roles.
    # NOTE(review): a missing "assetsCSV" key yields the literal path
    # "not exist", so open() raises FileNotFoundError -- confirm intended.
    with open(unObj.get("assetsCSV", "not exist"), 'r', encoding='gbk') as fp:
        reader = csv.reader(fp)  # each row is parsed as a list of strings
        header = next(
            reader
        )  # ['domainNet', 'feaCls', 'assetName', 'categories', 'terminalCfg', 'edgeConnectivity', 'roleType', 'deletionType', 'viewScale', 'splitType']
        assetCfg = namedtuple('assetCfg', header)
        for row in reader:
            row = assetCfg(*row)
            # assetName is "group/type"; splat supplies both arguments.
            asset = row.assetName.split('/')
            if row.terminalCfg:
                arcpy.SetTerminalConfiguration_un(unName, row.domainNet,
                                                  row.feaCls, *asset,
                                                  row.terminalCfg)
            if row.categories:
                arcpy.SetNetworkCategory_un(unName, row.domainNet, row.feaCls,
                                            *asset, row.categories)
            if row.edgeConnectivity:  # edge connectivity specified
                arcpy.SetEdgeConnectivity_un(unName, row.domainNet, row.feaCls,
                                             *asset, row.edgeConnectivity)
            if row.roleType:
                arcpy.SetAssociationRole_un(unName, row.domainNet, row.feaCls,
                                            *asset, row.roleType,
                                            row.deletionType, row.viewScale,
                                            row.splitType)

    # Create tiers and set subnetwork definitions -- logical settings that
    # apply at the subnetwork level.
    # TODO: document subnetwork_field_name; SetSubnetworkDefinition has many
    # more optional settings than are used here.
    for dnObj in unObj["domainNetworks"]:
        dnName = dnObj["name"]
        if dnName.lower() != "structure":
            # tierGroups (only valid for hierarchical tier definitions)
            tierGroups = dnObj.get("tierGroups")
            if tierGroups and dnObj["tierDef"] == "HIERARCHICAL":
                for groupName in tierGroups:
                    arcpy.AddTierGroup_un(unName, dnName, groupName)
            tiers = dnObj.get("tiers")
            if tiers:
                for tier in tiers:
                    if dnObj["tierDef"] == "HIERARCHICAL":
                        arcpy.AddTier_un(
                            unName,
                            dnName,
                            tier["name"],
                            tier["rank"],
                            topology_type="MESH",
                            tier_group_name=tier.get("groupName"),
                            subnetwork_field_name=tier["subnetField"])
                    else:
                        arcpy.AddTier_un(unName,
                                         dnName,
                                         tier["name"],
                                         tier["rank"],
                                         topology_type=tier["topo"])
                    arcpy.SetSubnetworkDefinition_un(
                        unName,
                        dnName,
                        tier["name"],
                        tier["disjoint"],
                        tier["devices"],
                        tier["controllers"],
                        tier.get("lines"),
                        tier.get("aggregated"),
                        tier.get("diagrams"),
                        include_barriers=tier.get("barriers"),
                        traversability_scope=tier.get("traverse"))

    # TODO: import rules -- the CSV path is hard-coded and machine-specific.
    arcpy.ImportRules_un(unName, "All", "E:/ArcGIS/unnet/rules.csv")
    # TODO: data loading.
    # Loading is subtype-based: the source feature-class path is stored per
    # subtype in the config, and records are appended into the matching subtype.
    for dnObj in unObj["domainNetworks"]:
        subtypes = dnObj.get("subtypes")
        if subtypes:
            for subtype in subtypes:
                for v in subtype["values"]:
                    arcpy.Append_management(subtype["path"],
                                            subtype["feaCls"],
                                            "NO_TEST",
                                            subtype=v["name"])
コード例 #24
0
                                codeblock)

#### Add information to 'Description'
field_Description = "Description"

# Code block for CalculateField: any non-empty Name is labeled "PHOTO".
codeblock = """
def reclass(name):
    if name:
        desc = "PHOTO"
        return desc
    else:
        return None """

# field_Name is defined earlier (outside this snippet).
expression = "reclass(!{}!)".format(field_Name)
arcpy.CalculateField_management(photoLayer, field_Description, expression,
                                "PYTHON3", codeblock)

## 4. Truncate original point feature layer
arcpy.DeleteRows_management(originalFeature)

## Convert to PRS92
copied = "copied_layer"
copyL = arcpy.CopyFeatures_management(photoLayer, copied)

## 5. Append the new point feature layer to the original
# NO_TEST: schemas may differ; unmatched fields are dropped.
arcpy.Append_management(copyL, originalFeature, schema_type='NO_TEST')

## 99. Delete intermediate layers
deleteLayer = [photoLayer, copyL]
arcpy.Delete_management(deleteLayer)
コード例 #25
0
    # Optionally convert the interpolated raster to polygons and append them
    # to the running polygon feature class.  NOTE(review): out_poly is a
    # script-tool string parameter, hence the 'true' comparison.
    if out_poly == 'true':

        arcpy.AddMessage("Converting to Int Raster for polygon generation")
        # RasterToPolygon requires an integer raster.
        arcpy.Int_3d(numpyRaster, tempRaster)

        fc = Dir + '\\' + pointName + '.shp'
        # Shapefile names cannot contain spaces.
        fc = fc.replace(" ", "_")

        arcpy.AddMessage("Converting to Polygon")
        arcpy.RasterToPolygon_conversion(tempRaster, fc, 'NO_SIMPLIFY', 'Value', 'MULTIPLE_OUTER_PART')

        arcpy.AddField_management(fc, "Name", "TEXT")
        arcpy.AddField_management(fc, "FeatureID", "TEXT")

        arcpy.DeleteField_management(fc, "gridcode")

        # Stamp every polygon with the source point's name and FID.
        with arcpy.da.UpdateCursor(fc, ['Name', 'FeatureID']) as cursor:
            for row in cursor:
                # Replaces the whole row with the same two values for all
                # features before writing it back.
                row = [pointName, pointFID]
                print(row)
                cursor.updateRow(row)

        print(polygonfc)
        print(fc)

        arcpy.AddMessage("Appending to feature class")
        # 'TEST': schemas must match exactly or the append fails.
        arcpy.Append_management(fc, polygonfc, 'TEST')

        # Clean up the per-point temporary shapefile.
        arcpy.Delete_management(fc)
コード例 #26
0
        except:

            print "ERROR! You are missing an FQNID - please populate and re-run the tool."

            exit()

# Count the staged CSV rows about to be appended (Python 2 print style).
num_of_rows = int(arcpy.GetCount_management("csvTableView").getOutput(0))

print str(num_of_rows) + " - NEW RECORDS TO BE APPENDED"
print("\n")

# Append the CSV table view into the SDE table.  NO_TEST with an explicit
# field mapping: CSV column names are remapped to the SDE schema.
arcpy.Append_management(
    inputs="csvTableView",
    target=sdeTable,
    schema_type="NO_TEST",
    field_mapping=
    'FQN_ID "FQN_ID" true true false 50 Text 0 0 ,First,#,"csvTableView",Segment_Name__FQNID_,-1,-1;CONSTR_START_PLANNED "CONST_START_PLANNED" true true false 8 Date 0 0 ,First,#,"csvTableView",Construction_Start_Planned,-1,-1;CONSTR_START_ACTUAL "CONSTR_START_ADATE" true true false 8 Date 0 0 ,First,#,"csvTableView",Construction_Start_Actual,-1,-1;CABLE_PLACED_PLANNED "CABLE_PLACED_PDATE" true true false 8 Date 0 0 ,First,#,"csvTableView",Cable_Placed_Planned,-1,-1;CABLE_PLACED_ACTUAL "CABLE_PLACED_ADATE" true true false 8 Date 0 0 ,First,#,"csvTableView",Cable_Placed_Actual,-1,-1;SPLICE_TEST_PLANNED "SPLICE_TEST_PDATE" true true false 8 Date 0 0 ,First,#,"csvTableView",Splice___Test_Planned,-1,-1;SPLICE_TEST_ACTUAL "SPLICE_TEST_ADATE" true true false 8 Date 0 0 ,First,#,"csvTableView",Splice___Test_Actual,-1,-1;OPERATION_TYPE "OPERATION_TYPE" true true false 50 Text 0 0 ,First,#;VENDOR_REFERENCE_ID "VENDOR_REFERENCE_ID" true true false 50 Text 0 0 ,First,#;CONDUIT_PARTIALLYPLACED_ACTUAL "CONDUIT_PARTIALLYPLACED_ACTUAL" true true false 8 Date 0 0 ,First,#,"csvTableView",CONDUIT_PARTIALLYPLACED_ACTUAL,-1,-1;CONDUIT_PLACED_ESTIMATED "CONDUIT_PLACED_ESTIMATED" true true false 8 Date 0 0 ,First,#,"csvTableView",CONDUIT_PLACED_ESTIMATED,-1,-1;CONDUIT_PLACED_ACTUAL "CONDUIT_PLACED_ACTUAL" true true false 8 Date 0 0 ,First,#,"csvTableView",CONDUIT_PLACED_ACTUAL,-1,-1',
    subtype="")

print "... " + (arcpy.GetMessages())
print("\n")

##### Insert an "I" into the newly added rows #####

# Newly appended rows have no OPERATION_TYPE, so a NULL test isolates them.
arcpy.SelectLayerByAttribute_management("sdeTableView", "NEW_SELECTION",
                                        "OPERATION_TYPE IS NULL")

num_of_rows2 = int(arcpy.GetCount_management("sdeTableView").getOutput(0))

arcpy.CalculateField_management("sdeTableView", "OPERATION_TYPE", "'I'",
コード例 #27
0
def main(in_features,
         date_field,
         init_date,
         spatial_band_size,
         spatial_half,
         temporal_band_size,
         temporal_half,
         probability_type,
         out_raster,
         out_polygon,
         slice_num,
         pub_polys='',
         pub_type='',
         username='',
         password='',
         server_url='',
         poly_url='',
         *args):
    """ Generates a raster and series of polygons based on that raster to
        illustrate the probability of incidents occurring at the current
        moment in time based on defined algorithms for the decay of spatial
        and temporal influence of previous incidents.

        in_features: Point feature class or shapefile showing the location of
                     incidents that have occurred recently, and from which
                     predictions will be based. This feature class must have a
                     date field and all features must have date values.

        date_field: Field in in_features containing the date each incident
                    occurred. Values in this field are used to calculate the
                    decay of temporal influence between when the incident
                    occurred and the current date.

        init_date: Initial processing date: 'TODAY', 'YESTERDAY', or a
                   yyyy-mm-dd string. All incidents with dates between this
                   date and init_date - temporal_band_size will be included
                   in the report.

        spatial_band_size: Value in the units of in_features representing the
                           maximum reach of spatial influence of historical
                           incidents.

        spatial_half: Distance at which spatial influence is presumed to
                      decay to half strength (consumed by
                      calculate_risk_surface -- confirm semantics there).

        temporal_band_size: Value in days representing the maximum reach of
                            temporal influence of historical incidents.
                            Features in in_features where init_date minus the
                            incident date results in a number of days greater
                            than this value will not be considered when
                            creating the prediction zones.

        temporal_half: Number of days at which temporal influence is presumed
                       to decay to half strength (consumed by
                       calculate_risk_surface -- confirm semantics there).

        probability_type: 'CUMULATIVE' (default) creates a surface resulting
                          from summing the prediction risks from each incident;
                          'MAXIMUM' creates a surface representing the maximum
                          risk value from each incident.

        out_raster: Location for output incident prediction surface raster.
                    Raster name will have timestamp.

        out_polygon: Output polygon feature class based on classifying the
                     out_raster values into slice_num categories.
                     Polygon boundaries represent the bounds of the
                     prediction zones as defined by the raster slices.

        slice_num: Integer value representing the number of zones that will be
                   created from the prediction raster. Each zone will represent
                   a range of prediction risk values.

        pub_polys: boolean option for publishing the polygon features. Service
                   must exist previously. Service will be truncated and the
                   cumulative results from in_features will be appended.

        pub_type: Choice of publication environments- NONE, ARCGIS_ONLINE,
                  ARCGIS_PORTAL, ARCGIS_SERVER

        username: administrative username for the service

        password: corresponding to the username

        server_url: organization url

        poly_url: URL to the rest endpoint of the polygon service layer
    """

    try:
        i = 0
        arcpy.SetProgressor("default")
        arcpy.SetProgressorLabel('Initializing...')

        # Check out spatial analyst extension
        if arcpy.CheckExtension("Spatial") == "Available":
            arcpy.CheckOutExtension("Spatial")
        else:
            raise Exception("Spatial Analyst license unavailable")

        # Timestamp suffix used to build a unique output raster name.
        now = dt.strftime(dt.now(), "%y%m%d%H%M%S")

        # Convert boolean values (tool passes booleans as strings)
        if not pub_polys == 'True':
            pub_polys = False

        # Get init_date value ('today' is defined elsewhere in this module)
        if init_date == 'TODAY':
            init_date = today
        elif init_date == 'YESTERDAY':
            init_date = today - td(days=1)
        else:
            try:
                init_date = dt.strptime(init_date, "%Y-%m-%d")
            except ValueError:
                raise Exception(
                    "Invalid date format. Initial Date must be in the format yyyy-mm-dd."
                )

        # Work in an in-memory copy of the dataset to avoid editing the original
        incident_fc = arcpy.FeatureClassToFeatureClass_conversion(
            in_features, "in_memory", 'temp_incs')

        # Get OID field name
        oidname = arcpy.Describe(incident_fc).oidFieldName

        # Expand the extents of the dataset by the size of the spatial band
        #   rasters will represent the full extent of risk,
        #   not bound to extents of incidents
        expand_extents(incident_fc, float(spatial_band_size))

        # SelectLayerByAttributes tool requires feature layer
        incident_lyr = arcpy.MakeFeatureLayer_management(incident_fc)

        # Create in-memory summary raster with max extents
        d = arcpy.Describe(incident_fc)
        sr = d.spatialReference
        arcpy.env.extent = d.extent

        sum_raster = arcpy.sa.CreateConstantRaster(0,
                                                   data_type='INTEGER',
                                                   extent=d.extent)

        # Calculate minimum bounds of accepted time frame
        date_min = init_date - td(days=int(temporal_band_size))

        # Create risk rasters for each incident within temporal reach of today
        sql = """{0} <= date'{1}' AND {0} > date'{2}'""".format(
            date_field, init_date, date_min)

        # First pass only counts matching incidents (for progress reporting).
        numrows = 0
        with arcpy.da.SearchCursor(incident_fc, "OID@",
                                   where_clause=sql) as rows:
            for row in rows:
                numrows += 1

        with arcpy.da.SearchCursor(incident_fc, ['OID@', date_field],
                                   where_clause=sql) as incidents:
            count = 0
            for incident in incidents:
                arcpy.SetProgressorLabel(
                    'Calculating influence of incident {} of {}...'.format(
                        count + 1, numrows))

                # Calculate age of incident; field may yield date or datetime,
                # so fall back to date-vs-date subtraction on TypeError.
                try:
                    date_diff = init_date - incident[1].date()
                except TypeError:
                    date_diff = init_date.date() - incident[1].date()

                # Build float distance raster for incident
                sql = """{} = {}""".format(oidname, incident[0])
                arcpy.SelectLayerByAttribute_management(incident_lyr,
                                                        where_clause=sql)

                inc_raster = calculate_risk_surface(incident_lyr,
                                                    date_diff.days,
                                                    spatial_band_size,
                                                    float(temporal_half),
                                                    float(spatial_half))

                # Process cumulative risk
                if probability_type == 'CUMULATIVE':
                    sum_raster += inc_raster

                # Process maximum risk
                else:
                    sum_raster = calculate_max_risk(sum_raster, inc_raster)

                count += 1

        if not count:
            raise Exception('No incidents found between {} and {}'.format(
                date_min, init_date))
        else:
            arcpy.AddMessage("{} incidents found.".format(count))

        # Save final probability raster where values are > 0
        arcpy.SetProgressorLabel('Saving final raster...')

        sum_raster = arcpy.sa.SetNull(sum_raster, sum_raster, "Value <= 0")
        out_raster_name = ''.join([out_raster, os.sep, 'p', now])
        sum_raster.save(out_raster_name)
        arcpy.SetParameterAsText(18, out_raster_name)

        # Slice raster values into categories and convert to temp polys
        arcpy.SetProgressorLabel('Creating polygons...')

        temp_polys = convert_raster_to_zones(sum_raster, slice_num,
                                             cur_status_field, cur_date_field)

        # Create polygon fc if it doesn't exist
        if not arcpy.Exists(out_polygon):
            create_zone_fc(temp_polys, sr, out_polygon)

        # Create status fields if they don't exist
        add_status_fields_to_lyr(out_polygon)

        # Set status of all existing features to False
        sql = """{} <> 'False'""".format(cur_status_field)
        with arcpy.da.UpdateCursor(out_polygon,
                                   cur_status_field,
                                   where_clause=sql) as rows:
            for row in rows:
                row[0] = 'False'
                rows.updateRow(row)

        # Append temp poly features to output polygon fc
        arcpy.Append_management(temp_polys, out_polygon)
        arcpy.SetParameterAsText(17, out_polygon)

        # Update polygon services.
        # If pubtype = NONE or SERVER, no steps necessary

        if pub_type in ['ARCGIS_ONLINE', 'ARCGIS_PORTAL'] and pub_polys:
            arcpy.SetProgressorLabel('Updating polygon feature service...')

            # connect to incidents service
            # BUGFIX: narrowed from bare 'except:' so SystemExit /
            # KeyboardInterrupt still propagate.
            try:
                fl = connect_to_layer(username, password, server_url, poly_url)
            except Exception:
                raise Exception('Could not update service. Please verify '
                                'organization URL and service URL are '
                                'correct, and the provided username and '
                                'password have access to the service.')

            # Check service for status, creation, risk fields. add if necessary
            add_status_field_to_service(fl)

            # Update 'current' features in service to be 'past'
            field_info = [{
                'FieldName': cur_status_field,
                'ValueToSet': 'False'
            }]

            out_fields = ['objectid']
            for fld in field_info:
                out_fields.append(fld['FieldName'])

            sql = """{} = 'True'""".format(cur_status_field)
            updateFeats = fl.query(where=sql, out_fields=','.join(out_fields))

            for feat in updateFeats:
                for fld in field_info:
                    feat.set_value(fld['FieldName'], fld['ValueToSet'])

            fl.updateFeature(features=updateFeats)

            # Add new 'current' features
            fl.addFeatures(temp_polys)

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)

    # BUGFIX: narrowed from bare 'except:' so SystemExit / KeyboardInterrupt
    # still propagate.
    except Exception:

        # Return  error messages for use in script tool or Python Window
        arcpy.AddError(str(sys.exc_info()[1]))

        # Print Python error messages for use in Python / Python Window
        print("\n" + str(sys.exc_info()[1]) + "\n")

    finally:
        arcpy.SetProgressorLabel('Completed.')
        arcpy.CheckInExtension("Spatial")
コード例 #28
0
                                                "NEW_SELECTION", slt_qry_down)
        #may need to make this selection into single feature for population as raster
        #arcpy.Dissolve_management("catchment_lyr", flood_areaD_clip_single)

        #select and clip corresponding flood zone
        arcpy.SelectLayerByAttribute_management("flood_zone_lyr",
                                                "NEW_SELECTION", where_clause)
        arcpy.Clip_analysis("flood_zone_lyr", "catchment_lyr",
                            flood_areaD_clip)
        # Dissolve requires a layer, so wrap the clip result before
        # collapsing it into a single feature.
        arcpy.MakeFeatureLayer_management(flood_areaD_clip,
                                          "flood_zone_down_lyr")
        arcpy.Dissolve_management("flood_zone_down_lyr",
                                  flood_areaD_clip_single)

        #append to empty clipped set
        arcpy.Append_management(flood_areaD_clip_single, flood_areaD)
        # Running feature count of the accumulating output doubles as a
        # progress counter against the total site count.
        clip_rows = arcpy.GetCount_management(flood_areaD)
        arcpy.AddMessage(
            "Determine catchments downstream for row {}, of {}".format(
                clip_rows, site_cnt))
        print("Determine catchments downstream for row {}, of {}".format(
            clip_rows, site_cnt))

# Report step completion to both the tool UI (AddMessage) and the console.
arcpy.AddMessage(
    "Finished reducing flood zone areas to downstream from sites...")
print("Finished reducing flood zone areas to downstream from sites...")

#step 3C: calculate flood area as benefitting percentage
arcpy.AddMessage("Measuring flood zone area downstream of each site...")
print("Measuring flood zone area downstream of each site...")
コード例 #29
0
def build_basketball_court(output_gdb, output_feature_class):
    """Create and populate a polygon feature class of basketball court shapes.

    Inserts the court boundary, paint areas, key circles, rims and
    center-court circles into output_feature_class inside output_gdb,
    creating the feature class (spatial reference 3857) with a NAME text
    field if it does not already exist.

    NOTE(review): coordinates appear to describe a 940 x 500 unit court
    (presumably tenths of feet for a 94 x 50 ft court) anchored near one
    baseline -- confirm against the consumers of this data.
    """
    print('Creating basketball court.')
    fields = ('SHAPE@', 'NAME')
    fc = os.path.join(output_gdb, output_feature_class)

    # Create the feature class and its NAME attribute only on first run.
    # (Reuse fc instead of re-joining the path as the original did.)
    if not arcpy.Exists(fc):
        arcpy.CreateFeatureclass_management(output_gdb, output_feature_class,
                                            "POLYGON", "#", "DISABLED",
                                            "DISABLED",
                                            arcpy.SpatialReference(3857))
        arcpy.AddField_management(fc, fields[1], "TEXT", field_length=20)

    cursor = arcpy.da.InsertCursor(fc, fields)
    try:
        # Full court boundary rectangle.
        field = [(-250, -52.5), (250, -52.5), (250, 940 - 52.5),
                 (-250, 940 - 52.5)]
        cursor.insertRow([field, "Court"])

        # Circle at the top of each key (buffered point, then appended).
        pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 190 - 52.5))
        arcpy.Buffer_analysis(pt_geometry, "in_memory\\key1", 60)
        arcpy.Append_management("in_memory\\key1", fc, "NO_TEST", "", "")

        pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 940 - (190 + 52.5)))
        arcpy.Buffer_analysis(pt_geometry, "in_memory\\key2", 60)
        arcpy.Append_management("in_memory\\key2", fc, "NO_TEST", "", "")

        # Extended paint rectangles at both ends of the court.
        whole_key = [(-80, -52.5), (80, -52.5), (80, 190 - 52.5),
                     (-80, 190 - 52.5)]
        cursor.insertRow([whole_key, "Paint Extended"])

        whole_key = [(-80, 940 - 52.5), (80, 940 - 52.5),
                     (80, 940 - (190 + 52.5)), (-80, 940 - (190 + 52.5))]
        cursor.insertRow([whole_key, "Paint Extended"])

        # Inner paint rectangles at both ends of the court.
        paint = [(-60, 940 - 52.5), (60, 940 - 52.5),
                 (60, 940 - (190 + 52.5)), (-60, 940 - (190 + 52.5))]
        cursor.insertRow([paint, "Paint"])

        paint = [(-60, -52.5), (60, -52.5), (60, 190 - 52.5),
                 (-60, 190 - 52.5)]
        cursor.insertRow([paint, "Paint"])
    finally:
        # BUGFIX: the original never released the insert cursor; delete it
        # here so its schema lock on fc is dropped even if an insert fails.
        del cursor

    # Rim circles at each end.
    pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 0))
    arcpy.Buffer_analysis(pt_geometry, "in_memory\\rim1", 12.5)
    arcpy.Append_management("in_memory\\rim1", fc, "NO_TEST", "", "")

    pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 835))
    arcpy.Buffer_analysis(pt_geometry, "in_memory\\rim2", 12.5)
    arcpy.Append_management("in_memory\\rim2", fc, "NO_TEST", "", "")

    # Center-court circles (outer and inner) share the same center point.
    pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 470 - 52.5))
    arcpy.Buffer_analysis(pt_geometry, "in_memory\\midcourt", 60)
    arcpy.Append_management("in_memory\\midcourt", fc, "NO_TEST", "", "")

    arcpy.Buffer_analysis(pt_geometry, "in_memory\\midcourt_inner", 20)
    arcpy.Append_management("in_memory\\midcourt_inner", fc, "NO_TEST", "", "")

    print("Done.")
コード例 #30
0
# Merge individual polygons and tables
print ' ====================================================='
print ' ====================================================='
print 'Total number of processed polygons ' + str(n_poly)

# Merge shapes
print 'Merging all shapefiles...'

# Populate list with 1st element
sample_fcs = fcs_list[0]
arcpy.CopyFeatures_management(sample_fcs, os.path.join(ws, out_shp_name))

# Iterate through all but 1st list element (already completed)
iterfcs = iter(fcs_list)
next(iterfcs)
for fcs in iterfcs:
    arcpy.Append_management([fcs], os.path.join(ws, out_shp_name), "NO_TEST")

# Clean up
print 'Deleting files...'
arcpy.Delete_management('in_memory')

for fc in fcs_list:
    arcpy.Delete_management(fc)

print 'Done! Files written to: '
print os.path.join(ws, out_shp_name)

ts1 = time.time()
print 'Time elapsed: ' + str(ts1 - ts0) + ' seconds'