예제 #1
0
def gpsTracks(output, lapDictionary, timeDictionary):
    """
    Build a WGS84 polyline feature class of GPS lap tracks and label each
    lap with its time.

    :param output: full path of the output polyline feature class
    :param lapDictionary: {lap number: point sequence} used as the polyline
        geometry of each lap
    :param timeDictionary: {lap number: lap-time string}; laps missing from
        it (the partial first/last laps) are labelled "enter / leave track"
    """
    import arcpy, os
    arcpy.env.overwriteOutput = True

    # Create new polyline feature class using WGS84 (EPSG:4326)
    sr = arcpy.SpatialReference(4326)
    outputFolder = os.path.dirname(output)
    outputFile = os.path.basename(output)
    arcpy.CreateFeatureclass_management(outputFolder, outputFile, "POLYLINE",
                                        "", "", "", sr)
    # Field spec order is [name, type, alias, length]; the original passed
    # the length (20) in the alias slot, so lapTime got the default length.
    arcpy.AddFields_management(output,
                               [["lapNo", "SHORT"],
                                ["lapTime", "TEXT", None, 20]])

    # Add polyline features created from coordinates in lapDictionary
    with arcpy.da.InsertCursor(output, ("SHAPE@", "lapNo")) as cursor:
        for lap, points in lapDictionary.items():
            cursor.insertRow((points, lap))

    # Add the lap times for all laps except first and last; laps without a
    # recorded time are the partial enter/leave laps.
    with arcpy.da.UpdateCursor(output, ("lapNo", "lapTime")) as cursor:
        for row in cursor:
            row[1] = timeDictionary.get(row[0], "enter / leave track")
            cursor.updateRow(row)
예제 #2
0
def clipPolygons(census_gdb, census_file, boundary_file, output_gdb,
                 output_file):
    """
    Clip census polygons to a boundary and compute how much of each
    polygon's area survived the clip.

    :param census_gdb: geodatabase holding the source census feature class
    :param census_file: name of the census feature class to clip
    :param boundary_file: clip boundary feature class
    :param output_gdb: geodatabase the results are written into
    :param output_file: name of the clipped output feature class
    """
    # Work on a copy so the source census data is never modified.
    init_file = f'{census_file}_init'

    # copy files to new gdb
    ap.env.workspace = census_gdb
    ap.FeatureClassToFeatureClass_conversion(census_file, output_gdb,
                                             init_file)

    # switch to output gdb
    ap.env.workspace = output_gdb

    # add the area and coverage fields
    ap.AddFields_management(init_file,
                            [['SqMiles', 'DOUBLE'], ['SqMiles_Clip', 'DOUBLE'],
                             ['Coverage', 'DOUBLE']])

    # calculate the initial sq miles for the unclipped census polygons
    calcSqMiles(init_file, 'SqMiles')

    # clip and recalculate sq miles in the SqMiles_Clip field
    # (field names now match the AddFields spec above exactly; the original
    # mixed 'SqMiles_clip'/'coverage' casing with 'SqMiles_Clip'/'Coverage')
    ap.Clip_analysis(init_file, boundary_file, output_file)
    calcSqMiles(output_file, 'SqMiles_Clip')

    # calculate percent coverage of clip
    ap.CalculateField_management(output_file, 'Coverage',
                                 '!SqMiles_Clip! / !SqMiles!', 'PYTHON3')
예제 #3
0
    def make_fc(self, gdb, fc_name, fields, rows, geotype, geotoken):
        """
        Reusable logic for building the feature class.

        :param gdb: file geodatabase to store the output in
        :param fc_name: name of feature class
        :param fields: list of field headings
        :param rows: nested list of (potentially) point, line and polygon rows
        :param geotype: Esri-recognized geometric type designator (str)
        :param geotoken: Esri geometric token
        :return: dict with the feature class name and row count, or None
            when ``fc_name`` already exists (nothing is created then)
        """
        if not arcpy.Exists(fc_name):
            fc = arcpy.CreateFeatureclass_management(gdb,
                                                     fc_name,
                                                     geotype,
                                                     spatial_reference=self.sr)
            arcpy.AddFields_management(fc, fields)
            # Insert-cursor field map: geometry token first, then the
            # schema fields in sorted order.
            field_list = sorted(GeoJSONUtils.build_schema(self).get('fields'))
            field_list.insert(0, geotoken)
            field_map = tuple(field_list)
            with arcpy.da.InsertCursor(fc, field_map) as cursor:
                for row in rows:
                    try:
                        cursor.insertRow(row)
                    except RuntimeError as err:
                        # Renamed from `re`, which shadowed the stdlib
                        # `re` module; bad rows are skipped, not fatal.
                        print(f"Problem inserting row, {err}")

            fc_stats = {"name": fc_name, "rows": len(rows)}
            return fc_stats
예제 #4
0
def convertAltStreets(Project_Folder):
    """
    Simplify the AltStreets lines, mark rows that are alias street names,
    and export a MapMaker-ready AltStreets_Final feature class.

    :param Project_Folder: folder containing Model_Inputs.gdb and
        Model_Outputs.gdb
    :return: path of the AltStreets_Final feature class in Model_Outputs.gdb
    """
    arcpy.env.overwriteOutput = True

    Model_Inputs_gdb = os.path.join(Project_Folder, 'Model_Inputs.gdb')
    Model_Outputs_gdb = os.path.join(Project_Folder, 'Model_Outputs.gdb')

    # NOTE(review): Streets_Simple is produced by a separate function and is
    # assumed to exist already in Model_Outputs.gdb — confirm call order.
    streets_simple = os.path.join(Model_Outputs_gdb, 'Streets_Simple')
    altstreets = os.path.join(Model_Inputs_gdb, 'AltStreets')

    arcpy.env.workspace = Model_Inputs_gdb

    # Simplify AltStreets and Streets Lines
    # removes some of the nodes that make up the lines to make the files low resolution enough to be uploaded through mapmaker
    altstreets_simple = arcpy.SimplifyLine_cartography(in_features=altstreets, out_feature_class=os.path.join(Model_Outputs_gdb, "AltStreet_simple"), algorithm="POINT_REMOVE",
                                                       tolerance="5 Feet", error_resolving_option="RESOLVE_ERRORS", collapsed_point_option="KEEP_COLLAPSED_POINTS", error_checking_option="CHECK", in_barriers=[])[0]

    # add ref_zlev and dom fields for alias classification and linking to streets file
    arcpy.AddFields_management(in_table=altstreets_simple, field_description=[
                               ["REF_ZLEV", "SHORT"], ["DOM", "LONG"]])
    print('added fields to altstreets')

    # Index LINK_ID to speed up the joins below.
    arcpy.AddIndex_management(altstreets_simple, fields=[
                              "LINK_ID"], index_name="LINK_ID", unique="NON_UNIQUE", ascending="ASCENDING")
    print('added altstreet index')

    arcpy.JoinField_management(in_data=altstreets_simple, in_field="LINK_ID",
                               join_table=streets_simple, join_field="LINK_ID", fields=["NUM_STNMES"])
    print('joined altstreets to streets')

    # Filter out all of the altstreet rows that do not have multiple names
    altstreets_filter = arcpy.FeatureClassToFeatureClass_conversion(
        in_features=altstreets_simple, out_path=Model_Outputs_gdb, out_name="AltStreets_Filter", where_clause="NUM_STNMES > 1")
    print('altstreets filtered if less than 2')

    # Create Statistics Table from AltStreets_Simple
    # add in the count of all the street names added to the altstreets simple
    altstreet_stats = os.path.join(Model_Outputs_gdb, "Altstreets_Stats")
    arcpy.Statistics_analysis(in_table=altstreets_filter, out_table=altstreet_stats, statistics_fields=[
                              ["LINK_ID", "FIRST"]], case_field=["LINK_ID", "ST_NAME"])

    # Join AltStreets_Simple with AltStreets_Stats
    arcpy.JoinField_management(in_data=altstreets_simple, in_field="LINK_ID",
                               join_table=altstreet_stats, join_field="LINK_ID", fields=["NUM_STNMES"])

    # NOTE(review): field is "Dom" here but was added as "DOM" above — case
    # differs; presumably ArcGIS resolves field names case-insensitively,
    # verify. field_type="TEXT" should be ignored since the field exists.
    arcpy.CalculateField_management(in_table=altstreets_simple, field="Dom",
                                    expression="1", expression_type="PYTHON3", code_block="", field_type="TEXT")

    # Alias streetname identifier calculation (Alias == -9)
    # MapMaker REQUIRES it to be -9 in order to find it as an alias field
    arcpy.CalculateField_management(in_table=altstreets_simple, field="REF_ZLEV",
                                    expression="-9", expression_type="PYTHON3", code_block="", field_type="TEXT")

    # updated the schema to match mapmaker schema
    updateSchema(altstreets_simple)

    # returns altstreets_final gdb location
    return arcpy.FeatureClassToFeatureClass_conversion(in_features=altstreets_simple, out_path=Model_Outputs_gdb, out_name="AltStreets_Final")[0]
def add_fields(table):
    """
    Add fields using AddFields tool

    NOTE: AddFields is only in Pro. Was added with the intent of bulk adding
    fields more efficiently.
    """

    # Build the same field specification the tool expects:
    # [name, type, alias, length]. All text fields are length 10 except
    # 'Type', which holds longer values (255).
    text_fields = ['Name', 'Descript', 'Type', 'Comment', 'Symbol',
                   'DateTimeS']
    field_specs = [['infield', 'LONG']]
    for field_name in text_fields:
        field_length = 255 if field_name == 'Type' else 10
        field_specs.append([field_name, 'TEXT', None, field_length])
    field_specs.append(['X', 'FLOAT'])
    field_specs.append(['Y', 'FLOAT'])

    arcpy.AddFields_management(table, field_specs)
 def make_featureclass(self, rows, fc_name, geo_type, geo_token):
     """
     Reusable feature class creation logic.

     Creates the feature class (with this instance's fields) when it does
     not exist yet, bulk-inserts ``rows``, and reports the row count.
     """
     if arcpy.Exists(fc_name):
         fc = fc_name
     else:
         fc = arcpy.CreateFeatureclass_management(self.gdb,
                                                  fc_name,
                                                  geo_type,
                                                  spatial_reference=self.sr)
         arcpy.AddFields_management(fc, self.fields)
     # Geometry token leads the insert field map, followed by the
     # instance's mapped fields.
     fieldmap = tuple([geo_token] + SOMFeatures.get_fieldmap_list(self))
     with arcpy.da.InsertCursor(fc, fieldmap) as cursor:
         for record in rows:
             cursor.insertRow(record)
     return {"name": fc_name, "rows": len(rows)}
def get_polygon_coord(infeature):
    """
    Return the overall geographic extent of *infeature* as
    [left (min X), top (max Y), right (max X), bottom (min Y)] in degrees.

    Creates two intermediate outputs (<infeature>tempt and
    <infeature>tempttable) which are left behind in the workspace.
    """
    temptfeature = infeature + 'tempt'
    tempttable = infeature + 'tempttable'
    # Project from CGCS2000 Gauss-Kruger to the WGS84 geographic coordinate
    # system so longitude/latitude can be computed.
    # (translated from the original Chinese comment)
    arcpy.Project_management(
        in_dataset=infeature,
        out_dataset=temptfeature,
        out_coor_system=
        "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]",
        transform_method=[],
        in_coor_system=
        "PROJCS['CGCS2000_3_Degree_GK_Zone_35',GEOGCS['GCS_China_Geodetic_Coordinate_System_2000',DATUM['D_China_2000',SPHEROID['CGCS2000',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Gauss_Kruger'],PARAMETER['False_Easting',35500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',105.0],PARAMETER['Scale_Factor',1.0],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]]",
        preserve_shape="NO_PRESERVE_SHAPE",
        max_deviation="",
        vertical="NO_VERTICAL")
    # One extent value per feature: left/bottom/right/top of its envelope.
    arcpy.AddFields_management(
        in_table=temptfeature,
        field_description=[["Left", "DOUBLE", "", "", "", ""],
                           ["Bottom", "DOUBLE", "", "", "", ""],
                           ["Right", "DOUBLE", "", "", "", ""],
                           ["Top", "DOUBLE", "", "", "", ""]])
    # Compute the lon/lat of the four corners of each feature's extent.
    # (translated from the original Chinese comment)
    arcpy.CalculateGeometryAttributes_management(
        temptfeature, [["Left", "EXTENT_MIN_X"], ["Bottom", "EXTENT_MIN_Y"],
                       ["Right", "EXTENT_MAX_X"], ["Top", "EXTENT_MAX_Y"]])
    # Collapse per-feature extents into one overall min/max row.
    arcpy.Statistics_analysis(in_table=temptfeature,
                              out_table=tempttable,
                              statistics_fields=[["Left", "MIN"],
                                                 ["Bottom", "MIN"],
                                                 ["Right", "MAX"],
                                                 ["Top", "MAX"]],
                              case_field=[])
    # Classic (pre-arcpy.da) cursor; the statistics table holds one row
    # because no case field was used, so the loop runs once.
    cursor = arcpy.SearchCursor(tempttable)
    original_coord = []
    for row in cursor:
        leftmin = row.MIN_Left
        rightmax = row.MAX_Right
        bottommin = row.MIN_Bottom
        topmax = row.MAX_Top
        original_coord = [leftmin, topmax, rightmax, bottommin]
    return original_coord
예제 #8
0
def Model(property_path):  # Model
    """
    Clip the soil-carbon layer to a property boundary, compute per-polygon
    carbon mass (kg) for the low/medium/high estimates, summarize by cover
    type, and export the summary table to Excel.

    :param property_path: path to the property boundary shapefile; the
        property name is derived from its base filename
    """
    import os  # local import keeps this block self-contained

    # To allow overwriting outputs change overwriteOutput option to True.
    arcpy.env.overwriteOutput = True

    arcpy.ImportToolbox(r"c:\program files\arcgis\pro\Resources\ArcToolbox\toolboxes\GeoAnalytics Desktop Tools.tbx")
    arcpy.ImportToolbox(r"c:\program files\arcgis\pro\Resources\ArcToolbox\toolboxes\Data Management Tools.tbx")
    arcpy.ImportToolbox(r"c:\program files\arcgis\pro\Resources\ArcToolbox\toolboxes\Conversion Tools.tbx")

    soil_carbon = "soil_carbon"

    property_shp = property_path
    # Extract the property name from the path. basename/splitext is robust
    # to any folder layout; the original hard-coded slice [24:-4] silently
    # broke for paths with a different prefix length.
    property_name = os.path.splitext(os.path.basename(property_path))[0]

    # Process: Clip Layer (Clip Layer)
    property_soil_shp = "D:\\Greenbelt\\soils\\April\\Soil\\" + property_name + "_Soil.shp"
    arcpy.gapro.ClipLayer(input_layer=soil_carbon, clip_layer=property_shp, out_feature_class=property_soil_shp)

    # Process: Add Fields (multiple) (Add Fields (multiple))
    Property_1 = arcpy.AddFields_management(in_table=property_soil_shp, field_description=[["low_kg", "FLOAT", "", "", "", ""], ["med_kg", "FLOAT", "", "", "", ""], ["high_kg", "FLOAT", "", "", "", ""], ["Area_m2", "FLOAT", "", "", "", ""]])[0]

    # Process: Calculate Geometry Attributes (Calculate Geometry Attributes)
    Property_1 = arcpy.management.CalculateGeometryAttributes(in_features=Property_1, geometry_property=[["Area_m2", "AREA"]], length_unit="", area_unit="SQUARE_METERS", coordinate_system="", coordinate_format="SAME_AS_INPUT")[0]

    # NOTE(review): field_type="TEXT" below should be a no-op since the
    # *_kg fields were just added as FLOAT — per tool docs field_type only
    # applies when the field does not exist. Verify before changing.
    # Process: Calculate Field (Calculate Field)
    Property_1 = arcpy.management.CalculateField(in_table=Property_1, field="low_kg", expression="!low_Carbon! * !Area_m2!", expression_type="PYTHON3", code_block="", field_type="TEXT")[0]

    # Process: Calculate Field (2) (Calculate Field)
    Property_1 = arcpy.management.CalculateField(in_table=Property_1, field="med_kg", expression="!med_Carbon! * !Area_m2!", expression_type="PYTHON3", code_block="", field_type="TEXT")[0]

    # Process: Calculate Field (3) (Calculate Field)
    Property_1 = arcpy.management.CalculateField(in_table=Property_1, field="high_kg", expression="!high_Carbo! * !Area_m2!", expression_type="PYTHON3", code_block="", field_type="TEXT")[0]

    # Process: Summary Statistics (Summary Statistics)
    property_soil_carbon = "D:\\Greenbelt\\soils\\April\\SoilCarbon\\" + property_name + "_SoilCarbon"
    arcpy.Statistics_analysis(in_table=Property_1, out_table=property_soil_carbon, statistics_fields=[["low_kg", "SUM"], ["med_kg", "SUM"], ["high_kg", "SUM"]], case_field=["Cover"])

    # Process: Table To Excel (Table To Excel)
    property_soil_carbon_xlsx = "D:\\Greenbelt\\soils\\April\\SoilCarbonTables\\" + property_name + "_SoilCarbon.xlsx"
    arcpy.conversion.TableToExcel(Input_Table=property_soil_carbon, Output_Excel_File=property_soil_carbon_xlsx, Use_field_alias_as_column_header="NAME", Use_domain_and_subtype_description="CODE")
def create_fc(out_workspace):
    """
    Create the "Locations" point feature class (WGS84) with the entity
    extraction fields.

    :param out_workspace: file geodatabase to create the feature class in
    :return: tuple (out_fc, fields) — path of the feature class and the
        field names (geometry token first) for an insert cursor
    :raises ValueError: if out_workspace is not a file geodatabase
    """
    desc = arcpy.Describe(out_workspace)
    # Checks to see if the output workspace is a file geodatabase.  If not, returns error and forces
    # user to use a file geodatabase.
    if desc.dataType != 'Workspace':
        arcpy.AddError(
            'Please select a file geodatabase to output your files.')
        # Carry the reason in the exception too, so non-ArcGIS callers see
        # more than a bare ValueError.
        raise ValueError('Output workspace must be a file geodatabase.')
    # Create output feature class
    arcpy.AddMessage(
        "Creating output feature class in {}".format(out_workspace))

    # Output feature class spatial reference (WGS84)
    sr = arcpy.SpatialReference(4326)

    # Create the Feature Class
    arcpy.CreateFeatureclass_management(out_workspace, "Locations", "POINT",
                                        "", "", "", sr)
    out_fc = os.path.join(out_workspace, "Locations")

    # Field spec: [name, type, alias, length, default, domain]
    arcpy.AddFields_management(out_fc, [
        ['document', 'TEXT', 'Document', 255, '', ''],
        ['entity_id', 'TEXT', 'Entity Id', 255, '', ''],
        ['entity_type', 'TEXT', 'Entity Type', 255, '', ''],
        ['extracted_value', 'TEXT', 'Extracted Value', 255, '', ''],
        ['pre_text', 'TEXT', 'Pre-Text', 255, '', ''],
        ['post_text', 'TEXT', 'Post-Text', 255, '', ''],
        ['lon', 'FLOAT'],
        ['lat', 'FLOAT'],
    ])

    fields = [
        "SHAPE@XY", 'document', 'entity_id', "entity_type", "extracted_value",
        "pre_text", "post_text", "lon", "lat"
    ]

    return out_fc, fields
        selectionCountryLayer = arcpy.SelectLayerByAttribute_management(
            countries, 'NEW_SELECTION', whereCountry)
        playersLayer = arcpy.SelectLayerByLocation_management(
            players, 'WITHIN', selectionCountryLayer)
        wherePosition = playerPosition + " =  '" + positionCode + "'"
        playersSubLayer = arcpy.SelectLayerByAttribute_management(
            playersLayer, 'SUBSET_SELECTION', wherePosition)

        # Execute Copy Features and Add Fields
        outputFC = "nhlRoster_" + nationOfOrigin + "_" + positionCode + ".shp"
        arcpy.CopyFeatures_management(playersSubLayer, outputFC)
        fields = [
            "weight", "weight_kg", "height", "height_cm", "birthDate", "zodiac"
        ]
        arcpy.AddFields_management(
            outputFC, [["weight_kg", "FLOAT", 6], ["height_cm", "FLOAT", 6],
                       ["zodiac", "TEXT", 12]])

        # Use update cursor to populate metric height and weight fields and the zodiac sign field
        with arcpy.da.UpdateCursor(outputFC, fields) as cursor:
            for row in cursor:
                # Convert weight to kg and height to cm
                row[1] = row[0] * 0.453592
                feet, inches = row[2].rsplit("' ", 1)
                inches = inches[:-1]
                row[3] = ((int(feet) * 12) + int(inches)) * 2.54
                cursor.updateRow(row)
                # Convert birthDate to string and populate the zodiac sign field
                bdaystr = str(row[4])
                year, strmonth, rest = bdaystr.rsplit("-", 2)
                month = int(strmonth)
예제 #11
0
def routeBuffers(config):
    """
    Buffer the bus routes at 0.75/0.5/0.25 miles, dissolve each distance
    into a system buffer, and calculate Title VI population totals
    (minority, LEP, poverty, senior, no-car, low-car) for every distance.

    :param config: dict with keys 'date', 'sign', 'acs_year',
        'title_vi_gdb', 'processed_dir', 'ds_gdb' and a nested 'files'
        mapping holding CSV and feature-class names.
    """
    ap.env.overwriteOutput = True

    date = config['date']
    sign = config['sign']
    acs_year = config['acs_year']
    title_vi_gdb = config['title_vi_gdb']

    csv_dir = config['processed_dir']
    ds_gdb = config['ds_gdb']

    # CSV TABLES
    patterns_name = config['files']['patterns']['name']
    patterns_table = os.path.join(csv_dir, f'{patterns_name}.csv')

    # FEATURE CLASS NAMES
    routes_line = config['files']['feat_classes']['routes']
    route_buffer = config['files']['feat_classes']['route_buffer']
    sys_buffer = config['files']['feat_classes']['sys_buffer']

    # MetroBusRoutes_Buffer and MetroBusSystem_Buffer distances
    buffer_list = [{
        'dist': '0.75 miles',
        'name': '075'
    }, {
        'dist': '0.5 miles',
        'name': '05'
    }, {
        'dist': '0.25 miles',
        'name': '025'
    }]

    # BUFFERING 0.75, 0.5, 0.25 MILES
    # each distance gets its own Title VI population calculation below
    for dist in buffer_list:

        # ROUTE BUFFER
        routes_buffer = f'{route_buffer}{dist["name"]}_{sign}_{date}'

        ap.Buffer_analysis(routes_line, routes_buffer, dist['dist'], "FULL",
                           "ROUND", "NONE")
        print('Routes Buffered')

        # PATTERNS GROUP
        patterns_pd = pd.read_csv(patterns_table).groupby([
            'RouteAbbr', 'LineName', 'PubNum', 'LineNum', 'ShapeID', 'DirName'
        ]).mean()
        # DataFrame.drop is NOT in-place: the original discarded this
        # result, so the coordinate columns were never actually removed.
        patterns_pd = patterns_pd.drop(
            ['shape_lat', 'shape_lon', 'shape_pt_sequence'], axis=1)
        print('Unique Routes table created')

        # SYSTEM BUFFER (dissolves the route buffers)
        mb_sys_buffer = f'{sys_buffer}{dist["name"]}_{sign}_{date}'

        ap.Dissolve_management(routes_buffer, mb_sys_buffer)
        print('System Buffered')
        # Constant 'type' field used later as the join key for the
        # dissolved population totals.
        ap.AddField_management(mb_sys_buffer, 'type', 'TEXT')
        ap.CalculateField_management(mb_sys_buffer, 'type', '"system"')

        # TITLE VI POPULATION ANALYSIS

        # TITLE VI ANALYSIS FOR STANDARD FILES
        # ACS INPUT, TOTAL POPULATION FIELD, DENSITY POPULATION COUNT
        # takes the data from the Title VI fields and calculates the
        # population density for specific groups in order to get the total
        # population of each group
        acs_list = [{
            'file_name': f'Minority{acs_year}_Final',
            'pop': 'TPop',
            'field': f'ClipPop{dist["name"]}',
            'calc': '(!TPop!/!SqMiles!)'
        }, {
            'file_name': f'Minority{acs_year}_Final',
            'pop': 'TMinority',
            'field': f'ClipMin{dist["name"]}',
            'calc': '!MinorityDens!'
        }, {
            'file_name': f'LEP{acs_year}_Final',
            'pop': 'TLEP',
            'field': f'ClipLEP{dist["name"]}',
            'calc': '!LEPDens!'
        }, {
            'file_name': f'Poverty{acs_year}_Final',
            'pop': 'TPov',
            'field': f'ClipPov{dist["name"]}',
            'calc': '!PovDens!'
        }, {
            'file_name': f'Senior{acs_year}_Final',
            'pop': 'TSenior',
            'field': f'ClipSen{dist["name"]}',
            'calc': '!SeniorDens!'
        }, {
            'file_name': f'NoCar{acs_year}_Final',
            'pop': 'TNoCar',
            'field': f'ClipNoCar{dist["name"]}',
            'calc': '!NoCarDens!'
        }, {
            'file_name': f'NoCar{acs_year}_Final',
            'pop': 'TLowCar',
            'field': f'ClipLowCar{dist["name"]}',
            'calc': '!LowCarDens!'
        }]

        # LOOP FOR CALCULATING TITLE VI POPULATION BUFFERS
        for acs in acs_list:
            # CALCULATE OUT FOR SYSTEM AND ROUTES BUFFER POPULATIONS

            acs_in = os.path.join(title_vi_gdb, acs['file_name'])
            acs_out = f'{mb_sys_buffer}_{acs["pop"]}'

            print('')
            print('-------------------------')
            print(f'Start of {acs_out} Creation')
            print('-------------------------')
            print('')

            # Clip the ACS polygons to the system buffer, then estimate the
            # clipped population as density * clipped area (sq miles).
            ap.Clip_analysis(acs_in, mb_sys_buffer, acs_out)
            ap.AddFields_management(
                acs_out, [[acs['field'], 'DOUBLE'], ['ClipSqMiles', 'DOUBLE']])
            print(f'Added fields to {acs_out} ')
            ap.CalculateFields_management(
                acs_out, 'PYTHON3',
                [['ClipSqMiles', "!shape.area@squaremiles!"],
                 [acs['field'], f'{acs["calc"]} * !ClipSqMiles!']])
            print(f'Calculated fields for {acs_out}')

            # dissolve out file name
            acs_out_diss = f'{acs_out}_dissolve'

            # Sum the clipped population, copy the total onto the system
            # buffer via the constant 'type' key, then drop the SUM_ field.
            ap.Dissolve_management(acs_out, acs_out_diss, '',
                                   [[acs['field'], 'SUM']])
            ap.AddField_management(acs_out_diss, 'type', 'TEXT')
            ap.CalculateField_management(acs_out_diss, 'type', '"system"')
            ap.JoinField_management(mb_sys_buffer, 'type', acs_out_diss,
                                    'type', f'SUM_{acs["field"]}')
            ap.AddField_management(mb_sys_buffer, acs['field'], 'DOUBLE')
            ap.CalculateField_management(mb_sys_buffer, acs["field"],
                                         f'!SUM_{acs["field"]}!')
            ap.DeleteField_management(mb_sys_buffer, f'SUM_{acs["field"]}')

            # DELETE INTERMEDIATE OUTPUTS
            delete_list = [acs_out, acs_out_diss]
            for d in delete_list:
                ap.Delete_management(d)
# Script settings: overwrite any existing outputs and force the output
# spatial reference. NOTE(review): `env`, `arcpy` and `path` are defined
# earlier in the file (not visible here).
env.overwriteOutput = True
env.outputCoordinateSystem = arcpy.SpatialReference(
    26911)  # Spatial reference NAD 1983 UTM Zone 11N. The code is '26911'

#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------

# Create photo point feature class
arcpy.CreateFeatureclass_management(
    out_path=path,
    out_name="PhotoPoints",
    geometry_type="POINT",
    spatial_reference=env.outputCoordinateSystem)
# Field spec order: [name, type, alias, length]
arcpy.AddFields_management(
    'PhotoPoints',
    [['LAYER', 'TEXT', 'Layer', 25], ['GDE_TYPE', 'TEXT', 'GDE Type', 100],
     ['CAPTION', 'TEXT', 'Caption', 255], [
         'FLICKR', 'TEXT', 'Flickr Link', 255
     ], ['JPEG', 'TEXT', 'JPEG Link', 255]])

# Manually add and attribute photo points
# Link to Flickr, JPEG, caption, and give credit

# Add finished photo points layer to Story Map gdb
photo_points = path + "\\PhotoPoints"
env.workspace = r"K:\GIS3\Projects\GDE\Geospatial\NV_iGDE_Story_061719.gdb"
arcpy.CopyFeatures_management(
    photo_points,
    r"K:\GIS3\Projects\GDE\Geospatial\NV_iGDE_Story_061719.gdb\NV_Photos")

# END
# Per-position roster export: select each country's players by position,
# copy them to a shapefile, and add metric weight/height fields.
# NOTE(review): positionCodes, countryName, nationOfOrigin, countries,
# players and playerPosition are defined earlier in the file.
try:
    for positionCode in positionCodes:
        whereCountry = countryName + " =  '" + nationOfOrigin + "'"
        selectionCountryLayer = arcpy.SelectLayerByAttribute_management(
            countries, 'NEW_SELECTION', whereCountry)
        playersLayer = arcpy.SelectLayerByLocation_management(
            players, 'WITHIN', selectionCountryLayer)
        wherePosition = playerPosition + " =  '" + positionCode + "'"
        playersSubLayer = arcpy.SelectLayerByAttribute_management(
            playersLayer, 'SUBSET_SELECTION', wherePosition)

        # Execute Copy Features and Add Fields
        outputFC = "nhlRoster_" + nationOfOrigin + "_" + positionCode + ".shp"
        arcpy.CopyFeatures_management(playersSubLayer, outputFC)
        fields = ["weight", "weight_kg", "height", "height_cm"]
        arcpy.AddFields_management(
            outputFC, [["weight_kg", "FLOAT"], ["height_cm", "FLOAT"]])

        # Use update cursor to populate metric height and weight fields
        with arcpy.da.UpdateCursor(outputFC, fields) as cursor:
            for row in cursor:
                # Convert weight (lb) to kg
                row[1] = row[0] * 0.453592
                # height looks like 6' 2" — split feet from inches and
                # strip the trailing double-quote
                feet, inches = row[2].rsplit("' ", 1)
                inches = inches[:-1]
                row[3] = ((int(feet) * 12) + int(inches)) * 2.54
                cursor.updateRow(row)
        # The original `del row, cursor` raised NameError for an empty
        # cursor (silently swallowed by the bare except); the with-block
        # already releases the cursor, so it was removed.

except Exception:
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still work.
    print(arcpy.GetMessages())
예제 #14
0
# Look up the field types of the case and sort fields on the source points
# so the matching output fields can be created with the same types.
# NOTE(review): source_points, case_field, sort_field, format_dict,
# output_polygons and msg are defined earlier in the file. format_dict
# presumably maps arcpy field-type names to AddFields type codes — verify.
fields = arcpy.ListFields(source_points)
case_sort_dict = {}
for field in fields:
    if field.name == case_field:
        case_sort_dict['case_type'] = format_dict[field.type]
    elif field.name == sort_field:
        case_sort_dict['sort_type'] = format_dict[field.type]
    else:
        pass
# create fields
arcpy.AddFields_management(
    in_table=output_polygons,
    field_description=[['POINT_FID', 'SHORT'],
                       ['CASE_FIELD', case_sort_dict['case_type']],
                       ['SORT_FIELD', case_sort_dict['sort_type']],
                       ['PRIMARY_VALUE_FROM', 'DOUBLE'],
                       ['PRIMARY_VALUE_TO', 'DOUBLE'],
                       ['PRIMARY_CLASSED_FROM', 'SHORT'],
                       ['PRIMARY_CLASSED_TO', 'SHORT'],
                       ['SECONDARY_VALUE_FROM', 'DOUBLE'],
                       ['SECONDARY_VALUE_TO', 'DOUBLE'],
                       ['POLYGON_ANGLE', 'DOUBLE']])
msg('Output feature class created.')

# open search cursor on input points to gather geometry
index = 0
feature_dict = {}
with arcpy.da.SearchCursor(in_table=source_points,
                           field_names=[
                               'OBJECTID', case_field, sort_field, primary_p,
                               secondary_p, 'SHAPE@X', 'SHAPE@Y'
                           ],
예제 #15
0
def convertStreets(Project_Folder, us_counties):
    """Build the mapmaker Streets network from HERE inputs.

    Simplifies the Streets lines, then joins/derives Z-level, city
    (Adminbndy4), county, state, one-way, speed and CFCC attributes, and
    exports the result as Streets_Final in Model_Outputs.gdb.

    Args:
        Project_Folder: folder holding Model_Inputs.gdb and Model_Outputs.gdb.
        us_counties: county-level polygons (e.g. TIGER) carrying GEOID and
            NAME fields.

    Returns:
        str: path of the exported Streets_Final feature class.
    """
    arcpy.env.overwriteOutput = True

    Model_Inputs_gdb = os.path.join(Project_Folder, 'Model_Inputs.gdb')
    Model_Outputs_gdb = os.path.join(Project_Folder, 'Model_Outputs.gdb')

    streets = os.path.join(Model_Inputs_gdb, 'Streets')
    zlevels = os.path.join(Model_Inputs_gdb, 'Zlevels')
    adminbound4 = os.path.join(Model_Inputs_gdb, 'Adminbndy4')

    arcpy.env.workspace = Model_Inputs_gdb

    # Simplify Streets lines (POINT_REMOVE, 5 ft) into the outputs gdb.
    streets_simple = arcpy.SimplifyLine_cartography(
        in_features=streets,
        out_feature_class=os.path.join(Model_Outputs_gdb, "Streets_Simple"),
        algorithm="POINT_REMOVE", tolerance="5 Feet",
        error_resolving_option="RESOLVE_ERRORS",
        collapsed_point_option="KEEP_COLLAPSED_POINTS",
        error_checking_option="CHECK", in_barriers=[])[0]

    # Add the full mapmaker schema up front; the CalculateField calls below
    # must reference these exact names (PlaceNamL/R, CountyNamL/R, ...).
    arcpy.AddFields_management(
        in_table=streets_simple,
        field_description=[["REF_ZLEV", "LONG", "", "", "", ""],
                           ["NREF_ZLEV", "LONG", "", "", "", ""],
                           ["PlaceCodeL", "LONG", "", "", "", ""],
                           ["PlaceCodeR", "LONG", "", "", "", ""],
                           ["PlaceNamL", "TEXT", "", "255", "", ""],
                           ["PlaceNamR", "TEXT", "", "255", "", ""],
                           ["CountyCodeL", "LONG", "", "", "", ""],
                           ["CountyCodeR", "LONG", "", "", "", ""],
                           ["CountyNamL", "TEXT", "", "255", "", ""],
                           ["CountyNamR", "TEXT", "", "255", "", ""],
                           ["StateCodeL", "LONG", "", "", "", ""],
                           ["StateCodeR", "LONG", "", "", "", ""],
                           ["StateAbbrL", "TEXT", "", "255", "", ""],
                           ["StateAbbrR", "TEXT", "", "255", "", ""],
                           ["OneWay", "SHORT", "", "", "", ""],
                           ["Speed", "LONG", "", "", "", ""],
                           ["CFCC", "TEXT", "", "255", "", ""],
                           ["M_LINK_ID", "LONG", "", "", "", ""],
                           ["OLD_LINK_ID", "LONG", "", "", "", ""]])

    print('Fields added to Streets')

    # Turning restrictions: Z-level at the reference end node.
    arcpy.JoinField_management(in_data=streets_simple, in_field="REF_IN_ID",
                               join_table=zlevels, join_field="NODE_ID",
                               fields=["Z_LEVEL"])
    arcpy.CalculateField_management(
        in_table=streets_simple, field="REF_ZLEV",
        expression="zlevCalc(!Z_LEVEL!)", expression_type="PYTHON3",
        code_block="""def zlevCalc(z):
    if(z != 0):
        return z
    else:
        return 0""")
    # BUG FIX: the joined field is named Z_LEVEL, not ZLEVEL. Dropping the
    # wrong name left Z_LEVEL in place, so the NREF join below came in as
    # Z_LEVEL_1 and NREF_ZLEV was calculated from the stale REF-side values.
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["Z_LEVEL"])
    print('REF_ZLEV Calculated')

    # Z-level at the non-reference end node.
    arcpy.JoinField_management(in_data=streets_simple, in_field="NREF_IN_ID",
                               join_table=zlevels, join_field="NODE_ID",
                               fields=["Z_LEVEL"])
    arcpy.CalculateField_management(
        in_table=streets_simple, field="NREF_ZLEV",
        expression="zlevCalc(!Z_LEVEL!)", expression_type="PYTHON3",
        code_block="""def zlevCalc(z):
    if(z != 0):
        return z
    else:
        return 0""")
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["Z_LEVEL"])
    print('NREF_ZLEV Calculated')

    # City (Adminbndy4) attributes — right side.
    arcpy.JoinField_management(in_data=streets_simple, in_field="R_AREA_ID",
                               join_table=adminbound4, join_field="AREA_ID",
                               fields=["AREA_ID", "POLYGON_NM"])
    arcpy.CalculateField_management(
        in_table=streets_simple, field="PlaceCodeR",
        expression="!AREA_ID!", expression_type="PYTHON3")
    # BUG FIX: the field was "PlaceNameR", which is not in the schema above —
    # Calculate Field silently created a new field and left PlaceNamR empty.
    # NOTE(review): unlike the left side, this branch does not upper-case the
    # name — confirm whether that asymmetry is intentional.
    arcpy.CalculateField_management(
        in_table=streets_simple, field="PlaceNamR",
        expression="placeNameCalc(!POLYGON_NM!)", expression_type="PYTHON3",
        code_block="""def placeNameCalc(name):
    if name == 'ST LOUIS':
        return 'ST LOUIS CITY'
    else:
        return name""")
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["AREA_ID", "POLYGON_NM"])

    # City attributes — left side.
    arcpy.JoinField_management(in_data=streets_simple, in_field="L_AREA_ID",
                               join_table=adminbound4, join_field="AREA_ID",
                               fields=["AREA_ID", "POLYGON_NM"])
    arcpy.CalculateField_management(
        in_table=streets_simple, field="PlaceCodeL",
        expression_type="PYTHON3", expression="!AREA_ID!")
    # BUG FIX: "PlaceNameL" -> "PlaceNamL" (match the schema field).
    arcpy.CalculateField_management(
        in_table=streets_simple, field="PlaceNamL",
        expression_type="PYTHON3", expression="placeNameCalc(!POLYGON_NM!)",
        code_block="""def placeNameCalc(name):
    if name == 'ST LOUIS':
        return 'ST LOUIS CITY'
    else:
        return name.upper()""")
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["AREA_ID", "POLYGON_NM"])
    print('Cities Calculated')

    # County attributes via a spatial join against the county polygons.
    county_streets = arcpy.SpatialJoin_analysis(
        streets_simple, us_counties, "county_streets")[0]
    # US_COUNTIES needs to be TIGER or County level shapefile that has GEOID's
    arcpy.JoinField_management(in_data=streets_simple, in_field="LINK_ID",
                               join_table=county_streets, join_field="LINK_ID",
                               fields=["GEOID", "NAME"])
    # BUG FIX: fields were "CountyNameR"/"CountyNameL"; the schema fields are
    # CountyNamR/CountyNamL.
    arcpy.CalculateField_management(
        in_table=streets_simple, field="CountyNamR",
        expression="placeNameCalc(!GEOID!, !NAME!)", expression_type="PYTHON3",
        code_block="""def placeNameCalc(geoid, name):
    if geoid == '29189':
        return 'ST LOUIS'
    elif geoid == '29510':
        return 'ST LOUIS CITY'
    elif geoid == '17163':
        return 'ST CLAIR'
    else:
        return name.upper()""")

    arcpy.CalculateField_management(
        in_table=streets_simple, field="CountyNamL",
        expression="placeNameCalc(!GEOID!, !NAME!)", expression_type="PYTHON3",
        code_block="""def placeNameCalc(geoid, name):
    if geoid == '29189':
        return 'ST LOUIS'
    elif geoid == '29510':
        return 'ST LOUIS CITY'
    elif geoid == '17163':
        return 'ST CLAIR'
    else:
        return name.upper()""")

    arcpy.CalculateField_management(
        in_table=streets_simple, field="CountyCodeR",
        expression="!GEOID!", expression_type="PYTHON3")
    arcpy.CalculateField_management(
        in_table=streets_simple, field="CountyCodeL",
        expression="!GEOID!", expression_type="PYTHON3")

    print("County Calculated")

    # State code = first two digits of the county GEOID; abbreviation is MO
    # for FIPS 29, otherwise IL (two-state study area).
    arcpy.CalculateField_management(
        in_table=streets_simple, field="StateCodeL",
        expression_type="PYTHON3", expression="!GEOID![0:2]")
    arcpy.CalculateField_management(
        in_table=streets_simple, field="StateAbbrL",
        expression_type="PYTHON3", expression="stateAbbr(!StateCodeL!)",
        code_block="""def stateAbbr(statecode):
    if statecode == 29:
        return 'MO'
    else:
        return 'IL' """)
    arcpy.CalculateField_management(
        in_table=streets_simple, field="StateCodeR",
        expression_type="PYTHON3", expression="!GEOID![0:2]")
    arcpy.CalculateField_management(
        in_table=streets_simple, field="StateAbbrR",
        expression_type="PYTHON3", expression="stateAbbr(!StateCodeR!)",
        code_block="""def stateAbbr(statecode):
    if statecode == 29:
        return 'MO'
    else:
        return 'IL' """)

    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["GEOID", "NAME"])

    # One Way Calculation
    # T = >
    # F = <
    # if blank is not a one way road and returns blank
    # NOTE(review): OneWay is declared SHORT in the schema above but this
    # writes the strings ">"/"<" — confirm the intended field type.
    arcpy.CalculateField_management(
        in_table=streets_simple, field="OneWay",
        expression="oneWCalc(!DIR_TRAVEL!)", expression_type="PYTHON3",
        code_block="""def oneWCalc(dir):
    if(dir == "T"):
        return ">"
    elif(dir == "F"):
        return "<"
    else:
        return '' """)

    # Speed: use the to- or from-direction limit depending on travel direction.
    arcpy.CalculateField_management(
        in_table=streets_simple, field="Speed",
        expression="speedCalc(!DIR_TRAVEL!,!TO_SPD_LIM!,!FR_SPD_LIM!)",
        expression_type="PYTHON3",
        code_block="""def speedCalc(dir, toSpeed, fromSpeed):
    if(dir == 'T'):
        return toSpeed
    else:
        return fromSpeed """)
    print('OneWay Calculated')

    # Fill remaining null speeds from the HERE SPEED_CAT documentation values.
    # BUG FIX: the original code block returned None whenever Speed was
    # already populated, wiping every previously calculated speed; it now
    # passes non-null speeds through unchanged.
    arcpy.CalculateField_management(
        in_table=streets_simple, field="Speed",
        expression="nullSpeedCalc(!Speed!, !SPEED_CAT!)",
        expression_type="PYTHON3",
        code_block="""def nullSpeedCalc(speed, cat):
    if(speed is None):
        if(cat == '8'):
            return 15
        elif(cat == '7'):
            return 20
        elif(cat == '6'):
            return 25
        elif(cat == '5'):
            return 35
    return speed""")
    print('Speed Calculated')

    # Calculate Functional Classes

    # TODO: REVIEW FUNCTIONAL CLASS CALCULATION
    # functional classes that adhere to the map maker specification
    arcpy.CalculateField_management(
        in_table=streets_simple, field="CFCC",
        expression="cfccCalc(!FUNC_CLASS!)", expression_type="PYTHON3",
        code_block="""def cfccCalc(fClass):
    if(fClass == 1):
        return 'A10'
    elif(fClass == 2):
        return 'A20'
    elif(fClass == 3):
        return 'A30'
    elif(fClass == 4 or fClass == 5):
        return 'A40' """)
    print('CFCC Calculated')

    # TODO: reassess calculation
    # Preserve the original ids before the schema update renumbers them.
    arcpy.CalculateFields_management(
        in_table=streets_simple, expression_type="PYTHON3",
        fields=[["M_LINK_ID", "!OBJECTID!"], ["OLD_LINK_ID", "!LINK_ID!"]],
        code_block="")

    # updated the schema to match mapmaker schema
    updateSchema(streets_simple)

    return arcpy.FeatureClassToFeatureClass_conversion(
        in_features=streets_simple, out_path=Model_Outputs_gdb,
        out_name="Streets_Final")[0]
# Create layers/tables in the empty geodatabase

# Define spatial reference for the geodatabase
# http://pro.arcgis.com/en/pro-app/arcpy/classes/pdf/projected_coordinate_systems.pdf
NAD83UTM11N = arcpy.SpatialReference(
    26911)  # Spatial reference NAD 1983 UTM Zone 11N. The WKID is '26911'
# NOTE(review): exportToString() returns the WKT string; the result is
# discarded here, so nothing is actually printed.
NAD83UTM11N.exportToString()  # Print detailed spatial reference information

# Phreatophyte - polygon feature class
arcpy.CreateFeatureclass_management(out_path=gdb,
                                    out_name="Phreatophytes",
                                    geometry_type="POLYGON",
                                    spatial_reference=NAD83UTM11N)
# Field description rows are [name, type, alias, length].
arcpy.AddFields_management('Phreatophytes',
                           [['SOURCE_CODE', 'TEXT', 'Source Code', 10],
                            ['PHR_TYPE', 'TEXT', 'Phreatophyte Type', 55],
                            ['PHR_GROUP', 'TEXT', 'Phreatophyte Group', 20],
                            ['COMMENTS', 'TEXT', 'Comments', 200]])
[f.name for f in arcpy.ListFields(gdb + "\\Phreatophytes")
 ]  # List all of the field names in the Phreatophytes layer

# Springs - point feature class
arcpy.CreateFeatureclass_management(out_path=gdb,
                                    out_name="Springs",
                                    geometry_type="POINT",
                                    spatial_reference=NAD83UTM11N)
# NOTE(review): this call is truncated in the source — the remainder of the
# field-description list is missing from this chunk.
arcpy.AddFields_management(
    'Springs',
    [['SOURCE_CODE', 'TEXT', 'Source Code', 10],
     ['SPRING_ID', 'LONG', 'Spring ID'],
     ['SPRING_NAME', 'TEXT', 'Spring Name', 255],
Example #17
0
    stats_table_fields = arcpy.ListFields(in_mem_stats_tbl)
    for f in stats_table_fields:
        if not f.required:
            alias = f.aliasName
            field_type = f.type
            if f.type in add_field_type_map.keys():
                field_type = add_field_type_map[f.type]

            stats_tbl_fields.append([f.name, field_type, alias, f.length])

            if f.name == in_pxw_join_field:
                join_field_type = field_type

    arcpy.SetProgressor('default', 'Adding fields to output feature class ...')
    # add the fields
    arcpy.AddFields_management(final_output_fc_path, stats_tbl_fields)

    stats_table_fields_list = [f.name for f in stats_table_fields]
    stats_table_fields_list.insert(0, 'SHAPE@')
    # final_outfc_fields = ','.join(stats_table_fields_list)

    cnt = int(arcpy.GetCount_management(in_mem_stats_tbl)[0])
    arcpy.SetProgressor('step',
                        f'Inserting {cnt} rows into output feature class ...',
                        0, cnt, 1)
    # add features with geometry to the output feature class
    counter = 1
    with arcpy.da.SearchCursor(in_mem_stats_tbl, '*') as cursor:
        for row in cursor:
            arcpy.SetProgressorPosition(counter)
            arcpy.SetProgressorLabel(
def Model():  # Model
    """ModelBuilder export: derive a mean aircraft-noise hexagon raster.

    Joins an aircraft database onto May-2020 air-traffic points, classifies
    each point's aircraft size class and noise level, rasterizes the mean
    noise, and aggregates it into a hexagonal tessellation raster.
    """
    # To allow overwriting outputs change overwriteOutput option to True.
    arcpy.env.overwriteOutput = False

    arcpy.ImportToolbox(r"c:\program files\arcgis\pro\Resources\ArcToolbox\toolboxes\Data Management Tools.tbx")
    Historical_Airtraffic_Data_may = "May\\airtraff_may2020_a"
    aircraft_db_csv = "aircraft_db.csv"
    Mean_NoiseMay2020_hexagon_2_ = "May\\Mean_NoiseMay2020_hexagon"

    # Process: Add Join (Add Join)
    Joined_airtraffic_data = arcpy.AddJoin_management(in_layer_or_view=Historical_Airtraffic_Data_may, in_field="icao24", join_table=aircraft_db_csv, join_field="icao", join_type="KEEP_ALL")[0]

    # Process: Select Layer By Attribute (Select Layer By Attribute)
    # NOTE(review): this selects rows where mdl IS NULL, yet the variable
    # name suggests null rows should be excluded — confirm the where clause
    # or the invert_where_clause setting.
    Eliminated_null_rows, Count = arcpy.SelectLayerByAttribute_management(in_layer_or_view=Joined_airtraffic_data, selection_type="NEW_SELECTION", where_clause="mdl IS NULL", invert_where_clause="")

    # Process: Generate Tessellation (Generate Tessellation)
    # NOTE(review): GenerateTessellation is called as a bare name — confirm
    # it is bound by the ImportToolbox call above or import it explicitly.
    Hexagonal_grid_per_1_km2 = "C:\\Users\\Dell\\Documents\\ArcGIS\\Projects\\mar apr may\\mar apr may.gdb\\GenerateTessellation"
    GenerateTessellation(Output_Feature_Class=Hexagonal_grid_per_1_km2, Extent="12.200035679703 47.3000064103676 13.700035679703 48.2000064103675", Shape_Type="HEXAGON", Size="0.0225 Unknown", Spatial_Reference="GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119521E-09;0.001;0.001;IsHighPrecision")

    # Process: Add Fields (multiple) (Add Fields (multiple))
    Added_new_fields = arcpy.AddFields_management(in_table=Eliminated_null_rows, field_description=[["size_class", "TEXT", "", "255", "", ""], ["noiselevel", "LONG", "", "", "", ""]])[0]

    # Process: Classify size based on aircraft model (Calculate Field)
    # FIX: the exported code_block literals were plain double-quoted strings
    # spanning multiple lines (a SyntaxError); they are now triple-quoted.
    Classified_size_class = arcpy.CalculateField_management(
        in_table=Added_new_fields, field="size_class",
        expression="Reclass(!mdl!)", expression_type="PYTHON3",
        code_block="""\
# Reclassify values to another value
# More calculator examples at esriurl.com/CalculatorExamples
def Reclass(mdl):
    if mdl in ("a400", "b757"):
        return "large"
    elif mdl in ("a319", "a320", "a321", "b733", "b737", "b738", "b739",
                 "b752", "bcs3", "crj2", "rj1h", "dh8d", "fa8x"):
        return "medium"
    else:
        return "small"
""", field_type="TEXT")[0]

    # Process: Classify noise based on size (Calculate Field)
    # FIX: added the missing closing parenthesis in the expression, replaced
    # the identity test `is "large"` with `==`, and removed a duplicated
    # elif branch (large, 11000-15000 ft appeared twice).
    Classified_noiselevel = arcpy.CalculateField_management(
        in_table=Classified_size_class, field="noiselevel",
        expression="Reclass(!size_class!,!altitude!)", expression_type="PYTHON3",
        code_block="""\
def Reclass(size_class, altitude):
    if size_class == "large":
        if altitude <= 11000:
            return 60
        elif altitude <= 15000:
            return 52
        elif altitude <= 16000:
            return 43
        else:
            return 42
    elif size_class == "medium":
        if altitude <= 4000:
            return 71
        elif altitude <= 6000:
            return 70
        elif altitude <= 9000:
            return 60
        elif altitude <= 15000:
            return 59
        elif altitude <= 16000:
            return 46
        else:
            return 36
    else:  # small
        if altitude <= 4000:
            return 67
        elif altitude <= 5000:
            return 54
        else:
            return 43
""", field_type="TEXT")[0]

    # Process: Point to Raster (Mean aggregated) (Point to Raster)
    Mean_Noise_may_2020 = "C:\\Users\\Dell\\Documents\\ArcGIS\\Projects\\mar apr may\\mar apr may.gdb\\airtraff_may2020_a_PointToRaster"
    arcpy.PointToRaster_conversion(in_features=Classified_noiselevel, value_field="noiselevel", out_rasterdataset=Mean_Noise_may_2020, cell_assignment="MEAN", priority_field="NONE", cellsize="0.001")

    # Process: extract cell value (Raster to Point)
    Points_represent_noiselevel = "C:\\Users\\Dell\\Documents\\ArcGIS\\Projects\\mar apr may\\mar apr may.gdb\\RasterT_airtraf1"
    arcpy.RasterToPoint_conversion(in_raster=Mean_Noise_may_2020, out_point_features=Points_represent_noiselevel, raster_field="VALUE")

    # Process: Feature To Polygon (Feature To Polygon)
    Mean_NoiseMay2020_hexagon = "C:\\Users\\Dell\\Documents\\ArcGIS\\Projects\\mar apr may\\mar apr may.gdb\\GenerateTessellation_Feature"
    arcpy.FeatureToPolygon_management(in_features=[Hexagonal_grid_per_1_km2], out_feature_class=Mean_NoiseMay2020_hexagon, cluster_tolerance="", attributes="ATTRIBUTES", label_features=Points_represent_noiselevel)

    # Process: Polygon to Raster (Polygon to Raster)
    # NOTE(review): the guard tests a non-empty path string, so it is always
    # true — a ModelBuilder precondition artifact.
    Mean_NoiseZone_may2020 = "C:\\Users\\Dell\\Documents\\ArcGIS\\Projects\\mar apr may\\mar apr may.gdb\\GenerateTessellation_Feature_PolygonToRaster"
    if Mean_NoiseMay2020_hexagon:
        arcpy.PolygonToRaster_conversion(in_features=Mean_NoiseMay2020_hexagon_2_, value_field="grid_code", out_rasterdataset=Mean_NoiseZone_may2020, cell_assignment="CELL_CENTER", priority_field="NONE", cellsize="0.02")
# Compute the WGS84 bounding extent of `infeature`: project it from
# CGCS2000 Gauss-Kruger to WGS84, write per-feature extent coordinates,
# then summarize them into a one-row min/max statistics table.
# NOTE(review): `infeature` and `temptfeature` are defined earlier in the
# file (outside this chunk).
tempttable = infeature + 'tempttable'

arcpy.Project_management(
    in_dataset=infeature,
    out_dataset=temptfeature,
    out_coor_system=
    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]",
    transform_method=[],
    in_coor_system=
    "PROJCS['CGCS2000_3_Degree_GK_Zone_35',GEOGCS['GCS_China_Geodetic_Coordinate_System_2000',DATUM['D_China_2000',SPHEROID['CGCS2000',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Gauss_Kruger'],PARAMETER['False_Easting',35500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',105.0],PARAMETER['Scale_Factor',1.0],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]]",
    preserve_shape="NO_PRESERVE_SHAPE",
    max_deviation="",
    vertical="NO_VERTICAL")
# Per-feature envelope coordinates in the projected (WGS84) space.
arcpy.AddFields_management(
    in_table=temptfeature,
    field_description=[["Left", "DOUBLE", "", "", "", ""],
                       ["Bottom", "DOUBLE", "", "", "", ""],
                       ["Right", "DOUBLE", "", "", "", ""],
                       ["Top", "DOUBLE", "", "", "", ""]])
arcpy.CalculateGeometryAttributes_management(
    temptfeature, [["Left", "EXTENT_MIN_X"], ["Bottom", "EXTENT_MIN_Y"],
                   ["Right", "EXTENT_MAX_X"], ["Top", "EXTENT_MAX_Y"]])
# Collapse the per-feature extents into the overall dataset extent.
arcpy.Statistics_analysis(in_table=temptfeature,
                          out_table=tempttable,
                          statistics_fields=[["Left",
                                              "MIN"], ["Bottom", "MIN"],
                                             ["Right", "MAX"], ["Top", "MAX"]],
                          case_field=[])

cursor = arcpy.SearchCursor(tempttable)

# NOTE(review): the loop body is truncated in the source — this chunk ends
# mid-statement.
for row in cursor:
# Summarize the total geodesic area of fully no-take protected areas.
arcpy.Statistics_analysis("notakeall_diss_project","sum_NOTAKEall",[["AREA_GEO","SUM"]])

# Summarize the reported no-take area of partially no-take sites.
arcpy.Select_analysis("all_wdpa_polybuffpnt", r"in_memory\notake_part","NO_TAKE = 'Part'")
arcpy.Statistics_analysis(r"in_memory\notake_part","sum_NOTAKEpart",[["NO_TK_AREA","SUM"]])

# NOTE(review): time.clock() was removed in Python 3.8 — if this runs under
# ArcGIS Pro (Python 3), this line and the matching `start = time.clock()`
# (set earlier, outside this chunk) should use time.perf_counter() instead.
elapsed_hours = (time.clock() - start)/3600
print(("Stage 1 took " + str(elapsed_hours) + " hours"))

##-------------------------------------------------------------------------------------------------------------------------
#Stage 2: National and National PAME analysis

print ("Stage 2 of 2: National & National PAME Analyses")

# create the summary tables for appending in individual national summary statistics
out_national_current_schema = arcpy.CreateTable_management(workspace,"out_national_current_schema")
arcpy.AddFields_management(out_national_current_schema,[['WDPA_ISO3','TEXT'],['type','TEXT'],['FREQUENCY','LONG'],['SUM_AREA_GEO','DOUBLE']])

out_national_temporal_schema = arcpy.CreateTable_management(workspace,"out_national_temporal_schema")
arcpy.AddFields_management(out_national_temporal_schema,[['WDPA_ISO3','TEXT'],['MIN_STATUS_YR','DOUBLE'],['type','TEXT'],['FREQUENCY','LONG'],['SUM_AREA_GEO','DOUBLE']])

out_national_current_schema_pame = arcpy.CreateTable_management(workspace,"out_national_current_schema_pame")
arcpy.AddFields_management(out_national_current_schema_pame,[['WDPA_ISO3','TEXT'],['type','TEXT'],['FREQUENCY','LONG'],['SUM_AREA_GEO','DOUBLE']])

out_national_temporal_schema_pame = arcpy.CreateTable_management(workspace,"out_national_temporal_schema_pame")
arcpy.AddFields_management(out_national_temporal_schema_pame,[['WDPA_ISO3','TEXT'],['MIN_STATUS_YR','DOUBLE'],['type','TEXT'],['FREQUENCY','LONG'],['SUM_AREA_GEO','DOUBLE']])

# join pame list to polybuffpnt
# (positional JoinField args: in_data, in_field, join_table, join_field, fields)
arcpy.JoinField_management("all_wdpa_polybuffpnt","WDPAID",in_pame_sites,"wdpa_id","evaluation_id")

# update field (0) for those that don't have id
# NOTE(review): in_codeblock1 is defined earlier in the file, outside this chunk.
arcpy.CalculateField_management("all_wdpa_polybuffpnt","evaluation_id","updateValue(!evaluation_id!)","PYTHON_9.3", in_codeblock1)
Example #21
0
def idRoutes(year, root_dir, routes, final_gdb_loc):
    gdb = f"IdentifiedRoutes{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)  # -----> Change

    ap.ClearWorkspaceCache_management()

    working_gdb = ap.env.workspace
    working_file = "IdentifiedRoutes_working"

    # Get input demographic feature classes from previous function outputs
    # minority_gdb = os.path.join(root_dir, f"Minority{year}.gdb")  # -----> Change Year
    # poverty_gdb = os.path.join(root_dir, f"Poverty{year}.gdb")  # -----> Change Year
    # lep_gdb = os.path.join(root_dir, f"LEP{year}.gdb")
    minority_file = os.path.join(final_gdb_loc, f"Minority{year}_Final")
    # minority_file = os.path.join(minority_gdb, f"Minority{year}_Final")
    poverty_file = os.path.join(final_gdb_loc, f"Poverty{year}_Final")
    # poverty_file = os.path.join(poverty_gdb, f"Poverty{year}_Final")
    lep_file = os.path.join(final_gdb_loc, f"LEP{year}_Final")
    medhhinc_file = os.path.join(final_gdb_loc, f"MedHHInc{year}_Final")
    # lep_file = os.path.join(lep_gdb, f"LEP{year}_Final")

    # Working feature classes
    minority_working_file = f"Minority{year}_BG"
    poverty_working_file = f"Poverty{year}_BG"
    lep_working_file = f"LEP{year}_BG"
    medhhinc_working_file = f"MedHHInc{year}_BG"

    routes_file = f"IdentifiedRoutes{year}"
    routes_working = os.path.join(working_gdb, routes_file)

    # define inputs for the for loop - one set for each demographic category
    working_list = [
        {
            "org_file":
            minority_file,  # input feature class
            "working_file":
            minority_working_file,  # working feature class for calcs
            "identified_field":
            "RegMinBG",  # field containing the threshold value for the region
            "add_fields": [['MinorityLength', 'double'],
                           ['PMinority', 'double'], ['MinorityRoute', 'SHORT']]
        },  # route fields to be added
        {
            "org_file":
            poverty_file,
            "working_file":
            poverty_working_file,
            "identified_field":
            "RegPovBG",
            "add_fields": [['PovertyLength', 'double'], ['PPoverty', 'double'],
                           ['PovertyRoute', 'SHORT']]
        },
        {
            "org_file":
            medhhinc_file,
            "working_file":
            medhhinc_working_file,
            "identified_field":
            "RegBelMedInc",
            "add_fields": [['MedHHIncLength', 'double'],
                           ['PMedHHInc', 'double'], ['MedHHIncRoute', 'SHORT']]
        },
        {
            "org_file":
            lep_file,
            "working_file":
            lep_working_file,
            "identified_field":
            "RegAbvLEP",
            "add_fields": [['LEPLength', 'double'], ['PLEP', 'double'],
                           ['LEPRoute', 'SHORT']]
        }
    ]

    # ! is this a helper function now
    if os.path.exists(working_gdb) and os.path.isdir(working_gdb):
        shutil.rmtree(working_gdb)
        print(f"{gdb} DELETED!!!")

    # CREATE WORKING GDB
    ap.CreateFileGDB_management(root_dir, gdb)
    print("GEODATABASE CREATED!!!")

    # CREATE WORKING MINORITY, POVERTY AND ROUTES FEATURE CLASSES
    ap.FeatureClassToFeatureClass_conversion(routes, working_gdb, routes_file)
    print("FEATURE CLASS CREATED!!!")

    ap.AddFields_management(routes_working, [['FullLength', 'double']])
    print('INTIIAL FIELDS ADDED TO ROUTES_WORKING FILE!!!')

    ap.CalculateFields_management(routes_working, 'PYTHON3',
                                  [['FullLength', '!shape.length@miles!']])
    print('CALCULATE FULL LENGTH OF ROUTES!!!')

    # loop through each demographic category, first collecting inputs from the working list,
    # then
    for item in working_list:
        # WORKING LIST ITEM DEFINITIONS
        org_file = item["org_file"]
        working_file = item["working_file"]
        identified_field = item["identified_field"]
        add_fields = item["add_fields"]
        routes_analysis = "routes_" + str(working_file)
        length_field = add_fields[0][0]
        percent_field = add_fields[1][0]
        id_field = add_fields[2][0]

        print("")
        print("--------------------------------")
        print("********************************")
        print("START OF " + working_file)
        print("********************************")
        print("--------------------------------")
        print("")

        # FOR LOOP FILE NAME DEFINITIONS
        dissolve_file = str(working_file) + "_dissolve"
        buffer_file = str(dissolve_file) + "_buffer"

        clip_routes = str(routes_analysis) + "_clip"
        dissolve_routes = str(clip_routes) + "_dissolve"

        # FOR LOOP POLYGON AND ROUTE GEOPROCESSING
        selected_bg = str(
            identified_field
        ) + " = 1"  # "where" expression filtering for identified blockgroups
        print(selected_bg)
        ap.FeatureClassToFeatureClass_conversion(org_file, working_gdb,
                                                 working_file, selected_bg)
        print(working_file + " CREATED!!!")

        ap.FeatureClassToFeatureClass_conversion(routes_working, working_gdb,
                                                 routes_analysis)
        print(routes_analysis + " FILE CREATED!!!")

        ap.Dissolve_management(working_file, dissolve_file,
                               '')  # dissolve all into one shape
        print(dissolve_file + " CREATED!!!")

        ap.Buffer_analysis(dissolve_file, buffer_file,
                           "50 feet")  # buffer by 50 feet
        print(buffer_file + " CREATED!!!")

        ap.Clip_analysis(routes_working, buffer_file,
                         clip_routes)  # clip routes using the dissolve shape
        print(clip_routes + " CREATED!!!")

        # calculate length of route inside identified blockgroups and compare to total length
        ap.AddField_management(clip_routes, "IdLength", "double")
        print("IdLength Field Added for " + working_file)

        ap.CalculateField_management(clip_routes, "IdLength",
                                     "!shape.geodesicLength@miles!")
        print("IdLength Field Calculated for " + working_file)

        ap.Dissolve_management(
            clip_routes, dissolve_routes, 'LineAbbr',
            [["IdLength", 'sum']])  # collect route pieces by route
        print(clip_routes + " DISSOLVED")

        ap.JoinField_management(routes_working, "LineAbbr", dissolve_routes,
                                "LineAbbr",
                                ["SUM_IdLength"])  # join and sum ID'ed length
        print(routes_working + " JOINED WITH " + dissolve_routes)

        ap.AddFields_management(routes_working, add_fields)
        print("FIELDS ADDED TO " + routes_working)

        # compute percentage of total that is ID'ed then flag if greater than 0.33
        ap.CalculateFields_management(
            routes_working, 'PYTHON3',
            [[length_field, '!SUM_IdLength!'],
             [percent_field, f'percent(!{length_field}!, !FullLength!)']],
            '''def percent(calc, full):
                                        if calc is None:
                                            return 0
                                        else:
                                            return calc / full
                                    ''')
        ap.CalculateFields_management(
            routes_working, 'PYTHON3',
            [[id_field, f'ifBlock(!{percent_field}!)']],
            '''def ifBlock(percent):
                                        if percent > 0.33:
                                            return 1
                                        else:
                                            return 0
                                    ''')
        print(routes_working + " FIELDS CALCULATED")

        ap.DeleteField_management(routes_working, "SUM_IdLength")
        print("IdLength Field Deleted")

    ## loop end ##

    ap.ClearWorkspaceCache_management()

    deleteFeatureClass(routes_file, final_gdb_loc)

    # CREATE FINAL FEATURE CLASS
    ap.FeatureClassToFeatureClass_conversion(routes_file, final_gdb_loc,
                                             routes_file)
    print("---------------------------")