def polygons_in_zones(zone_fc, zone_field, polygons_of_interest, output_table, interest_selection_expr):
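    """Summarize polygons of interest within zones: tabulate the overlapping area in
    hectares (Poly_Ha) and percent (Poly_Pct), count intersecting polygons per zone
    (Poly_n), and compute their density per hectare of zone area (Poly_nperha),
    writing one output record per zone to output_table."""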
    old_workspace = arcpy.env.workspace
    arcpy.env.workspace = 'in_memory'
    arcpy.SetLogHistory(False)
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(102039)
    selected_polys = 'selected_polys'
    # work around leftover in_memory output when re-running from the interactive Python window
    if arcpy.Exists(selected_polys):
        arcpy.env.overwriteOutput = True

    arcpy.AddMessage('Copying/selecting polygon features...')
    if interest_selection_expr:
        arcpy.Select_analysis(polygons_of_interest, selected_polys, interest_selection_expr)
    else:
        arcpy.CopyFeatures_management(polygons_of_interest, selected_polys)

    # use tabulate intersection for the areas overlapping
    arcpy.AddMessage('Tabulating intersection between zones and polygons...')
    tab_table = arcpy.TabulateIntersection_analysis(zone_fc, zone_field, selected_polys,
                                        'tabulate_intersection_table')

    # area was calculated in map units (m2), so convert to hectares
    arcpy.AddField_management(tab_table, 'Poly_Ha', 'DOUBLE')
    arcpy.CalculateField_management(tab_table, 'Poly_Ha', '!AREA!/10000', 'PYTHON')


    # just change the name of the percent field
    arcpy.AlterField_management(tab_table, 'PERCENTAGE', 'Poly_Pct')

    # Now just get the count as there is no other area metric anymore
    spjoin_fc = arcpy.SpatialJoin_analysis(zone_fc, selected_polys, 'spatial_join_output')
    arcpy.AlterField_management(spjoin_fc, 'Join_Count', 'Poly_n')

    # Add the density
    arcpy.AddField_management(spjoin_fc, 'Poly_nperha', 'DOUBLE')
    arcpy.CalculateField_management(spjoin_fc, 'Poly_nperha', '!Poly_n!/!shape.area@hectares!', 'PYTHON')

    arcpy.AddMessage('Refining output...')
    arcpy.JoinField_management(tab_table, zone_field, spjoin_fc, zone_field, ["Poly_n", 'Poly_nperha'])
    final_fields = ['Poly_Ha', 'Poly_Pct', 'Poly_n', 'Poly_nperha']

    # make output nice
    arcpy.env.overwriteOutput = False
    cu.one_in_one_out(tab_table, final_fields, zone_fc, zone_field, output_table)

    cu.redefine_nulls(output_table, final_fields, [0, 0, 0, 0])

    # clean up
    # can't delete all of in_memory because this function is meant to be called from another one that uses in_memory
    for item in [selected_polys, tab_table, spjoin_fc]:
        arcpy.Delete_management(item)
    arcpy.env.workspace = old_workspace

    arcpy.AddMessage('Polygons in zones tool complete.')
    arcpy.SetLogHistory(True)
def stats_area_table(zone_fc,
                     zone_field,
                     in_value_raster,
                     out_table,
                     is_thematic,
                     warn_at_end=False):
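    """Summarize in_value_raster within zones: convert the zone polygons to raster,
    run ZonalStatisticsAsTable (plus TabulateArea when is_thematic), add a Pct_NoData
    field, and write one output record per zone to out_table. Returns [out_table,
    count_diff], where count_diff is the number of input zones missing from the raw
    zonal output. (warn_at_end is accepted but unused in this version.)"""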
    orig_env = arcpy.env.workspace
    arcpy.env.workspace = 'in_memory'
    arcpy.CheckOutExtension("Spatial")
    arcpy.AddMessage("Calculating zonal statistics...")

    # Set up environments for alignment between zone raster and theme raster
    env.snapRaster = in_value_raster
    env.cellSize = in_value_raster
    env.extent = zone_fc

    # TODO: If we experience errors again, add a try/except where the except writes the
    # conversion raster to a scratch workspace instead; that eliminated the errors we
    # were getting several years ago with 10.1, not sure if they will happen still.
    arcpy.PolygonToRaster_conversion(zone_fc, zone_field, 'convert_raster',
                                     'MAXIMUM_AREA')
    env.extent = "MINOF"
    arcpy.sa.ZonalStatisticsAsTable('convert_raster', zone_field,
                                    in_value_raster, 'temp_zonal_table',
                                    'DATA', 'ALL')

    if is_thematic:
        #for some reason env.cellSize doesn't work
        desc = arcpy.Describe(in_value_raster)
        cell_size = desc.meanCellHeight

        # calculate/doit
        arcpy.AddMessage("Tabulating areas...")
        arcpy.sa.TabulateArea('convert_raster', zone_field, in_value_raster,
                              'Value', 'temp_area_table', cell_size)

        # making the output table
        arcpy.CopyRows_management('temp_area_table', 'temp_entire_table')
        zonal_stats_fields = ['COUNT', 'AREA']
        arcpy.JoinField_management('temp_entire_table', zone_field,
                                   'temp_zonal_table', zone_field,
                                   zonal_stats_fields)

        # cleanup
        arcpy.Delete_management('temp_area_table')

    if not is_thematic:
        # making the output table
        arcpy.CopyRows_management('temp_zonal_table', 'temp_entire_table')

    arcpy.AddMessage("Refining output table...")

    # Join to the input zones raster
    arcpy.AddField_management('convert_raster', 'Pct_NoData', 'DOUBLE')
    arcpy.CopyRows_management('convert_raster', 'zones_VAT')
    arcpy.JoinField_management('zones_VAT', zone_field, 'temp_entire_table',
                               zone_field)
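    # COUNT_1 is the joined data-cell count from temp_entire_table (renamed on join
    # because zones_VAT already has a 'Count' field with the total cells per zone)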
    calculate_expr = '100*(1-(float(!COUNT_1!)/!Count!))'
    arcpy.CalculateField_management('zones_VAT', 'Pct_NoData', calculate_expr,
                                    "PYTHON")
    refine_zonal_output('zones_VAT', zone_field, is_thematic)

    # final table gets a record even for no-data zones
    keep_fields = [f.name for f in arcpy.ListFields('zones_VAT')]
    if zone_field.upper() in keep_fields:
        keep_fields.remove(zone_field.upper())
    if zone_field in keep_fields:
        keep_fields.remove(zone_field)
    cu.one_in_one_out('zones_VAT', keep_fields, zone_fc, zone_field, out_table)

    # Convert missing "Pct_NoData" values to 100
    codeblock = """def convert_pct(arg1):
        if arg1 is None:
            return float(100)
        else:
            return arg1"""
    arcpy.CalculateField_management(out_table, 'Pct_NoData',
                                    'convert_pct(!Pct_NoData!)', 'PYTHON_9.3',
                                    codeblock)

    # count whether all zones got an output record or not
    out_count = int(
        arcpy.GetCount_management('temp_entire_table').getOutput(0))
    in_count = int(arcpy.GetCount_management(zone_fc).getOutput(0))
    count_diff = in_count - out_count

    # cleanup
    for item in [
            'temp_zonal_table', 'temp_entire_table', 'convert_raster',
            'zones_VAT'
    ]:
        arcpy.Delete_management(item)
    arcpy.ResetEnvironments()
    arcpy.env.workspace = orig_env  # hope this prevents problems using list of FCs from workspace as batch
    arcpy.CheckInExtension("Spatial")

    return [out_table, count_diff]
def connected_wetlands(lakes_fc, lake_id_field, wetlands_fc, out_table):
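    """For each lake, buffer by 30 m, derive shoreline lines from the buffers, and for
    each wetland vegetation class summarize intersecting wetland counts/areas (via
    polygons_in_zones) plus kilometers of shoreline falling within wetland polygons,
    joined into a single output table per lake."""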
    env.workspace = 'in_memory'
    env.outputCoordinateSystem = arcpy.SpatialReference(102039)

    arcpy.Buffer_analysis(lakes_fc, 'lakes_30m', '30 meters')

    arcpy.FeatureToLine_management('lakes_30m', 'shorelines')

    # selection expressions for the wetland types we want to look at
    openwater_exp = """"VegType" = 'PEMorPAB'"""
    forested_exp = """"VegType" = 'PFO'"""
    scrubshrub_exp = """"VegType" = 'PSS'"""
    other_exp = """"VegType" = 'Other'"""
    all_exp = ''

    selections = [
        all_exp, forested_exp, scrubshrub_exp, openwater_exp, other_exp
    ]
    temp_tables = [
        'AllWetlands', 'ForestedWetlands', 'ScrubShrubWetlands',
        'OpenWaterWetlands', 'OtherWetlands'
    ]

    # for each wetland type, get the count of intersecting wetlands, and the length of the lake
    # shoreline that is within a wetland polygon
    for sel, temp_table in zip(selections, temp_tables):
        print("Creating temporary table for wetlands where {0}".format(sel))
        # this function adds the count and the area using the lake as the zone
        polygons_in_zones('lakes_30m',
                          lake_id_field,
                          wetlands_fc,
                          temp_table,
                          sel,
                          contrib_area=True)

        # make good field names now rather than later
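        # NOTE: new_fields is assumed to be a module-level list of the Poly_* field
        # names produced by polygons_in_zones (it is not defined in this snippet)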
        for f in new_fields:
            cu.rename_field(temp_table, f, f.replace('Poly', temp_table), True)

        # shoreline calculation
        # using the Shape_Length field so can't do this part in memory
        shoreline_gdb = cu.create_temp_GDB('shoreline')
        selected_wetlands = os.path.join(shoreline_gdb, 'wetlands')
        arcpy.Select_analysis(wetlands_fc, selected_wetlands, sel)
        intersect_output = os.path.join(shoreline_gdb, "intersect")
        arcpy.Intersect_analysis(['shorelines', selected_wetlands],
                                 intersect_output)
        arcpy.Statistics_analysis(intersect_output, 'intersect_stats',
                                  [['Shape_Length', 'SUM']], lake_id_field)
        cu.one_in_one_out('intersect_stats', ['SUM_Shape_Length'], lakes_fc,
                          lake_id_field, 'temp_shoreline_table')
        cu.redefine_nulls('temp_shoreline_table', ['SUM_Shape_Length'], [0])
        shoreline_field = temp_table + "_Shoreline_Km"
        arcpy.AddField_management('temp_shoreline_table', shoreline_field,
                                  'DOUBLE')
        arcpy.CalculateField_management('temp_shoreline_table',
                                        shoreline_field,
                                        '!SUM_Shape_Length!/1000', 'PYTHON')

        # join the shoreline value to the temp_table
        arcpy.JoinField_management(temp_table, lake_id_field,
                                   'temp_shoreline_table', lake_id_field,
                                   shoreline_field)

        # clean up shoreline intermediates
        for item in [shoreline_gdb, 'intersect_stats', 'temp_shoreline_table']:
            arcpy.Delete_management(item)

    # join em up and copy to final
    temp_tables.remove('AllWetlands')
    for t in temp_tables:
        try:
            arcpy.JoinField_management('AllWetlands', lake_id_field, t,
                                       lake_id_field)
        # sometimes there's no table if it was an empty selection
        except:
            empty_fields = [f.replace('Poly', t) for f in new_fields]
            for ef in empty_fields:
                arcpy.AddField_management('AllWetlands', ef, 'Double')
                arcpy.CalculateField_management('AllWetlands', ef, '0',
                                                'PYTHON')
            continue
    # remove all the extra zone fields, which have underscore in name
    drop_fields = [
        f.name
        for f in arcpy.ListFields('AllWetlands', 'Permanent_Identifier_*')
    ]
    for f in drop_fields:
        arcpy.DeleteField_management('AllWetlands', f)

    # remove all the overlapping metrics, which do not apply by definition
    fields = [f.name for f in arcpy.ListFields('AllWetlands')]
    for f in fields:
        if 'Overlapping' in f:
            arcpy.DeleteField_management('AllWetlands', f)
    arcpy.CopyRows_management('AllWetlands', out_table)

    for item in ['AllWetlands'] + temp_tables:
        try:
            arcpy.Delete_management(item)
        except:
            continue
def stats_area_table(zone_fc, zone_field, in_value_raster, out_table, is_thematic, debug_mode = False):
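    """Summarize in_value_raster within zones aligned to the common 30-m grid: rasterize
    the zones if needed, run ZonalStatisticsAsTable (or TabulateArea with derived AREA
    and COUNT when is_thematic), add DataCoverage_pct, and write one record per zone to
    out_table. Returns [out_table, count_diff]. With debug_mode, intermediates are kept
    in a temporary file geodatabase instead of in_memory."""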
    orig_env = arcpy.env.workspace
    if debug_mode:
        arcpy.env.overwriteOutput = True
        temp_gdb = cu.create_temp_GDB('zonal_tabarea')
        arcpy.env.workspace = temp_gdb
        arcpy.AddMessage('Debugging workspace located at {}'.format(temp_gdb))
    else:
        arcpy.env.workspace = 'in_memory'
    arcpy.CheckOutExtension("Spatial")

    # Set up environments for alignment between zone raster and theme raster
    this_files_dir = os.path.dirname(os.path.abspath(__file__))
    os.chdir(this_files_dir)
    common_grid = os.path.abspath('../common_grid.tif')
    env.snapRaster = common_grid
    env.cellSize = common_grid
    CELL_SIZE = 30
    env.extent = zone_fc

    zone_desc = arcpy.Describe(zone_fc)
    zone_raster = 'convertraster'
    if zone_desc.dataType != 'RasterDataset':
        arcpy.PolygonToRaster_conversion(zone_fc, zone_field, zone_raster, 'CELL_CENTER', cellsize = CELL_SIZE)
    else:
        zone_raster = zone_fc

    # I tested and there is no need to resample the raster being summarized. It will be resampled correctly
    # internally in the following tool given that the necessary environments are set above (cell size, snap).
    #in_value_raster = arcpy.Resample_management(in_value_raster, 'in_value_raster_resampled', CELL_SIZE)
    if not is_thematic:
        arcpy.AddMessage("Calculating Zonal Statistics...")
        temp_entire_table = arcpy.sa.ZonalStatisticsAsTable(zone_raster, zone_field, in_value_raster, 'temp_zonal_table', 'DATA', 'MIN_MAX_MEAN')

    if is_thematic:
        #for some reason env.cellSize doesn't work
        # calculate/doit
        arcpy.AddMessage("Tabulating areas...")
        temp_entire_table = arcpy.sa.TabulateArea(zone_raster, zone_field, in_value_raster, 'Value', 'temp_area_table', CELL_SIZE)

        # replaces join to Zonal Stats in previous versions of tool
        # no joining, just calculate the area/count from what's produced by TabulateArea
        arcpy.AddField_management(temp_entire_table, 'AREA', 'DOUBLE')
        arcpy.AddField_management(temp_entire_table, 'COUNT', 'DOUBLE')

        cursor_fields = ['AREA', 'COUNT']
        value_fields = [f.name for f in arcpy.ListFields(temp_entire_table, 'VALUE*')]
        cursor_fields.extend(value_fields)
        with arcpy.da.UpdateCursor(temp_entire_table, cursor_fields) as uCursor:
            for uRow in uCursor:
                area, count, value_fields = uRow[0], uRow[1], uRow[2:]
                area = sum(value_fields)
                count = round(area/(CELL_SIZE*CELL_SIZE), 0)
                new_row = [area, count] + value_fields
                uCursor.updateRow(new_row)

    arcpy.AddMessage("Refining output table...")

    arcpy.AddField_management(temp_entire_table, 'DataCoverage_pct', 'DOUBLE')

    # calculate DataCoverage_pct by comparing to original areas in zone raster
    # alternative to using JoinField, which is prohibitively slow if zones exceed hu12 count
    zone_raster_dict = {row[0]:row[1] for row in arcpy.da.SearchCursor(zone_raster, [zone_field, 'Count'])}
    temp_entire_table_dict = {row[0]:row[1] for row in arcpy.da.SearchCursor(temp_entire_table, [zone_field, 'COUNT'])}
    with arcpy.da.UpdateCursor(temp_entire_table, [zone_field, 'DataCoverage_pct']) as cursor:
        for uRow in cursor:
            key_value, data_pct = uRow
            count_orig = zone_raster_dict[key_value]
            if key_value in temp_entire_table_dict:
                count_summarized = temp_entire_table_dict[key_value]
                data_pct = 100 * float(count_summarized) / count_orig
            else:
                data_pct = None
            cursor.updateRow((key_value, data_pct))

    # Refine the output
    refine_zonal_output(temp_entire_table, zone_field, is_thematic)

    # final table gets a record even for no-data zones
    keep_fields = [f.name for f in arcpy.ListFields(temp_entire_table)]
    if zone_field.upper() in keep_fields:
        keep_fields.remove(zone_field.upper())
        zone_field = zone_field.upper()
    if zone_field in keep_fields:
        keep_fields.remove(zone_field)

    # not needed as long as we are working only with rasters
    # in order to add vector capabilities back, need to do something with this
    # right now we just can't fill in polygon zones that didn't convert to raster in our system
    cu.one_in_one_out(temp_entire_table, keep_fields, zone_fc, zone_field, out_table)

    # Convert "DataCoverage_pct" values to 0 for zones with no metrics calculated
    codeblock = """def convert_pct(arg1):
        if arg1 is None:
            return float(0)
        else:
            return arg1"""
    arcpy.CalculateField_management(out_table, 'DataCoverage_pct', 'convert_pct(!DataCoverage_pct!)', 'PYTHON_9.3', codeblock)

    # count whether all zones got an output record or not
    out_count = int(arcpy.GetCount_management(temp_entire_table).getOutput(0))
    in_count = int(arcpy.GetCount_management(zone_fc).getOutput(0))
    count_diff = in_count - out_count

    # cleanup
    if not debug_mode:
        for item in ['temp_zonal_table', 'temp_entire_table', 'in_memory', 'zones_VAT']:
            arcpy.Delete_management(item)
    arcpy.ResetEnvironments()
    arcpy.env.workspace = orig_env # hope this prevents problems using list of FCs from workspace as batch
    arcpy.CheckInExtension("Spatial")

    return [out_table, count_diff]
def stats_area_table(zone_fc, zone_field, in_value_raster, out_table, is_thematic):
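    """Summarize in_value_raster within the zone polygons: try ZonalStatisticsAsTable on
    the polygons directly and fall back to an on-disk polygon-to-raster conversion if
    that fails, also tabulating class areas when is_thematic. Writes one record per zone
    to out_table and warns if any zones were too small to receive a record."""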
    arcpy.CheckOutExtension("Spatial")
    cu.multi_msg("Calculating zonal statistics...")
    temp_zonal_table = 'in_memory/zonal_stats_temp'
    temp_entire_table = 'in_memory/temp_entire_table'

    # calculate/doit
    env.snapRaster = in_value_raster
    env.cellSize = in_value_raster

    # this has to be on disk for some reason to avoid background processing
    # errors thrown up at random
    # hence we get the following awkward horribleness
    use_convert_raster = False
    try:
        arcpy.sa.ZonalStatisticsAsTable(zone_fc, zone_field, in_value_raster,
                                temp_zonal_table, 'DATA', 'ALL')
    # with Permanent_Identifier as the zone_field, background processing errors
    # and another error get thrown up at random
    # it's faster to do zonal stats as above, but if it fails (which it usually does
    # pretty quickly), fall back to this approach, which always works but takes
    # twice as long on large rasters
    except:
        use_convert_raster = True
        temp_workspace = cu.create_temp_GDB('temp_zonal')
        convert_raster = os.path.join(temp_workspace,
                        cu.shortname(zone_fc) + '_converted')
        cu.multi_msg('Creating raster {0}'.format(convert_raster))
        arcpy.PolygonToRaster_conversion(zone_fc, zone_field, convert_raster)
        arcpy.sa.ZonalStatisticsAsTable(convert_raster, zone_field, in_value_raster,
                                    temp_zonal_table, "DATA", "ALL")

    if is_thematic:
        # for some reason env.cellSize doesn't work
        desc = arcpy.Describe(in_value_raster)
        cell_size = desc.meanCellHeight

        # calculate/doit
        temp_area_table = 'in_memory/tab_area_temp'
        cu.multi_msg("Tabulating areas...")

        if use_convert_raster:
            arcpy.sa.TabulateArea(convert_raster, zone_field, in_value_raster,
                                'Value', temp_area_table, cell_size)
        else:
            arcpy.sa.TabulateArea(zone_fc, zone_field, in_value_raster,
                                'Value', temp_area_table, cell_size)

        # making the output table
        arcpy.CopyRows_management(temp_area_table, temp_entire_table)
        zonal_stats_fields = ['AREA']
        arcpy.JoinField_management(temp_entire_table, zone_field, temp_zonal_table, zone_field, zonal_stats_fields)

        # cleanup
        arcpy.Delete_management(temp_area_table)

    if not is_thematic:
        # making the output table
        arcpy.CopyRows_management(temp_zonal_table, temp_entire_table)

    cu.multi_msg("Refining output table...")
    refine_zonal_output(temp_entire_table, is_thematic)



    #final table gets a record even for no-data zones
    keep_fields = [f.name for f in arcpy.ListFields(temp_entire_table)]
    if zone_field.upper() in keep_fields:
        keep_fields.remove(zone_field.upper())
    if zone_field in keep_fields:
        keep_fields.remove(zone_field)
    cu.one_in_one_out(temp_entire_table, keep_fields, zone_fc, zone_field, out_table)
##    cu.redefine_nulls(out_table, keep_fields, ["NA"]* len(keep_fields))

    # count whether all zones got an output record or not
    out_count = int(arcpy.GetCount_management(temp_entire_table).getOutput(0))
    in_count = int(arcpy.GetCount_management(zone_fc).getOutput(0))
    if out_count < in_count:
        warn_msg = ("WARNING: {0} features are missing in the output table"
                    " because they are too small for this raster's"
                    " resolution. This may be okay depending on your"
                    " application.").format(in_count - out_count)
        arcpy.AddWarning(warn_msg)
        print(warn_msg)

    # cleanup
    arcpy.Delete_management(temp_zonal_table)
    arcpy.Delete_management(temp_entire_table)
    if use_convert_raster:
        arcpy.Delete_management(os.path.dirname(temp_workspace))
    arcpy.CheckInExtension("Spatial")
def connected_wetlands(lakes_fc, lake_id_field, wetlands_fc, out_table):
    env.workspace = 'in_memory'
    env.outputCoordinateSystem = arcpy.SpatialReference(102039)

    arcpy.Buffer_analysis(lakes_fc, 'lakes_30m', '30 meters')

    arcpy.FeatureToLine_management('lakes_30m', 'shorelines')

    # selection expressions for the wetland types we want to look at
    openwater_exp = """"VegType" = 'PEMorPAB'"""
    forested_exp = """"VegType" = 'PFO'"""
    scrubshrub_exp = """"VegType" = 'PSS'"""
    other_exp = """"VegType" = 'Other'"""
    all_exp = ''


    selections = [all_exp, forested_exp, scrubshrub_exp, openwater_exp, other_exp]
    temp_tables = ['AllWetlands', 'ForestedWetlands', 'ScrubShrubWetlands', 'OpenWaterWetlands', 'OtherWetlands']

    # for each wetland type, get the count of intersecting wetlands, and the length of the lake
    # shoreline that is within a wetland polygon
    for sel, temp_table in zip(selections, temp_tables):
        print("Creating temporary table for wetlands where {0}".format(sel))
        # this function adds the count and the area using the lake as the zone
        polygons_in_zones('lakes_30m', lake_id_field, wetlands_fc, temp_table, sel, contrib_area = True)

        # make good field names now rather than later
        for f in new_fields:
            cu.rename_field(temp_table, f, f.replace('Poly', temp_table), True)

        # shoreline calculation
        # using the Shape_Length field so can't do this part in memory
        shoreline_gdb = cu.create_temp_GDB('shoreline')
        selected_wetlands = os.path.join(shoreline_gdb, 'wetlands')
        arcpy.Select_analysis(wetlands_fc, selected_wetlands, sel)
        intersect_output = os.path.join(shoreline_gdb, "intersect")
        arcpy.Intersect_analysis(['shorelines', selected_wetlands], intersect_output)
        arcpy.Statistics_analysis(intersect_output, 'intersect_stats', [['Shape_Length', 'SUM']], lake_id_field)
        cu.one_in_one_out('intersect_stats', ['SUM_Shape_Length'], lakes_fc, lake_id_field, 'temp_shoreline_table')
        cu.redefine_nulls('temp_shoreline_table', ['SUM_Shape_Length'], [0])
        shoreline_field = temp_table + "_Shoreline_Km"
        arcpy.AddField_management('temp_shoreline_table', shoreline_field, 'DOUBLE')
        arcpy.CalculateField_management('temp_shoreline_table', shoreline_field, '!SUM_Shape_Length!/1000', 'PYTHON')

        # join the shoreline value to the temp_table
        arcpy.JoinField_management(temp_table, lake_id_field, 'temp_shoreline_table', lake_id_field, shoreline_field)

        # clean up shoreline intermediates
        for item in [shoreline_gdb, 'intersect_stats', 'temp_shoreline_table']:
            arcpy.Delete_management(item)

    # join em up and copy to final
    temp_tables.remove('AllWetlands')
    for t in temp_tables:
        try:
            arcpy.JoinField_management('AllWetlands', lake_id_field, t, lake_id_field)
        # sometimes there's no table if it was an empty selection
        except:
            empty_fields = [f.replace('Poly', t) for f in new_fields]
            for ef in empty_fields:
                arcpy.AddField_management('AllWetlands', ef, 'Double')
                arcpy.CalculateField_management('AllWetlands', ef, '0', 'PYTHON')
            continue
    # remove all the extra zone fields, which have underscore in name
    drop_fields = [f.name for f in arcpy.ListFields('AllWetlands', 'Permanent_Identifier_*')]
    for f in drop_fields:
        arcpy.DeleteField_management('AllWetlands', f)

    # remove all the overlapping metrics, which do not apply by definition
    fields = [f.name for f in arcpy.ListFields('AllWetlands')]
    for f in fields:
        if 'Overlapping' in f:
            arcpy.DeleteField_management('AllWetlands', f)
    arcpy.CopyRows_management('AllWetlands', out_table)

    for item in ['AllWetlands'] + temp_tables:
        try:
            arcpy.Delete_management(item)
        except:
            continue
def line_density(zones, zonefield, lines, out_table, interest_selection_expr):
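    """Calculate line density per zone: crack the (optionally filtered) lines at zone
    boundaries with Identity, sum line length per zone, and write SUM_LengthM and
    Density_MperHA (meters of line per hectare of zone) to out_table. Lines must
    already be in the USGS Albers projection (102039)."""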
    # Make output folder
##    name = "LineDensity_" + os.path.splitext(os.path.basename(zones))[0]
##    outfolder = os.path.join(topoutfolder, name)
##    if not os.path.exists(outfolder):
##        os.mkdir(outfolder)

    # Environmental Settings
    ws = "in_memory"

    if interest_selection_expr:
        arcpy.MakeFeatureLayer_management(lines, "selected_lines", interest_selection_expr)
    else:
        arcpy.MakeFeatureLayer_management(lines, "selected_lines")

    arcpy.env.workspace = ws
    albers = arcpy.SpatialReference(102039)
    arcpy.env.outputCoordinateSystem = albers
    arcpy.env.extent = zones

    # Zones will be coerced to albers, have to check lines though
    arcpy.CopyFeatures_management(zones, "zones_temp")
    if arcpy.Describe(lines).spatialReference.factoryCode != albers.factoryCode:
        arcpy.AddError("Lines feature class does not have desired projection (Albers USGS). Re-project to factory code 102039 and try again.")
        sys.exit(1)

    # Add hectares field to zones
    arcpy.AddField_management("zones_temp", "ZoneAreaHa", "DOUBLE")
    arcpy.CalculateField_management("zones_temp", "ZoneAreaHa", "!shape.area@hectares!", "PYTHON")

    # Perform identity analysis to join fields and crack lines at polygon boundaries
    cu.multi_msg("Cracking lines at polygon boundaries...")
    arcpy.Identity_analysis("selected_lines", "zones_temp", "lines_identity")
    cu.multi_msg("Cracking lines complete.")

    # Recalculate lengths
    arcpy.AddField_management("lines_identity", "LengthM", "DOUBLE")
    arcpy.CalculateField_management("lines_identity", "LengthM", '!shape.length@meters!', "PYTHON")

    # Summarize statistics by zone
    arcpy.Statistics_analysis("lines_identity", "length_in_zone", "LengthM SUM", zonefield)


    # Join ZoneAreaHa to table
    arcpy.JoinField_management("length_in_zone", zonefield, "zones_temp" , zonefield, "ZoneAreaHa")

    # Delete rows in table with zero for zone area
##    with arcpy.da.UpdateCursor("length_in_zone", "ZoneAreaHa") as cursor:
##        for row in cursor:
##            if row[0] is None:
##                cursor.deleteRow()

    # Add Density field and calc
    arcpy.AddField_management("length_in_zone", "Density_MperHA", "DOUBLE",'','','','',"NULLABLE")
    exp = "!SUM_LengthM! / !ZONEAREAHA!"
    arcpy.CalculateField_management("length_in_zone", "Density_MperHA", exp, "PYTHON")

    cu.one_in_one_out("length_in_zone", ['SUM_LengthM', 'Density_MperHA'], zones, zonefield, out_table)
    cu.redefine_nulls(out_table, ['SUM_LengthM', 'Density_MperHA'], [0, 0])


##    # Join to the original table
##    keep_fields = ["ZoneID", "SUM_LengthM", "Density_MperHA"]
##    arcpy.JoinField_management('zones_temp', zonefield, "length_in_zone", zonefield, keep_fields[1:])
##
##    # Select only null records and change to 0
##    arcpy.MakeFeatureLayer_management('zones_temp', 'zones_temp_lyr')
##    arcpy.SelectLayerByAttribute_management('zones_temp_lyr', "NEW_SELECTION", '''"SUM_LengthM" is null''')
##    fields_to_calc = ["SUM_LengthM", "Density_MperHA"]
##    for f in fields_to_calc:
##        arcpy.CalculateField_management('zones_temp_lyr', f, 0, "PYTHON")
##
##    #Delete all the fields that aren't the ones I need
##    keep_fields = ["ZoneID", "SUM_LengthM", "Density_MperHA"]
##    all_fields = [f.name for f in arcpy.ListFields('zones_temp_lyr')]
##    for f in all_fields:
##        if f not in keep_fields:
##            try:
##                arcpy.DeleteField_management('zones_temp_lyr', f)
##            except:
##                continue
##    arcpy.SelectLayerByAttribute_management('zones_temp_lyr', 'CLEAR_SELECTION')
##
##    arcpy.CopyRows_management('zones_temp_lyr', out_table)

    for tempitem in ['zones_temp', 'lines_identity', 'length_in_zone']:
        arcpy.Delete_management(tempitem)

    return out_table
def stats_area_table(zone_fc,
                     zone_field,
                     in_value_raster,
                     out_table,
                     is_thematic,
                     debug_mode=False):
    orig_env = arcpy.env.workspace
    if debug_mode:
        arcpy.env.overwriteOutput = True
        temp_gdb = cu.create_temp_GDB('zonal_tabarea')
        arcpy.env.workspace = temp_gdb
        arcpy.AddMessage('Debugging workspace located at {}'.format(temp_gdb))
    else:
        arcpy.env.workspace = 'in_memory'
    arcpy.CheckOutExtension("Spatial")

    # Set up environments for alignment between zone raster and theme raster
    this_files_dir = os.path.dirname(os.path.abspath(__file__))
    os.chdir(this_files_dir)
    common_grid = os.path.abspath('../common_grid.tif')
    env.snapRaster = common_grid
    env.cellSize = common_grid
    CELL_SIZE = 30
    env.extent = zone_fc

    zone_desc = arcpy.Describe(zone_fc)
    zone_raster = 'convertraster'
    if zone_desc.dataType != 'RasterDataset':
        arcpy.PolygonToRaster_conversion(zone_fc,
                                         zone_field,
                                         zone_raster,
                                         'CELL_CENTER',
                                         cellsize=CELL_SIZE)
    else:
        zone_raster = zone_fc

    # I tested and there is no need to resample the raster being summarized. It will be resampled correctly
    # internally in the following tool given that the necessary environments are set above (cell size, snap).
    #in_value_raster = arcpy.Resample_management(in_value_raster, 'in_value_raster_resampled', CELL_SIZE)
    if not is_thematic:
        arcpy.AddMessage("Calculating Zonal Statistics...")
        temp_entire_table = arcpy.sa.ZonalStatisticsAsTable(
            zone_raster, zone_field, in_value_raster, 'temp_zonal_table',
            'DATA', 'MIN_MAX_MEAN')

    if is_thematic:
        #for some reason env.cellSize doesn't work
        # calculate/doit
        arcpy.AddMessage("Tabulating areas...")
        temp_entire_table = arcpy.sa.TabulateArea(zone_raster, zone_field,
                                                  in_value_raster, 'Value',
                                                  'temp_area_table', CELL_SIZE)

        # replaces join to Zonal Stats in previous versions of tool
        # no joining, just calculate the area/count from what's produced by TabulateArea
        arcpy.AddField_management(temp_entire_table, 'AREA', 'DOUBLE')
        arcpy.AddField_management(temp_entire_table, 'COUNT', 'DOUBLE')

        cursor_fields = ['AREA', 'COUNT']
        value_fields = [
            f.name for f in arcpy.ListFields(temp_entire_table, 'VALUE*')
        ]
        cursor_fields.extend(value_fields)
        with arcpy.da.UpdateCursor(temp_entire_table,
                                   cursor_fields) as uCursor:
            for uRow in uCursor:
                area, count, value_fields = uRow[0], uRow[1], uRow[2:]
                area = sum(value_fields)
                count = round(area / (CELL_SIZE * CELL_SIZE), 0)
                new_row = [area, count] + value_fields
                uCursor.updateRow(new_row)

    arcpy.AddMessage("Refining output table...")

    arcpy.AddField_management(temp_entire_table, 'DataCoverage_pct', 'DOUBLE')

    # calculate DataCoverage_pct by comparing to original areas in zone raster
    # alternative to using JoinField, which is prohibitively slow if zones exceed hu12 count
    zone_raster_dict = {
        row[0]: row[1]
        for row in arcpy.da.SearchCursor(zone_raster, [zone_field, 'Count'])
    }
    temp_entire_table_dict = {
        row[0]: row[1]
        for row in arcpy.da.SearchCursor(temp_entire_table,
                                         [zone_field, 'COUNT'])
    }
    with arcpy.da.UpdateCursor(temp_entire_table,
                               [zone_field, 'DataCoverage_pct']) as cursor:
        for uRow in cursor:
            key_value, data_pct = uRow
            count_orig = zone_raster_dict[key_value]
            if key_value in temp_entire_table_dict:
                count_summarized = temp_entire_table_dict[key_value]
                data_pct = 100 * float(count_summarized) / count_orig
            else:
                data_pct = None
            cursor.updateRow((key_value, data_pct))

    # Refine the output
    refine_zonal_output(temp_entire_table, zone_field, is_thematic)

    # final table gets a record even for no-data zones
    keep_fields = [f.name for f in arcpy.ListFields(temp_entire_table)]
    if zone_field.upper() in keep_fields:
        keep_fields.remove(zone_field.upper())
        zone_field = zone_field.upper()
    if zone_field in keep_fields:
        keep_fields.remove(zone_field)

    # not needed as long as we are working only with rasters
    # in order to add vector capabilities back, need to do something with this
    # right now we just can't fill in polygon zones that didn't convert to raster in our system
    cu.one_in_one_out(temp_entire_table, keep_fields, zone_fc, zone_field,
                      out_table)

    # Convert "DataCoverage_pct" values to 0 for zones with no metrics calculated
    codeblock = """def convert_pct(arg1):
        if arg1 is None:
            return float(0)
        else:
            return arg1"""
    arcpy.CalculateField_management(out_table, 'DataCoverage_pct',
                                    'convert_pct(!DataCoverage_pct!)',
                                    'PYTHON_9.3', codeblock)

    # count whether all zones got an output record or not
    out_count = int(arcpy.GetCount_management(temp_entire_table).getOutput(0))
    in_count = int(arcpy.GetCount_management(zone_fc).getOutput(0))
    count_diff = in_count - out_count

    # cleanup
    if not debug_mode:
        for item in [
                'temp_zonal_table', 'temp_entire_table', 'in_memory',
                'zones_VAT'
        ]:
            arcpy.Delete_management(item)
    arcpy.ResetEnvironments()
    arcpy.env.workspace = orig_env  # hope this prevents problems using list of FCs from workspace as batch
    arcpy.CheckInExtension("Spatial")

    return [out_table, count_diff]
def polygons_in_zones(zone_fc, zone_field, polygons_of_interest, output_table,
                      interest_selection_expr):
    old_workspace = arcpy.env.workspace
    arcpy.env.workspace = 'in_memory'
    arcpy.SetLogHistory(False)
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(102039)
    selected_polys = 'selected_polys'
    # work around leftover in_memory output when re-running from the interactive Python window
    if arcpy.Exists(selected_polys):
        arcpy.env.overwriteOutput = True

    arcpy.AddMessage('Copying/selecting polygon features...')
    if interest_selection_expr:
        arcpy.Select_analysis(polygons_of_interest, selected_polys,
                              interest_selection_expr)
    else:
        arcpy.CopyFeatures_management(polygons_of_interest, selected_polys)

    # use tabulate intersection for the areas overlapping
    arcpy.AddMessage('Tabulating intersection between zones and polygons...')
    tab_table = arcpy.TabulateIntersection_analysis(
        zone_fc, zone_field, selected_polys, 'tabulate_intersection_table')

    # area was calculated in map units (m2), so convert to hectares
    arcpy.AddField_management(tab_table, 'Poly_Ha', 'DOUBLE')
    arcpy.CalculateField_management(tab_table, 'Poly_Ha', '!AREA!/10000',
                                    'PYTHON')

    # just change the name of the percent field
    arcpy.AlterField_management(tab_table, 'PERCENTAGE', 'Poly_Pct')

    # Now just get the count as there is no other area metric anymore
    spjoin_fc = arcpy.SpatialJoin_analysis(zone_fc, selected_polys,
                                           'spatial_join_output')
    arcpy.AlterField_management(spjoin_fc, 'Join_Count', 'Poly_n')

    # Add the density
    arcpy.AddField_management(spjoin_fc, 'Poly_nperha', 'DOUBLE')
    arcpy.CalculateField_management(spjoin_fc, 'Poly_nperha',
                                    '!Poly_n!/!shape.area@hectares!', 'PYTHON')

    arcpy.AddMessage('Refining output...')
    arcpy.JoinField_management(tab_table, zone_field, spjoin_fc, zone_field,
                               ["Poly_n", 'Poly_nperha'])
    final_fields = ['Poly_Ha', 'Poly_Pct', 'Poly_n', 'Poly_nperha']

    # make output nice
    arcpy.env.overwriteOutput = False
    cu.one_in_one_out(tab_table, final_fields, zone_fc, zone_field,
                      output_table)

    cu.redefine_nulls(output_table, final_fields, [0, 0, 0, 0])

    # clean up
    # can't delete all of in_memory because this function is meant to be called from another one that uses in_memory
    for item in [selected_polys, tab_table, spjoin_fc]:
        arcpy.Delete_management(item)
    arcpy.env.workspace = old_workspace

    arcpy.AddMessage('Polygons in zones tool complete.')
    arcpy.SetLogHistory(True)
    def stats_area_table(zone_fc=zone_fc,
                         zone_field=zone_field,
                         in_value_raster=in_value_raster,
                         out_table=out_table,
                         is_thematic=is_thematic):
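        # NOTE: this stats_area_table is nested inside an outer function in the original
        # source; debug_mode, orig_env, and the DM alias (assumed to be arcpy's
        # management module) come from the enclosing scope rather than this snippet.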
        def refine_zonal_output(t):
            """Makes a nicer output for this tool. Rename some fields, drop unwanted
                ones, calculate percentages using raster AREA before deleting that
                field."""
            if is_thematic:
                value_fields = arcpy.ListFields(t, "VALUE*")
                pct_fields = [
                    '{}_pct'.format(f.name) for f in value_fields
                ]  # VALUE_41_pct, etc. Field can't start with number.

                # add all the new fields needed
                for f, pct_field in zip(value_fields, pct_fields):
                    arcpy.AddField_management(t, pct_field, f.type)

                # calculate the percents
                cursor_fields = ['AREA'] + [f.name
                                            for f in value_fields] + pct_fields
                uCursor = arcpy.da.UpdateCursor(t, cursor_fields)
                for uRow in uCursor:
                    # unpack area plus the value and pct field slices, no matter how many there are
                    vf_i_end = len(value_fields) + 1
                    pf_i_end = vf_i_end + len(pct_fields)

                    # pct_values is null at this point but unpack for clarity
                    area, value_values, pct_values = uRow[0], uRow[
                        1:vf_i_end], uRow[vf_i_end:pf_i_end]
                    new_pct_values = [100 * vv / area for vv in value_values]
                    new_row = [area] + value_values + new_pct_values
                    uCursor.updateRow(new_row)

                for vf in value_fields:
                    arcpy.DeleteField_management(t, vf.name)

            arcpy.AlterField_management(t, 'COUNT', 'CELL_COUNT')
            drop_fields = ['ZONE_CODE', 'COUNT', 'AREA']
            if not debug_mode:
                for df in drop_fields:
                    try:
                        arcpy.DeleteField_management(t, df)
                    except:
                        continue

        # Set up environments for alignment between zone raster and theme raster
        if isinstance(zone_fc, arcpy.Result):
            zone_fc = zone_fc.getOutput(0)
        this_files_dir = os.path.dirname(os.path.abspath(__file__))
        os.chdir(this_files_dir)
        common_grid = os.path.abspath('../common_grid.tif')
        env.snapRaster = common_grid
        env.cellSize = common_grid
        env.extent = zone_fc

        zone_desc = arcpy.Describe(zone_fc)
        zone_raster = 'convertraster'
        if zone_desc.dataType not in ['RasterDataset', 'RasterLayer']:
            zone_raster = arcpy.PolygonToRaster_conversion(
                zone_fc,
                zone_field,
                zone_raster,
                'CELL_CENTER',
                cellsize=env.cellSize)
            print('cell size is {}'.format(env.cellSize))
            zone_size = int(env.cellSize)
        else:
            zone_raster = zone_fc
            zone_size = min(
                arcpy.Describe(zone_raster).meanCellHeight,
                arcpy.Describe(zone_raster).meanCellWidth)
            raster_size = min(
                arcpy.Describe(in_value_raster).meanCellHeight,
                arcpy.Describe(in_value_raster).meanCellWidth)
            env.cellSize = min([zone_size, raster_size])
            print('cell size is {}'.format(env.cellSize))

        # I tested and there is no need to resample the raster being summarized. It will be resampled correctly
        # internally in the following tool given that the necessary environments are set above (cell size, snap).
        # # in_value_raster = arcpy.Resample_management(in_value_raster, 'in_value_raster_resampled', CELL_SIZE)
        if not is_thematic:
            arcpy.AddMessage("Calculating Zonal Statistics...")
            temp_entire_table = arcpy.sa.ZonalStatisticsAsTable(
                zone_raster, zone_field, in_value_raster, 'temp_zonal_table',
                'DATA', 'MEAN')

        if is_thematic:
            # for some reason env.cellSize doesn't work
            # calculate/doit
            arcpy.AddMessage("Tabulating areas...")
            temp_entire_table = arcpy.sa.TabulateArea(
                zone_raster,
                zone_field,
                in_value_raster,
                'Value',
                'temp_area_table',
                processing_cell_size=env.cellSize)
            # TabulateArea capitalizes the zone field for some annoying reason, and because
            # ArcGIS field names are case-insensitive we can't rename it in place, so we have this work-around:
            zone_field_t = '{}_t'.format(zone_field)
            DM.AddField(temp_entire_table,
                        zone_field_t,
                        'TEXT',
                        field_length=20)
            expr = '!{}!'.format(zone_field.upper())
            DM.CalculateField(temp_entire_table, zone_field_t, expr, 'PYTHON')
            DM.DeleteField(temp_entire_table, zone_field.upper())
            DM.AlterField(temp_entire_table,
                          zone_field_t,
                          zone_field,
                          clear_field_alias=True)

            # replaces join to Zonal Stats in previous versions of tool
            # no joining, just calculate the area/count from what's produced by TabulateArea
            arcpy.AddField_management(temp_entire_table, 'AREA', 'DOUBLE')
            arcpy.AddField_management(temp_entire_table, 'COUNT', 'DOUBLE')

            cursor_fields = ['AREA', 'COUNT']
            value_fields = [
                f.name for f in arcpy.ListFields(temp_entire_table, 'VALUE*')
            ]
            cursor_fields.extend(value_fields)
            with arcpy.da.UpdateCursor(temp_entire_table,
                                       cursor_fields) as uCursor:
                for uRow in uCursor:
                    area, count, value_fields = uRow[0], uRow[1], uRow[2:]
                    area = sum(value_fields)
                    count = round(
                        area / (int(env.cellSize) * int(env.cellSize)), 0)
                    new_row = [area, count] + value_fields
                    uCursor.updateRow(new_row)

        arcpy.AddMessage("Refining output table...")

        arcpy.AddField_management(temp_entire_table, 'datacoveragepct',
                                  'DOUBLE')
        arcpy.AddField_management(temp_entire_table, 'ORIGINAL_COUNT', 'LONG')

        # calculate datacoveragepct by comparing to original areas in zone raster
        # alternative to using JoinField, which is prohibitively slow if zones exceed hu12 count
        zone_raster_dict = {
            row[0]: row[1]
            for row in arcpy.da.SearchCursor(zone_raster,
                                             [zone_field, 'Count'])
        }
        temp_entire_table_dict = {
            row[0]: row[1]
            for row in arcpy.da.SearchCursor(temp_entire_table,
                                             [zone_field, 'COUNT'])
        }

        sum_cell_area = float(env.cellSize) * float(env.cellSize)
        orig_cell_area = zone_size * zone_size

        with arcpy.da.UpdateCursor(
                temp_entire_table,
            [zone_field, 'datacoveragepct', 'ORIGINAL_COUNT']) as cursor:
            for uRow in cursor:
                key_value, data_pct, count_orig = uRow
                count_orig = zone_raster_dict[key_value]
                if key_value in temp_entire_table_dict:
                    count_summarized = temp_entire_table_dict[key_value]
                    data_pct = 100 * float((count_summarized * sum_cell_area) /
                                           (count_orig * orig_cell_area))
                else:
                    data_pct = None
                cursor.updateRow((key_value, data_pct, count_orig))

        # Refine the output
        refine_zonal_output(temp_entire_table)

        # in order to add vector capabilities back, need to do something with this
        # right now we just can't fill in polygon zones that didn't convert to raster in our system
        stats_result = cu.one_in_one_out(temp_entire_table, zone_fc,
                                         zone_field, out_table)

        # Convert "datacoveragepct" and "ORIGINAL_COUNT" values to 0 for zones with no metrics calculated
        with arcpy.da.UpdateCursor(
                out_table,
            [zone_field, 'datacoveragepct', 'ORIGINAL_COUNT', 'CELL_COUNT'
             ]) as u_cursor:
            for row in u_cursor:
                # data_coverage pct to 0
                if row[1] is None:
                    row[1] = 0
                # original count filled in if a) zone outside raster bounds or b) zone too small to be rasterized
                if row[2] is None:
                    if row[0] in zone_raster_dict:
                        row[2] = zone_raster_dict[row[0]]
                    else:
                        row[2] = 0
                # cell count set to 0
                if row[3] is None:
                    row[3] = 0
                u_cursor.updateRow(row)

        # count whether all zones got an output record or not
        out_count = int(
            arcpy.GetCount_management(temp_entire_table).getOutput(0))
        in_count = int(arcpy.GetCount_management(zone_fc).getOutput(0))
        count_diff = in_count - out_count

        # cleanup
        if not debug_mode:
            for item in [
                    'temp_zonal_table', temp_entire_table, 'convertraster'
            ]:  # don't add zone_raster, orig
                arcpy.Delete_management(item)
        arcpy.ResetEnvironments()
        env.workspace = orig_env  # hope this prevents problems using list of FCs from workspace as batch
        arcpy.CheckInExtension("Spatial")

        return [stats_result, count_diff]
def polygons_in_zones(zone_fc,
                      zone_field,
                      polygons_of_interest,
                      output_table,
                      interest_selection_expr,
                      contrib_area=True):
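    """Summarize polygons of interest within zones: tabulate the overlapping area in
    hectares and percent (Poly_Overlapping_AREA_ha/_pct), and use a spatial join to get
    the count of intersecting polygons (Poly_Count) and the sum of their whole-polygon
    areas (Poly_Contributing_AREA_ha), writing one record per zone to output_table.
    (contrib_area is accepted but not checked in this version.)"""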
    old_workspace = arcpy.env.workspace
    arcpy.env.workspace = 'in_memory'
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(102039)

    temp_polyzones = cu.create_temp_GDB('temp_polyzones')
    selected_polys = os.path.join(temp_polyzones, 'selected_polys')
    cu.multi_msg('Copying/selecting polygon features...')
    if interest_selection_expr:
        arcpy.Select_analysis(polygons_of_interest, selected_polys,
                              interest_selection_expr)
    else:
        arcpy.CopyFeatures_management(polygons_of_interest, selected_polys)

    arcpy.AddField_management(selected_polys, 'POLYAREA_ha', 'DOUBLE')
    arcpy.CalculateField_management(selected_polys, 'POLYAREA_ha',
                                    '!shape.area@hectares!', 'PYTHON')

    # use tabulate intersection for the areas overlapping
    tab_table = 'tabulate_intersection_table'
    cu.multi_msg('Tabulating intersection between zones and polygons...')
    arcpy.TabulateIntersection_analysis(zone_fc, zone_field, selected_polys,
                                        tab_table)

    # area was calculated in map units which was m2 so convert to hectares
    arcpy.AddField_management(tab_table, 'Poly_Overlapping_AREA_ha', 'DOUBLE')
    arcpy.CalculateField_management(tab_table, 'Poly_Overlapping_AREA_ha',
                                    '!AREA!/10000', 'PYTHON')

    # just change the name of the percent field
    cu.rename_field(tab_table, 'PERCENTAGE', 'Poly_Overlapping_AREA_pct', True)
    spjoin_fc = 'spatial_join_output'

    # Spatial join for the count and contributing area
    fms = arcpy.FieldMappings()

    fm_zone_id = arcpy.FieldMap()
    fm_zone_id.addInputField(zone_fc, zone_field)

    fm_count = arcpy.FieldMap()
    fm_count.addInputField(selected_polys, 'POLYAREA_ha')
    count_name = fm_count.outputField
    count_name.name = 'Poly_Count'
    count_name.alias = 'Poly_Count'
    fm_count.outputField = count_name
    fm_count.mergeRule = 'Count'

    fm_contrib_area = arcpy.FieldMap()
    fm_contrib_area.addInputField(selected_polys, 'POLYAREA_ha')
    contrib_area_name = fm_contrib_area.outputField
    contrib_area_name.name = 'Poly_Contributing_AREA_ha'
    contrib_area_name.alias = 'Poly_Contributing_AREA_ha'
    fm_contrib_area.outputField = contrib_area_name
    fm_contrib_area.mergeRule = 'Sum'

    fms.addFieldMap(fm_zone_id)
    fms.addFieldMap(fm_count)
    fms.addFieldMap(fm_contrib_area)

    cu.multi_msg('Spatial join between zones and wetlands...')
    arcpy.SpatialJoin_analysis(zone_fc, selected_polys, spjoin_fc,
                               "JOIN_ONE_TO_ONE", "KEEP_ALL", fms, "INTERSECT")

    cu.multi_msg('Refining output...')
    arcpy.JoinField_management(tab_table, zone_field, spjoin_fc, zone_field,
                               ["Poly_Count", "Poly_Contributing_AREA_ha"])
    final_fields = [
        'Poly_Overlapping_AREA_ha', 'Poly_Overlapping_AREA_pct', 'Poly_Count',
        'Poly_Contributing_AREA_ha'
    ]

    # make output nice
    cu.one_in_one_out(tab_table, final_fields, zone_fc, zone_field,
                      output_table)
    cu.redefine_nulls(output_table, final_fields, [0, 0, 0, 0])

    # clean up
    for item in [selected_polys, tab_table, spjoin_fc]:
        arcpy.Delete_management(item)
    arcpy.Delete_management(temp_polyzones)
    arcpy.env.workspace = old_workspace

    cu.multi_msg('Polygons in zones tool complete.')
def line_density(zones, zonefield, lines, out_table, interest_selection_expr):
    # Make output folder
    ##    name = "LineDensity_" + os.path.splitext(os.path.basename(zones))[0]
    ##    outfolder = os.path.join(topoutfolder, name)
    ##    if not os.path.exists(outfolder):
    ##        os.mkdir(outfolder)

    # Environmental Settings
    ws = "in_memory"

    if interest_selection_expr:
        arcpy.MakeFeatureLayer_management(lines, "selected_lines",
                                          interest_selection_expr)
    else:
        arcpy.MakeFeatureLayer_management(lines, "selected_lines")

    arcpy.env.workspace = ws
    albers = arcpy.SpatialReference(102039)
    arcpy.env.outputCoordinateSystem = albers
    arcpy.env.extent = zones

    # Zones will be coerced to albers, have to check lines though
    arcpy.CopyFeatures_management(zones, "zones_temp")
    if arcpy.Describe(
            lines).spatialReference.factoryCode != albers.factoryCode:
        arcpy.AddError(
            "Lines feature class does not have desired projection (Albers USGS). Re-project to factory code 102039 and try again."
        )
        sys.exit(1)

    # Add hectares field to zones
    arcpy.AddField_management("zones_temp", "ZoneAreaHa", "DOUBLE")
    arcpy.CalculateField_management("zones_temp", "ZoneAreaHa",
                                    "!shape.area@hectares!", "PYTHON")

    # Perform identity analysis to join fields and crack lines at polygon boundaries
    cu.multi_msg("Cracking lines at polygon boundaries...")
    arcpy.Identity_analysis("selected_lines", "zones_temp", "lines_identity")
    cu.multi_msg("Cracking lines complete.")

    # Recalculate lengths
    arcpy.AddField_management("lines_identity", "LengthM", "DOUBLE")
    arcpy.CalculateField_management("lines_identity", "LengthM",
                                    '!shape.length@meters!', "PYTHON")

    # Summarize statistics by zone
    arcpy.Statistics_analysis("lines_identity", "length_in_zone",
                              "LengthM SUM", zonefield)

    # Join ZoneAreaHa to table
    arcpy.JoinField_management("length_in_zone", zonefield, "zones_temp",
                               zonefield, "ZoneAreaHa")

    # Delete rows in table with zero for zone area
    ##    with arcpy.da.UpdateCursor("length_in_zone", "ZoneAreaHa") as cursor:
    ##        for row in cursor:
    ##            if row[0] is None:
    ##                cursor.deleteRow()

    # Add Density field and calc
    arcpy.AddField_management("length_in_zone", "Density_MperHA", "DOUBLE", '',
                              '', '', '', "NULLABLE")
    exp = "!SUM_LengthM! / !ZONEAREAHA!"
    arcpy.CalculateField_management("length_in_zone", "Density_MperHA", exp,
                                    "PYTHON")

    cu.one_in_one_out("length_in_zone", ['SUM_LengthM', 'Density_MperHA'],
                      zones, zonefield, out_table)
    cu.redefine_nulls(out_table, ['SUM_LengthM', 'Density_MperHA'], [0, 0])

    ##    # Join to the original table
    ##    keep_fields = ["ZoneID", "SUM_LengthM", "Density_MperHA"]
    ##    arcpy.JoinField_management('zones_temp', zonefield, "length_in_zone", zonefield, keep_fields[1:])
    ##
    ##    # Select only null records and change to 0
    ##    arcpy.MakeFeatureLayer_management('zones_temp', 'zones_temp_lyr')
    ##    arcpy.SelectLayerByAttribute_management('zones_temp_lyr', "NEW_SELECTION", '''"SUM_LengthM" is null''')
    ##    fields_to_calc = ["SUM_LengthM", "Density_MperHA"]
    ##    for f in fields_to_calc:
    ##        arcpy.CalculateField_management('zones_temp_lyr', f, 0, "PYTHON")
    ##
    ##    #Delete all the fields that aren't the ones I need
    ##    keep_fields = ["ZoneID", "SUM_LengthM", "Density_MperHA"]
    ##    all_fields = [f.name for f in arcpy.ListFields('zones_temp_lyr')]
    ##    for f in all_fields:
    ##        if f not in keep_fields:
    ##            try:
    ##                arcpy.DeleteField_management('zones_temp_lyr', f)
    ##            except:
    ##                continue
    ##    arcpy.SelectLayerByAttribute_management('zones_temp_lyr', 'CLEAR_SELECTION')
    ##
    ##    arcpy.CopyRows_management('zones_temp_lyr', out_table)

    for tempitem in ['zones_temp', 'lines_identity', 'length_in_zone']:
        arcpy.Delete_management(tempitem)

    return out_table