Example #1
def Coalesce(inFeats, dilDist, outFeats, scratchGDB="in_memory"):
    '''If a positive number is entered for the dilation distance, features are
    expanded outward by the specified distance, then shrunk back in by the same
    distance. This causes nearby features to coalesce. If a negative number is
    entered for the dilation distance, features are first shrunk, then expanded.
    This eliminates narrow portions of existing features, thereby simplifying
    them. It can also break narrow "bridges" between features that were formerly
    coalesced.'''

    # If it's a string, parse dilation distance and get the negative
    if type(dilDist) == str:
        origDist, units, meas = multiMeasure(dilDist, 1)
        negDist, units, negMeas = multiMeasure(dilDist, -1)
    else:
        origDist = dilDist
        meas = dilDist
        negDist = -1 * origDist
        negMeas = negDist

    # Parameter check
    if origDist == 0:
        arcpy.AddError(
            "You need to enter a non-zero value for the dilation distance")
        raise arcpy.ExecuteError

    # Set parameters. Dissolve parameter depends on dilation distance.
    if origDist > 0:
        dissolve1 = "ALL"
        dissolve2 = "NONE"
    else:
        dissolve1 = "NONE"
        dissolve2 = "ALL"

    # Process: Buffer
    Buff1 = scratchGDB + os.sep + "Buff1"
    arcpy.Buffer_analysis(inFeats, Buff1, meas, "FULL", "ROUND", dissolve1, "",
                          "PLANAR")

    # Process: Clean Features
    Clean_Buff1 = scratchGDB + os.sep + "CleanBuff1"
    CleanFeatures(Buff1, Clean_Buff1)

    # Process: Generalize Features
    # This should prevent random processing failures on features with many
    # vertices, and also speed processing in general
    arcpy.Generalize_edit(Clean_Buff1, "0.1 Meters")

    # Eliminate gaps
    # Added step due to weird behavior on some buffers
    Clean_Buff1_ng = scratchGDB + os.sep + "Clean_Buff1_ng"
    arcpy.EliminatePolygonPart_management(Clean_Buff1, Clean_Buff1_ng, "AREA",
                                          "900 SQUAREMETERS", "",
                                          "CONTAINED_ONLY")

    # Process: Buffer
    Buff2 = scratchGDB + os.sep + "NegativeBuffer"
    arcpy.Buffer_analysis(Clean_Buff1_ng, Buff2, negMeas, "FULL", "ROUND",
                          dissolve2, "", "PLANAR")

    # Process: Clean Features to get final dilated features
    CleanFeatures(Buff2, outFeats)

    # Cleanup
    if scratchGDB == "in_memory":
        garbagePickup([Buff1, Clean_Buff1, Buff2])

    return outFeats
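
A minimal usage sketch for the Coalesce helper above, assuming arcpy is imported and the multiMeasure, CleanFeatures, and garbagePickup helpers come from the same module; the paths and the 50-meter distance are placeholders.

# Hypothetical call: a positive distance expands then shrinks, merging nearby
# patches; a negative distance such as "-50 Meters" would trim narrow necks instead.
coalesced = Coalesce("C:/data/habitat.gdb/patches", "50 Meters",
                     "C:/data/habitat.gdb/patches_coalesced")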
Example #2
    def __removePolygonHoles__(self, polyFC, path):
        try:

            result = arcpy.EliminatePolygonPart_management(
                polyFC, os.path.join(path,
                                     "GlobalWatershed"), "AREA_OR_PERCENT",
                "90 squaremeters", 1, "ANY")  #modified CONTAINED_ONLY

            self.__sm__(arcpy.GetMessages())
            return result
        except:
            tb = traceback.format_exc()
            self.__sm__("Error removing holes " + tb, "ERROR")
            return polyFC
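
A standalone sketch of the same call outside the class, with placeholder paths; with the AREA_OR_PERCENT condition, a part is removed when it falls below either the area threshold or the percent threshold.

import os
import arcpy

# Placeholder geodatabase path; holes smaller than 90 square meters OR smaller
# than 1 percent of the outer polygon are filled, wherever they occur ("ANY").
gdb = r"C:\data\watersheds.gdb"
arcpy.EliminatePolygonPart_management(
    os.path.join(gdb, "GlobalWatershedRaw"),
    os.path.join(gdb, "GlobalWatershed"),
    "AREA_OR_PERCENT", "90 SquareMeters", 1, "ANY")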
Example #3
def updateMDLASGeometry(geometry_type, filegdb_path, md_path, area_percent, point_interval):
    '''
    geometry_type = ["BOUNDARY", "FOOTPRINT"]
    '''
    geometry_name = "LAS"
    # Create a scratch feature class in the file geodatabase to hold the geometry
    geometry_export = os.path.join(filegdb_path, "{}_{}_Export".format(geometry_type, geometry_name))
    if arcpy.Exists(geometry_export):
        arcpy.Delete_management(geometry_export)
        Utility.addToolMessages()
    
    # Export the geometry to the scratch feature class
    arcpy.ExportMosaicDatasetGeometry_management(md_path, geometry_export, where_clause="#", geometry_type=geometry_type)
    Utility.addToolMessages()
    
    # Remove the holes and save to a feature class in the file geodatabase
    geometry_no_holes = os.path.join(filegdb_path, "{}_{}_NoHoles".format(geometry_type, geometry_name))
    if arcpy.Exists(geometry_no_holes):
        arcpy.Delete_management(geometry_no_holes)
        Utility.addToolMessages()
    
    arcpy.EliminatePolygonPart_management(geometry_export, geometry_no_holes, condition="PERCENT", part_area="0 SquareMeters", part_area_percent=area_percent, part_option="CONTAINED_ONLY")
    Utility.addToolMessages()

    # Smooth the polygons
    geometry_smooth = os.path.join(filegdb_path, "{}_{}".format(geometry_type, geometry_name))
    if arcpy.Exists(geometry_smooth):
        arcpy.Delete_management(geometry_smooth)
        Utility.addToolMessages()
    
    arcpy.SmoothPolygon_cartography(geometry_no_holes, geometry_smooth, "PAEK", point_interval, "FIXED_ENDPOINT", "NO_CHECK")
    Utility.addToolMessages()
      
    # Clean up
    if arcpy.Exists(geometry_export):
        arcpy.Delete_management(geometry_export)
        Utility.addToolMessages()
    if arcpy.Exists(geometry_no_holes):
        arcpy.Delete_management(geometry_no_holes)
        Utility.addToolMessages()
    
    # import simplified Footprints/boundary
    arcpy.ImportMosaicDatasetGeometry_management(md_path, target_featureclass_type=geometry_type, target_join_field="OBJECTID",
                                                 input_featureclass=geometry_smooth, input_join_field="OBJECTID")
    Utility.addToolMessages()
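
A hedged usage sketch for updateMDLASGeometry; the file geodatabase, mosaic dataset path, 10-percent hole threshold, and 100-meter smoothing tolerance are illustrative assumptions.

# Hypothetical call: rebuild a simplified, smoothed BOUNDARY for a lidar mosaic.
updateMDLASGeometry("BOUNDARY",
                    filegdb_path=r"C:\lidar\Derived.gdb",
                    md_path=r"C:\lidar\Derived.gdb\LAS_Mosaic",
                    area_percent=10,
                    point_interval="100 Meters")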
Example #4
def Step6EliminatePolygonPart(
        Output_Buffer,
        Output_Finished,
        Condition="AREA",
        Area="0 Unknown",
        Percentage=0,
        Eliminate_contained_parts_only=True):  # Step6EliminatePolygonPart

    # To allow overwriting outputs change overwriteOutput option to True.
    arcpy.env.overwriteOutput = False

    # Process: Eliminate Polygon Part (Eliminate Polygon Part)
    arcpy.EliminatePolygonPart_management(
        in_features=Output_Buffer,
        out_feature_class=Output_Finished,
        condition=Condition,
        part_area=Area,
        part_area_percent=Percentage,
        part_option=Eliminate_contained_parts_only)
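
A hedged call sketch for the exported function above; the documented values for part_option are the strings "CONTAINED_ONLY" and "ANY", so the boolean default presumably maps to a checkbox in the original model. Paths below are placeholders.

# Hypothetical direct call with an explicit string for part_option.
Step6EliminatePolygonPart(
    Output_Buffer=r"C:\work\project.gdb\ParcelBuffer",
    Output_Finished=r"C:\work\project.gdb\ParcelBufferFilled",
    Condition="AREA",
    Area="250 SquareMeters",
    Eliminate_contained_parts_only="CONTAINED_ONLY")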
Example #5
def better_contour(inputclass, outputclass):
    print("inputclass", inputclass)
    print("outputclass", outputclass)
    arcpy.Merge_management(inputclass, "in_memory/after_merge")
    merger_all("in_memory/after_merge", "in_memory/after_diss_all")
    arcpy.Delete_management("in_memory/after_merge")
    print("merge all")
    arcpy.EliminatePolygonPart_management("in_memory/after_diss_all",
                                          "in_memory/after_eli",
                                          "AREA",
                                          1000000,
                                          part_option="CONTAINED_ONLY")
    arcpy.Delete_management("in_memory/after_diss_all")
    print("create contour")
    arcpy.SimplifyPolygon_cartography("in_memory/after_eli",
                                      outputclass,
                                      algorithm="POINT_REMOVE",
                                      tolerance=1,
                                      error_option="NO_CHECK",
                                      collapsed_point_option="NO_KEEP")
    print("complete")
Example #6
def Overview(Input_Geologic_Features,
             Output_Finished,
             Aggregation_Distance,
             Minimum_Area="0 Unknown",
             Minimum_Hole_Size="0 Unknown",
             Preserve_orthogonal_shape=False,
             Barrier_Features=None,
             Simplification_Algorithm="POINT_REMOVE",
             Simplification_Tolerance=None,
             Minimum_Area_2_="0 Unknown",
             Handling_Topological_Errors="RESOLVE_ERRORS",
             Keep_collapsed_points=True,
             Input_Barrier_Layers=None,
             Smoothing_Algorithm="PAEK",
             Smoothing_Tolerance=None,
             Preserve_endpoint_for_rings=True,
             Handling_Topological_Errors_2_="NO_CHECK",
             Input_Barrier_Layers_2_=None,
             Distance_value_or_field_=None,
             Side_Type="FULL",
             End_Type="ROUND",
             Dissolve_Type="NONE",
             Dissolve_Field_s_=None,
             Method="PLANAR",
             Condition="AREA",
             Area="0 Unknown",
             Percentage=0,
             Eliminate_contained_parts_only=True):  # Overview

    # To allow overwriting outputs change overwriteOutput option to True.
    arcpy.env.overwriteOutput = False

    # Process: Union (Union)
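    # NOTE: the empty-string intermediate outputs below (Output_Union,
    # Output_Singlepart, Output_Aggregate, ...) appear to come from a
    # ModelBuilder export; point them at a scratch workspace before running.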
    Output_Union = ""
    arcpy.Union_analysis(in_features=Input_Geologic_Features,
                         out_feature_class=Output_Union,
                         join_attributes="ALL",
                         cluster_tolerance="",
                         gaps="GAPS")

    # Process: Multipart To Singlepart (Multipart To Singlepart)
    Output_Singlepart = ""
    arcpy.MultipartToSinglepart_management(in_features=Output_Union,
                                           out_feature_class=Output_Singlepart)

    # Process: Aggregate Polygons (Aggregate Polygons)
    Output_Aggregate = ""
    Output_Table = ""
    arcpy.AggregatePolygons_cartography(
        in_features=Output_Singlepart,
        out_feature_class=Output_Aggregate,
        aggregation_distance=Aggregation_Distance,
        minimum_area=Minimum_Area,
        minimum_hole_size=Minimum_Hole_Size,
        orthogonality_option=Preserve_orthogonal_shape,
        barrier_features=Barrier_Features,
        out_table=Output_Table)

    # Process: Simplify Polygon (Simplify Polygon)
    Output_Simplify = ""
    output_feature_class_Pnt = \
    arcpy.SimplifyPolygon_cartography(in_features=Output_Aggregate, out_feature_class=Output_Simplify,
                                      algorithm=Simplification_Algorithm, tolerance=Simplification_Tolerance,
                                      minimum_area=Minimum_Area_2_, error_option=Handling_Topological_Errors,
                                      collapsed_point_option=Keep_collapsed_points, in_barriers=Input_Barrier_Layers)[0]

    # Process: Smooth Polygon (Smooth Polygon)
    Output_Smooth = ""
    arcpy.SmoothPolygon_cartography(
        in_features=Output_Simplify,
        out_feature_class=Output_Smooth,
        algorithm=Smoothing_Algorithm,
        tolerance=Smoothing_Tolerance,
        endpoint_option=Preserve_endpoint_for_rings,
        error_option=Handling_Topological_Errors_2_,
        in_barriers=Input_Barrier_Layers_2_)

    # Process: Buffer (Buffer)
    Output_Buffer = ""
    arcpy.Buffer_analysis(in_features=Output_Smooth,
                          out_feature_class=Output_Buffer,
                          buffer_distance_or_field=Distance_value_or_field_,
                          line_side=Side_Type,
                          line_end_type=End_Type,
                          dissolve_option=Dissolve_Type,
                          dissolve_field=Dissolve_Field_s_,
                          method=Method)

    # Process: Eliminate Polygon Part (Eliminate Polygon Part)
    arcpy.EliminatePolygonPart_management(
        in_features=Output_Buffer,
        out_feature_class=Output_Finished,
        condition=Condition,
        part_area=Area,
        part_area_percent=Percentage,
        part_option=Eliminate_contained_parts_only)
Example #7
UncleanedPolygonVB = UncleanedPolygonVB_USER

#/cleaning of the valley bottom polygon
arcpy.AddMessage("Aggregating and Deleting Holes | CleanStep 1 - Step " +
                 str(ncurrentstep) + "/" + str(nstep))
AggregatedVB = arcpy.AggregatePolygons_cartography(
    UncleanedPolygonVB, "%scratchWorkspace%\\AggregatedVB", AggregationDist,
    MinimumArea, MinimumHoleSize, "NON_ORTHOGONAL")

if str(SmoothingVB) != "0":

    ncurrentstep += 1
    arcpy.AddMessage("Eliminating Polygon Parts | CleanStep 2 - Step " +
                     str(ncurrentstep) + "/" + str(nstep))
    EliminatedVB = arcpy.EliminatePolygonPart_management(
        AggregatedVB, "%scratchWorkspace%\\EliminatedVB", "AREA",
        MinimumHoleSize, "", "ANY")

    ncurrentstep += 1
    arcpy.AddMessage("Smoothing Valley Bottom | CleanStep 3 - Step " +
                     str(ncurrentstep) + "/" + str(nstep))
    VB = arcpy.SmoothPolygon_cartography(EliminatedVB, Output, "PAEK",
                                         SmoothingVB, "FIXED_ENDPOINT",
                                         "NO_CHECK")

else:
    ncurrentstep += 1
    arcpy.AddMessage("Eliminating Polygon Part | CleanStep 2 - Step " +
                     str(ncurrentstep) + "/" + str(nstep))
    VB = arcpy.EliminatePolygonPart_management(AggregatedVB, Output, "AREA",
                                               MinimumHoleSize, "", "ANY")
Example #8
def WVW(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld),
                                       str(city) + '_WaterWV.gdb')
    except:
        print 'WaterWV GDB already exists'
    workDir = str(workFld) + '/' + city + '_WaterWV.gdb'
    arcpy.env.workspace = workDir
    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
    """ Split Raster Directory """
    if os.path.isdir(str(workFld) + '/' + city + '_Split') == True:
        pass
    else:
        os.makedirs(str(workFld) + '/' + city + '_Split')
    splitDir = str(workFld) + '/' + city + '_Split'
    """ Set Workspace Environments """
    arcpy.env.workspace = workDir
    arcpy.env.scratch = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    print("Directory and Environment set-up complete...--" +
          time.strftime('%Y%m%d--%H%M%S'))

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_WaterWV_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        reportFile = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'
        """ Write out first line of report file """
        print 'Water Views Start Time: ' + time.asctime()
        reportFile.write(
            "Begin with EnviroAtlas 1-meter Land Cover for the EnviroAtlas community--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/LC'
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.overwriteOutput = True
        """-------- Reclassify LC into Binary Water ----------------------------- """
        if arcpy.Exists(str(freqDir) + '/WaterIO') == False:
            outReclass = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 0], [52, 0], [70, 0],
                                     [80, 0], [82, 0], [91, 0], [92, 0]]))
            outReclass.save(str(freqDir) + '/WaterIO')
            print("Reclassify to water binary complete...--" +
                  time.strftime('%Y%m%d--%H%M%S'))
            reportFile.write(
                "Reclassify the Land Cover into a Water binary. (Water - 10 = 1; All Else = 0).--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("WaterIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        else:
            print("Reclassify to water previously completed...--" +
                  time.strftime('%Y%m%d--%H%M%S'))
            reportFile.write(
                "Reclassify the Land Cover into a Water binary. (Water - 10 = 1; All Else = 0).--WaterIO"
                + '--\n')
        """-------- Create the Water Bodies Raster ------------------ """
        """ Create the Water Region Group """
        if arcpy.Exists('WatIO_300') == False:
            RG = arcpy.sa.RegionGroup(freqDir + '/WaterIO', 'EIGHT', 'WITHIN',
                                      'ADD_LINK')
            RG.save('WatIO_300')
            arcpy.AddField_management('WatIO_300', 'Count_2', 'DOUBLE')
            arcpy.CalculateField_management('WatIO_300', 'Count_2', '!COUNT!',
                                            'PYTHON')
            print("Water region groups created...--" +
                  time.strftime('%Y%m%d--%H%M%S'))
            reportFile.write(
                "Run the region group tool to group waterbodies together with options number of neighbors to use: EIGHT, zone grouping method: WITHIN, and ADD_LINK (add link field to output).--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("WaterRG--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')

        else:
            print("Water region groups previously created...--" +
                  time.strftime('%Y%m%d--%H%M%S'))
            reportFile.write(
                "Run the region group tool to group waterbodies together with options number of neighbors to use: EIGHT, zone grouping method: WITHIN, and ADD_LINK (add link field to output).--WaterRG"
                + '\n')
        """ Extract Water Bodies From the Region Group """
        EBA = arcpy.sa.ExtractByAttributes('WatIO_300',
                                           'Count_2 > 299 AND LINK = 1')
        EBA.save('WatBod_300')
        reportFile.write(
            "Extract by Attributes from the Region Group raster groups of pixels where Count > 299 and Link = 1.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Split the Raster As Needs, Process Each Piece ----------------- """
        """ Check if the raster should be split """
        columns = arcpy.GetRasterProperties_management(
            'WatBod_300', 'COLUMNCOUNT').getOutput(0)
        xsplit = int(float(columns) / 40000) + 1
        rows = arcpy.GetRasterProperties_management('WatBod_300',
                                                    'ROWCOUNT').getOutput(0)
        ysplit = int(float(rows) / 40000) + 1
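        # Tiles are capped at 40,000 cells per side; the integer division plus
        # one gives the number of tiles needed in each direction.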
        print("Determining if split is necessary...--" +
              time.strftime('%Y%m%d--%H%M%S'))
        """-------- If no split, run the analysis --------------------------------- """
        if xsplit * ysplit == 1:
            """ Convert Raster to Polygon """
            arcpy.RasterToPolygon_conversion('WatBod_300', 'Poly_WatBod',
                                             'NO_SIMPLIFY')
            reportFile.write("Convert the raster into polygons--" +
                             time.strftime('%Y%m%d--%H%M%S') + '--\n')
            print("\t Raster is small enough, split not necessary...--" +
                  time.strftime('%Y%m%d--%H%M%S'))
            """-------- If split, run the analysis on each piece and recombine --------- """
        else:
            """ Delete the raster, if necessary """
            xy = (xsplit * ysplit)
            for rast in range(xy):
                try:
                    arcpy.Delete_management(splitDir + '/watbod_' + str(rast))
                except:
                    pass
            try:
                arcpy.Delete_management(splitDir + '/watbod_300')
            except:
                pass
            """ Split the Raster """
            print("\t Raster is big, spliting into " + str(xsplit) +
                  " rows and " + str(ysplit) + " columns...--" +
                  time.strftime('%Y%m%d--%H%M%S'))
            arcpy.RasterToOtherFormat_conversion('WatBod_300', splitDir,
                                                 'GRID')
            arcpy.SplitRaster_management(splitDir + '/WatBod_300', splitDir,
                                         'WatBod_', 'NUMBER_OF_TILES', 'GRID',
                                         '',
                                         str(xsplit) + ' ' + str(ysplit))
            reportFile.write(
                "Split the raster into pieces for easier processing. The Python script determines the number of pieces based on the number of rows and columns in the raster where no piece can have a side larger than 40,000 cells.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ For each raster: """
            prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
            xy = xsplit * ysplit
            for Chunk in range(0, xy):
                try:
                    result = float(
                        arcpy.GetRasterProperties_management(
                            splitDir + '/WatBod_' + str(Chunk),
                            'MEAN').getOutput(0))
                    """ If the raster piece has data: """
                    if (result > 0):
                        """ Set Environments """
                        arcpy.env.snapRaster = freqDir + '/WaterIO'
                        arcpy.env.extent = freqDir + '/WaterIO'
                        """ Copy the piece back to the Working Directory """
                        arcpy.RasterToOtherFormat_conversion(
                            splitDir + '/WatBod_' + str(Chunk), workDir)
                        """ Convert Raster to Polygon """
                        arcpy.RasterToPolygon_conversion(
                            'WatBod_' + str(Chunk), 'Poly_' + str(Chunk),
                            'NO_SIMPLIFY')
                        print("Processed Chunk " + str(Chunk) + " / " +
                              str(xy) + "..." + time.asctime())
                    else:
                        pass
                except:
                    pass
            reportFile.write(
                "Convert the pieces individually into polygons then recombine them into one feature class.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')

            WVchunks = arcpy.ListFeatureClasses('Poly_*')
            """ Merge the polygons back together """
            arcpy.Merge_management(WVchunks, 'Poly_WatBod')
            print(
                "All raster chunks with data converted to polygons and merged...--"
                + time.strftime('%Y%m%d--%H%M%S'))
            reportFile.write("Merge the pieces back together.--" +
                             time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- End of Split Processing ---------------------------------------- """
        """ Buffer pieces by 50m """
        arcpy.Buffer_analysis('Poly_WatBod', 'WatView_Buff', '50 METERS',
                              'FULL', '', 'NONE')
        reportFile.write("Buffer the Water Bodies by 50 Meters.--" +
                         time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Project into Albers """
        arcpy.Project_management('WatView_Buff', 'WatView_Alb', prjfile)
        reportFile.write("Project the buffer polygons into Albers.--" +
                         time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/Dasy'
        arcpy.env.extent = freqDir + '/Dasy'
        """ Extract Daysmetric Pixels """
        EbM_P = arcpy.sa.ExtractByMask(freqDir + '/Dasy', 'WatView_Alb')
        EbM_P.save('Pop_WaterView')
        print("Dasy pixels extracted for all buffered water polygons...--" +
              time.strftime('%Y%m%d--%H%M%S'))
        reportFile.write(
            "Extract by mask the EnviroAtlas Dasymetric (2011/October 2015) pixels using the projected buffer to produce a raster showing population with potential views of water.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate the Extracted Dasy Population with Each CBG """
        arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                        'Pop_WaterView', 'BG_WWV', 'DATA',
                                        'SUM')
        reportFile.write(
            "Calculate Zonal Statistics as Table for the extracted dasymetrics with the zones being the 2010 block groups for the EnviroAtlas community.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Calculate Total Dasy Population, if necessary ------------------ """
        """ Use the existing data """
        fieldNames = [f.name for f in arcpy.ListFields(freqDir + '/BG_Alb')]
        if 'Dasy_Pop' in fieldNames:
            reportFile.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--Dasy_Pop"
                + '--\n')
            """ Create population data """
        else:
            arcpy.AddField_management(freqDir + '/BG_Alb', 'Dasy_Pop', 'LONG')
            arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                            freqDir + '/Dasy',
                                            freqDir + '/Dasy_ZS', '', 'SUM')
            arcpy.JoinField_management(freqDir + '/BG_Alb', 'bgrp',
                                       freqDir + '/Dasy_ZS', 'bgrp', ['SUM'])
            arcpy.CalculateField_management(freqDir + '/BG_Alb', 'Dasy_Pop',
                                            '!SUM!', 'PYTHON_9.3')
            arcpy.DeleteField_management(freqDir + '/BG_Alb', ['SUM'])
            arcpy.JoinField_management(freqDir + '/BG', 'bgrp',
                                       freqDir + '/BG_Alb', 'bgrp',
                                       ['Dasy_Pop'])
            reportFile.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("Dasy_Pop--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        """-------- Create Final Table --------------------------------------------- """
        arcpy.TableToTable_conversion(freqDir + '/BG_Alb', workDir, 'WatWV',
                                      '', 'bgrp')
        arcpy.DeleteField_management('WatWV', [
            'PLx2_Pop', 'PLx2_Pct', 'SUM_HOUSIN', 'NonWhite', 'LandA_M',
            'Density', 'under_1', 'under_1pct', 'under_13', 'under_13pc',
            'over_70', 'over_70pct', 'Shape_Length', 'Shape_Leng',
            'NonWhite_Pop', 'NonWt_Pct', 'Area_M', 'Shape_Le_1', 'Shape_Area',
            'ALAND', 'NonWhite_P', 'H_Income_M', 'State'
        ])
        WaterView = 'WatWV'
        reportFile.write(
            "Create a new table based on the EnviroAtlas community block groups table retaining the BGRP and Dasy_Pop fields--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add fields to new table """
        arcpy.AddField_management(WaterView, 'WVW_Pop', 'LONG')
        arcpy.AddField_management(WaterView, 'WVW_Pct', 'FLOAT', 5, 2)
        reportFile.write(
            "Add fields to the new table for WVW_Pop (long), WVW_Pct (float).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Join Each Table to the final table and WVW_Pop """
        arcpy.JoinField_management(WaterView, 'bgrp', 'BG_WWV', 'bgrp',
                                   ['SUM'])
        arcpy.CalculateField_management(WaterView, 'WVW_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.MakeTableView_management(WaterView, 'WaterView_Tbl')
        arcpy.SelectLayerByAttribute_management('WaterView_Tbl',
                                                'NEW_SELECTION', 'SUM IS NULL')
        arcpy.CalculateField_management('WaterView_Tbl', 'WVW_Pop', 0,
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('WaterView_Tbl',
                                                'CLEAR_SELECTION')
        arcpy.DeleteField_management('WaterView_Tbl', 'SUM')
        reportFile.write(
            "Join the zonal statistics table with the new table to calculate the new fields: WVW_Pop = zonal statistics.SUM; remove join--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate WVW_Pct """
        arcpy.SelectLayerByAttribute_management('WaterView_Tbl',
                                                'NEW_SELECTION', 'Dasy_Pop >0')
        arcpy.CalculateField_management(
            'WaterView_Tbl', 'WVW_Pct',
            '"%.2f" % (float(!WVW_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('WaterView_Tbl',
                                                'CLEAR_SELECTION')
        reportFile.write(
            "Calculate field WVW_Pct = WVW_Pop / Dasy_Pop * 100 (limited to 2 decimal places).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate NULL values, where applicable """
        arcpy.SelectLayerByAttribute_management('WaterView_Tbl',
                                                'NEW_SELECTION',
                                                'Dasy_Pop = 0')
        arcpy.CalculateField_management('WaterView_Tbl', 'WVW_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('WaterView_Tbl', 'WVW_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('WaterView_Tbl',
                                                'CLEAR_SELECTION')
        arcpy.DeleteField_management('WaterView_Tbl', [
            'SUM_POP10', 'EAID', 'NonWhite', 'LandA_M', 'Density', 'Dasy_Pop',
            'SUM'
        ])
        print(
            "Statistics for all fields and buffered water bodies calculated...--"
            + time.strftime('%Y%m%d--%H%M%S'))
        reportFile.write(
            "Calculate fields where Dasy_Pop = 0: All Fields = -99999.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if arcpy.Exists(freqDir + '/LC_Poly') == False:
            arcpy.env.snapRaster = freqDir + '/LC'
            arcpy.env.extent = freqDir + '/LC'
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21,
                                                                1], [22, 1],
                                     [30, 1], [40, 1], [52, 1], [70, 1],
                                     [80, 1], [82, 1], [91, 1], [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO',
                str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly',
                str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP',
                str(freqDir) + '/LC_Poly')
        """ Buffer the LC Polygon by -500m """
        if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty',
                str(freqDir) + '/Bnd_Cty_500m', '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')
        """ Identify whether LC is large enough """
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')
        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            """ Identify BGs within 50m of the LC edge """
            print(
                "Land Cover does not extend beyond BG boundary, editing possibly erroneous BGs...--"
                + time.strftime('%Y%m%d--%H%M%S'))
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus50', '-50 meters')
            arcpy.MakeFeatureLayer_management('LC_Poly_Minus50', 'Minus50')
            arcpy.MakeFeatureLayer_management(freqDir + '/BG', 'BG')

            arcpy.SelectLayerByLocation_management('BG', 'COMPLETELY_WITHIN',
                                                   'Minus50', '',
                                                   'NEW_SELECTION', 'INVERT')

            bgValue = float(arcpy.GetCount_management('BG').getOutput(0))
            """ For all BGs too close to the LC edge, assign both fields a value of -99998 """
            if bgValue > 0:
                bgrps = []
                cursor = arcpy.SearchCursor('BG')
                for row in cursor:
                    value = row.getValue('bgrp')
                    bgrps.append(value)
                bgrps = list(set(bgrps))
                expression = ''
                for bgrp in bgrps:
                    expression = expression + " OR bgrp = '" + str(bgrp) + "'"
                expression = expression[4:]
                arcpy.SelectLayerByAttribute_management(
                    'WaterView_Tbl', 'NEW_SELECTION', expression)
                arcpy.CalculateField_management('WaterView_Tbl', 'WVW_Pop',
                                                '-99998', 'PYTHON_9.3')
                arcpy.CalculateField_management('WaterView_Tbl', 'WVW_Pct',
                                                '-99998', 'PYTHON_9.3')
                arcpy.SelectLayerByAttribute_management(
                    'WaterView_Tbl', 'CLEAR_SELECTION')
        reportFile.write(
            "Calculate Field for BGs within 50m of the edge of the land cover, WVW_Pop and WVW_Pct = -99998.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create final table """
        arcpy.CopyRows_management('WaterView_Tbl', 'WaterWV_Fnl')

        try:
            arcpy.Delete_management(finalDir + '/' + str(city) + '_WaterWV')
        except:
            pass
        arcpy.TableToTable_conversion('WaterWV_Fnl', finalDir,
                                      city + '_WaterWV')
        allFields = [
            f.name
            for f in arcpy.ListFields(finalDir + '/' + city + '_WaterWV')
        ]
        for field in allFields:
            if field not in ['bgrp', 'OBJECTID', 'WVW_Pop', 'WVW_Pct']:
                arcpy.DeleteField_management(
                    finalDir + '/' + city + '_WaterWV', [field])
        reportFile.write(
            "Export the fields to be displayed in the EnviroAtlas to a final gdb table. WVW_Pop, WVW_Pct--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        print 'Window Views of Water End Time: ' + time.asctime() + '\n'

        #-------- COMPLETE LOGFILES ---------------------------------------------
        reportFile.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        reportFile.write("\nSomething went wrong.\n\n")
        reportFile.write("Pyton Traceback Message below:")
        reportFile.write(traceback.format_exc())
        reportFile.write("\nArcMap Error Messages below:")
        reportFile.write(arcpy.GetMessages(2))
        reportFile.write("\nArcMap Warning Messages below:")
        reportFile.write(arcpy.GetMessages(1))

        reportFile.write("\n\nEnded at " + time.asctime() + '\n')
        reportFile.write("\n---End of Log File---\n")

        if reportFile:
            reportFile.close()
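
A hedged usage sketch for the WVW metric function above; the city name and folders are placeholders, and the function assumes the working folder already holds the <city>_Freq.gdb, <city>_Final.gdb, and Logs resources, with Prj and Scratch.gdb under the input folder.

# Hypothetical call for one EnviroAtlas community.
WVW('Tucson', r'C:\EnviroAtlas\Input', r'C:\EnviroAtlas\Working')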
Example #9
def rankPaths(source, pField, curSurface, outConnect, minWidth):
    arcpy.AddMessage('Generating ranked cost paths for ' + outConnect + '...')

    cList = []
    zList = []
    rList = []

    ##    # Append core areas to connected regions to connect regions that are bisected by source habitat
    ##
    ##    # Generate Minimum convex hull of connected areas
    ##    arcpy.MinimumBoundingGeometry_management(outConnect, "in_memory\\mcp", "CONVEX_HULL", "ALL")
    ##    arcpy.Clip_analysis(source, "in_memory\\mcp", "in_memory\\src_clp")
    ##
    ##    #Merge connected and source
    ##    arcpy.Merge_management(["in_memory\\src_clp", outConnect], "in_memory\\connect_merge")
    ##
    ##    #Dissolve merged connected patches
    ##    arcpy.Dissolve_management("in_memory\\connect_merge", "in_memory\\out_connect_merge", "", "", "SINGLE_PART", "")
    ##    outConnect = "in_memory\\out_connect_merge"

    # Set intersect tolerance to 3X link layer cell size to prevent Intersect from creating multiple line segments where slivers occur
    interTol = str(3 * int(arcpy.Describe(link).meanCellWidth))
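    # NOTE: link is a raster defined elsewhere in this script; its cell size
    # sets the intersect tolerance used below.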
    minWidth = 2 * minWidth
    cstSurface = arcpy.sa.FocalStatistics(curSurface,
                                          arcpy.sa.NbrCircle(minWidth, "Map"),
                                          "MEAN", "DATA")

    # If connected region is not empty, extract cost surface by connected region to limit analysis to connected region
    if len(connectList) > 0:
        cstSurface2 = arcpy.CopyRaster_management(cstSurface, "cstSurface2")
        arcpy.AddMessage('Extracting cost surface by connected area...')
        cstSurface = arcpy.gp.ExtractByMask_sa(cstSurface, outConnect,
                                               "cstSurf")
        cstSurface = arcpy.Describe(cstSurface).name
        cstSurface2 = arcpy.Describe(cstSurface2).name

    # Create line segment where source patches touch connected regions to use as sources for cost paths

    # Make sure inputs are in same projection

    sourceProjName = arcpy.Describe(source).spatialreference.name
    curProjName = arcpy.Describe(cstSurface).spatialreference.name

    if not sourceProjName == curProjName:
        arcpy.AddMessage("\tReprojecting source layer...")
        pSource = arcpy.Project_management(
            source, os.path.join(arcpy.env.scratchWorkspace, "reproj.shp"),
            cstSurface)
    else:
        pSource = source

##    # Add core ares back to current surfaces as zero cost regions
##    arcpy.env.cellSize = '"%s"' % arcpy.Describe(cstSurface).catalogPath
##    CellSize = str(arcpy.env.cellSize)
##    arcpy.PolygonToRaster_conversion(pSource, pField, "in_memory\\rast_source", "", "", CellSize)
##    no_null = arcpy.sa.Con(arcpy.sa.IsNull("in_memory\\rast_source"),0,1)
##    cstSurface = arcpy.sa.Con(no_null, 0, cstSurface, "VALUE = 1")
##    cstSurface2 = arcpy.sa.Con(no_null, 0, cstSurface2, "VALUE = 1")

    arcpy.AddMessage(
        '\tIntersecting source patches with connected area to create source regions...'
    )
    pSource = arcpy.EliminatePolygonPart_management(pSource,
                                                    "in_memory\\eliminate",
                                                    "PERCENT", "", 10,
                                                    "CONTAINED_ONLY")
    try:
        arcpy.Delete_management(
            os.path.join(arcpy.env.scratchWorkspace, "reproj.shp"))
    except:
        pass
    pSource = arcpy.Intersect_analysis([[pSource, 1], [outConnect, 1]],
                                       "in_memory\\intersect", "ALL", interTol,
                                       "LINE")
    pSource = arcpy.MultipartToSinglepart_management(pSource,
                                                     "in_memory\\multipart")
    pSource = arcpy.UnsplitLine_management(pSource, "in_memory\\unsplit",
                                           pField)
    pSource = arcpy.MakeFeatureLayer_management(pSource, "pSource")

    # Calculate least-cost path for each pair-wise combination of source patches
    l = getCombinations(source, pField)
    values = l[0]
    combs = l[1]

    # break combination and not connected lists into unique elements and create list of regions with no connections
    if len(connectList) > 0:
        theList = connectList
    else:
        theList = noConnectList

    c = list(set(chain.from_iterable(theList)))
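    # NOTE: chain is assumed to be imported elsewhere in the script
    # (from itertools import chain); it flattens the list of region pairs.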

    # Create patch regions and cost distance rasters for each unique value in source patches
    arcpy.AddMessage(
        '\tCreating patch regions and cost distance rasters for each unique value in source patches...'
    )
    for v in values:
        if v in c:
            v = str(int(v))
            arcpy.AddMessage('\t\tProcessing patch region ' + v + '...')
            arcpy.SelectLayerByAttribute_management(pSource, "NEW_SELECTION",
                                                    pField + " = " + v)
            arcpy.MakeFeatureLayer_management(pSource, "p_" + v)
            cd = arcpy.sa.CostDistance("p_" + v, cstSurface, "",
                                       os.path.join(workspace, "bklnk_" + v))
            arcpy.MakeRasterLayer_management(cd, "CostDist_" + v)

            if len(connectList) > 0:
                rd = arcpy.sa.CostDistance(
                    "p_" + v, cstSurface2, "",
                    os.path.join(workspace, "r_bklnk_" + v))
                arcpy.MakeRasterLayer_management(rd, "r_CostDist_" + v)

    # Create least-cost paths for each region pair in both directions
    arcpy.AddMessage(
        '\tGenerating least-cost path for each patch pair combination...')

    for c in combs:
        c1 = str(int(c[0]))
        c2 = str(int(c[1]))

        if c in theList:
            arcpy.AddMessage('\t\tCalculating least-cost path from region ' +
                             c1 + ' to region ' + c2 + '...')
            cp = arcpy.sa.CostPath("p_" + c1, "CostDist_" + c2, "bklnk_" + c2,
                                   "BEST_SINGLE", "FID")
            cp1 = arcpy.MakeRasterLayer_management(cp, "CP_" + c1 + "_" + c2)
            arcpy.AddMessage('\t\tCalculating least-cost path from region ' +
                             c2 + ' to region ' + c1 + '...')
            cp = arcpy.sa.CostPath("p_" + c2, "CostDist_" + c1, "bklnk_" + c1,
                                   "BEST_SINGLE", "FID")
            cp2 = arcpy.MakeRasterLayer_management(cp, "CP_" + c2 + "_" + c1)

            cList.append(str(cp1))
            cList.append(str(cp2))

        else:
            arcpy.AddWarning(
                '\t\tRegions ' + c1 + ' and ' + c2 +
                ' are not connected.  Skipping cost path for this region pair...'
            )

    # Create combined least-cost path polyline layer
    arcpy.AddMessage('\t\tMosaicing least-cost paths for region pairs...')
    arcpy.MosaicToNewRaster_management(cList, workspace, "lcp_mos", "", "", "",
                                       "1", "MAXIMUM")

    for c in cList:
        try:
            arcpy.Delete_management(c)
        except:
            pass

    arcpy.CalculateStatistics_management(os.path.join(workspace, "lcp_mos"))
    LCP = arcpy.sa.Con(os.path.join(workspace, "lcp_mos"), "1", "",
                       "VALUE > 0")

    arcpy.Delete_management(os.path.join(workspace, "lcp_mos"))

    # Create least-cost paths by zone
    arcpy.AddMessage(
        '\tGenerating least-cost paths  by zones for each patch pair combination...'
    )
    # Create least-cost paths for each region pair in both directions
    for c in combs:
        c1 = str(int(c[0]))
        c2 = str(int(c[1]))
        if c in theList:
            arcpy.AddMessage('\t\tCalculating least-cost path from region ' +
                             c1 + ' to region ' + c2 + '...')
            zp = arcpy.sa.CostPath("p_" + c1, "CostDist_" + c2, "bklnk_" + c2,
                                   "EACH_ZONE", "FID")
            zp1 = arcpy.MakeRasterLayer_management(zp, "ZP_" + c1 + "_" + c2)
            arcpy.AddMessage('\t\tCalculating least-cost path from region ' +
                             c2 + ' to region ' + c1 + '...')
            zp = arcpy.sa.CostPath("p_" + c2, "CostDist_" + c1, "bklnk_" + c1,
                                   "EACH_ZONE", "FID")
            zp2 = arcpy.MakeRasterLayer_management(zp, "ZP_" + c2 + "_" + c1)

            zList.append(str(zp1))
            zList.append(str(zp2))

    # Create combined least-cost path polyline layer
    arcpy.AddMessage('\t\tMosaicing least-cost paths for region zones...')
    if arcpy.Exists(os.path.join(workspace, "zcp_mos")):
        arcpy.Delete_management(os.path.join(workspace, "zcp_mos"))
    arcpy.MosaicToNewRaster_management(zList, workspace, "zcp_mos", "", "", "",
                                       "1", "MAXIMUM")

    for z in zList:
        try:
            arcpy.Delete_management(z)
        except:
            pass

    arcpy.CalculateStatistics_management(os.path.join(workspace, "zcp_mos"))
    ZCP = arcpy.sa.Con(os.path.join(workspace, "zcp_mos"), "2", "",
                       "VALUE > 0")

    # Create least-cost paths through compromised areas

    if len(connectList) > 0:
        # Create patch regions and cost distance rasters for each unique value in source patches
        arcpy.AddMessage('\tCalculating costs through restoration zones...')

        arcpy.AddMessage(
            '\tGenerating potential restoration paths for each patch pair combination...'
        )
        # Create least-cost paths for each region pair in both directions
        for c in combs:
            c1 = str(int(c[0]))
            c2 = str(int(c[1]))
            if c in theList:
                arcpy.AddMessage(
                    '\t\tCalculating least-cost path from region ' + c1 +
                    ' to region ' + c2 + '...')
                rp = arcpy.sa.CostPath("p_" + c1, "r_CostDist_" + c2,
                                       "r_bklnk_" + c2, "EACH_ZONE", "FID")
                rp1 = arcpy.MakeRasterLayer_management(rp,
                                                       "RP_" + c1 + "_" + c2)
                arcpy.AddMessage(
                    '\t\tCalculating least-cost path from region ' + c2 +
                    ' to region ' + c1 + '...')
                rp = arcpy.sa.CostPath("p_" + c2, "r_CostDist_" + c1,
                                       "r_bklnk_" + c1, "EACH_ZONE", "FID")
                rp2 = arcpy.MakeRasterLayer_management(rp,
                                                       "RP_" + c2 + "_" + c1)

                rList.append(str(rp1))
                rList.append(str(rp2))

        # Create combined least-cost path polyline layer
        arcpy.AddMessage('\t\tMosaicing least-cost paths for region zones...')
        if arcpy.Exists(os.path.join(workspace, "rcp_mos")):
            arcpy.Delete_management(os.path.join(workspace, "rcp_mos"))
        arcpy.MosaicToNewRaster_management(rList, workspace, "rcp_mos", "", "",
                                           "", "1", "MAXIMUM")

        for r in rList:
            try:
                arcpy.Delete_management(r)
            except:
                pass

        arcpy.CalculateStatistics_management(os.path.join(
            workspace, "rcp_mos"))
        RCP = arcpy.sa.Con(os.path.join(workspace, "rcp_mos"), "3", "",
                           "VALUE > 0")
        mList = [LCP, ZCP, RCP]

    else:
        mList = [LCP, ZCP]

    arcpy.AddMessage(
        '\tCombining least-cost paths by region and least-cost paths by region zones...'
    )
    arcpy.MosaicToNewRaster_management(mList, workspace, "lcp_mos", "", "", "",
                                       "1", "MINIMUM")
    LCP = arcpy.RasterToPolyline_conversion(os.path.join(workspace, "lcp_mos"),
                                            "LCP", "", "", "NO_SIMPLIFY")

    # Create a fieldinfo object to rename grid_code field
    fieldinfo = arcpy.FieldInfo()
    fieldinfo.addField("GRID_CODE", "PATH_RNK", "VISIBLE", "")
    outLCP = arcpy.MakeFeatureLayer_management(str(LCP), "outLCP", "", "",
                                               fieldinfo)
    # arcpy.CopyFeatures_management(outLCP, os.path.join(workspace, outLCP.shp))

    try:
        arcpy.Delete_management(os.path.join(workspace, "lcp_mos"))
        arcpy.Delete_management(os.path.join(workspace, "zcp_mos"))
        arcpy.Delete_management(os.path.join(workspace, "rcp_mos"))
        #arcpy.Delete_management("in_memory")
    except:
        pass
    return (outLCP)
Example #10
# --- Dissolve
arcpy.Dissolve_management(in_features="Autokast_Smooth",
                          out_feature_class="Autokast_Dissolve",
                          dissolve_field="gridcode",
                          statistics_fields="",
                          multi_part="MULTI_PART",
                          unsplit_lines="DISSOLVE_LINES")

print("Dissolved")

# Eliminate polygon parts (including holes) smaller than 25000 square meters
arcpy.EliminatePolygonPart_management(
    in_features="Autokast_Dissolve",
    out_feature_class="Autokast_Smooth_Eliminate",
    condition="AREA",
    part_area="25000 SquareMeters",
    part_area_percent="0",
    part_option="ANY")

print("Eliminate clusters completed")

# Convert the file back to raster
arcpy.PolygonToRaster_conversion(in_features="Autokast_Smooth_Eliminate",
                                 value_field="gridcode",
                                 out_rasterdataset=os.path.join(
                                     rasterfolder, "AutokastToRaster.tif"),
                                 cell_assignment="CELL_CENTER",
                                 priority_field="NONE",
                                 cellsize="10")
Example #11
    arcpy.AddMessage("Created Polygons from XYZ points...")
    arcpy.AddMessage(outMBboundary)

except:
    # If an error occurred while running a tool, then print the messages.
    arcpy.AddMessage("Did Not Create Polygons from XYZ points...")
    arcpy.AddMessage(outMBboundary)
    print arcpy.GetMessages()

try:
    outMBboundaryElim = str(workspaceGDB + "\\" + DataType + "_Seg_" +
                            inSegment + "_" + inYear + "_bdy_elim")
    EliminationAreaExpression = str(EliminationArea + " SquareMeters")
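    # NOTE: EliminationAreaExpression is built here but the call below passes a
    # hard-coded "500 SquareMeters"; substitute the expression if the
    # configurable threshold is intended.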
    arcpy.EliminatePolygonPart_management(outMBboundary,
                                          outMBboundaryElim,
                                          "AREA",
                                          "500 SquareMeters",
                                          part_area_percent="0",
                                          part_option="ANY")
    arcpy.AddMessage("Eliminated holes in polygon...")
    arcpy.AddMessage(outMBboundaryElim)
    arcpy.AddMessage(" ")
except:
    # If an error occurred while running a tool, then print the messages.
    arcpy.AddMessage("Did Not eliminate holes in polygon...")
    arcpy.AddMessage(" ")
    print arcpy.GetMessages()

#
#############################################################
#############################################################
#
Example #12
def Guidos_Post(city, inDir, workFld):
    import traceback, time, arcpy, os, subprocess
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

#-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    if arcpy.Exists(str(workFld) + '/' + city + '_GUIDOS.gdb') == False:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_GUIDOS.gdb')
    else:
        pass
    workDir = str(workFld) + '/' + city + '_GUIDOS.gdb'
    arcpy.env.workspace = workDir

    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city +'_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'

    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'

    """ Split Raster Directory """
    if os.path.isdir(str(workFld) + '/' + city + '_Split') == True:
        pass
    else:
        os.makedirs(str(workFld) + '/' + city + '_Split')
    splitDir = str(workFld) + '/' + city + '_Split'

    """ Set Workspace Environments """
    arcpy.env.workspace = workDir
    arcpy.env.scratch = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
##        try:
        loglist = sorted(f for f in os.listdir(reportfileDir) if f.startswith(str(city) + '_Conn'))
        tmpName = loglist[-1]
##        except:
##            tmpName = city + '_Conn_' + time.strftime('%Y%m%d_%H-%M')  + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            reportFile = open(reportfileName, 'a')
        except:
            reportFile = open(reportfileName, 'w')
            print 'No log for GUIDOS_Prep'

        try:
            loglist = sorted (f for f in os.listdir(reportfileDir) if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M')  + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'

        print 'Connectivity Start Time: ' + time.asctime()
        reportFile.write("For each piece, convert the raster from a tiled TIFF to a striped TIFF.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        #Figure out the correct UTM Zone
        prjNumb = arcpy.Describe(str(freqDir) + '/LC').spatialReference.name
        prjNumb = prjNumb[-3:]
        prjfileUTM = prjDir + '/NAD 1983 UTM Zone ' + prjNumb + '.prj'

        AllRast = 'yes'

        """ -------- RUN GUIDOS ---------------------------------------------- """
        arcpy.env.workspace = splitDir
        guidlist = arcpy.ListRasters('S_FWW*')
        """ For each raster piece, run GUIDOS """
        for r in guidlist:
            guidexe = str(inDir) + '/mspa_win64.exe'
            inrast = str(splitDir) + '/' + str(r)
            outrast = 'G_' + str(r)
            outdir = str(splitDir) + '/'
            argu = (guidexe, '-i', inrast, '-o', outrast, '-eew', '30',
                    '-internal', '0', '-odir', outdir, '-transition', '0')
            subprocess.call(argu)

            """ If GUIDOS worked, Reproject the rasters """
            if arcpy.Exists(outrast) == True:
                # Project the output raster
                oLeft = arcpy.GetRasterProperties_management(r, "LEFT").getOutput(0)
                oBottom = arcpy.GetRasterProperties_management(r, "BOTTOM").getOutput(0)
                nBottom = arcpy.GetRasterProperties_management(outrast, 'BOTTOM').getOutput(0)

                xMove = float(oLeft) + 0.5
                yMove = float(oBottom) - float(nBottom)

                arcpy.Shift_management(outrast, 'Sh_' + str(r), xMove, yMove, r)

                numb = r.replace('S_FWW_WB_', '')

                arcpy.env.extent = "MAXOF"
                arcpy.env.snapRaster = str(splitDir) + '/' + str(r)
                descLC = arcpy.Describe(str(freqDir) + '/LC')
                arcpy.ProjectRaster_management('Sh_' + str(r), 'Conn_' + str(numb),
                                               descLC.spatialReference, '', '', '',
                                               '', descLC.spatialReference)
            else:
                """ If GUIDOS didn't work, quit after loop """
                AllRast = 'no'
                print str(outrast) + ' not run properly'

        reportFile.write("For each piece, run the GUIDOS (Graphical User Interface for the Description of image Objects and their Shapes) v2.1 MSPA (Morphological Spatial Pattern Analysis) Standalone Tool (http://forest.jrc.ec.europa.eu/download/software/guidos) using 8 neighbor connectivity, 30 pixel edge, transition off, and intext off.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        reportFile.write("Project each GUIDOS piece into UTM, shifting to the location of the original piece.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """-------- If all the GUIDOS Rasters worked, Post-Process ------------ """
        if AllRast == 'yes':
            """-------- Split the Raster As Needs, Process Each Piece ----------------- """
            """ Check if the raster should be split """
            columns = arcpy.GetRasterProperties_management(splitDir + '/FWW_WB.tif', 'COLUMNCOUNT').getOutput(0)
            xsplit = int(float(columns) / 8000) + 1
            rows = arcpy.GetRasterProperties_management(splitDir + '/FWW_WB.tif', 'ROWCOUNT').getOutput(0)
            ysplit = int (float(rows) / 8000) + 1

            """-------- If no split, run the analysis --------------------------------- """
            if xsplit*ysplit == 1:
                """ Copy Raster """
                arcpy.CopyRaster_management(splitDir + '/Conn.TIF', workDir + '/Conn')
                reportFile.write("Copy Tiff into Working GDB.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

                """-------- If split, run the analysis on each piece and recombine --------- """
            else:
                """ Set Environments """
                arcpy.env.extent = 'FWW_WB.tif'
                arcpy.env.snapRaster = 'FWW_WB.tif'

                """ Split the Raster """
                arcpy.SplitRaster_management('FWW_WB.tif', splitDir, 'NoO_WB_', 'NUMBER_OF_TILES', 'TIFF', '', str(xsplit) + ' ' + str(ysplit), '',  '', '')
                reportFile.write("Split the reclassified land cover into the same pieces as previous but with no overlap.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

                """ Clip the overlap off the tiles """
                pieces = int(xsplit) * int(ysplit)
                for tiles in range(int(pieces)):
                    if arcpy.Exists('NoO_WB_' + str(tiles) + '.TIF') == True:
                        """ Set Environments """
                        arcpy.env.extent = 'NoO_WB_' + str(tiles) + '.TIF'
                        arcpy.env.snapRaster = 'NoO_WB_' + str(tiles) + '.TIF'

                        """ Extract the Area of Interest """
                        EbM = arcpy.sa.ExtractByMask('Conn_' + str(tiles) + '.TIF', 'NoO_WB_' + str(tiles) + '.TIF')
                        EbM.save(workDir + '/Conn_' + str(tiles))
                    else:
                        pass
                reportFile.write("Clip each GUIDOS output to the corresponding piece of the second raster split to eliminate overlap.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

                """ Set Environments """
                arcpy.env.extent = workDir + '/FWW_WB'
                arcpy.env.snapRaster = workDir + '/FWW_WB'
                arcpy.env.workspace = workDir

                """ Mosaic tiles together """
                RastList = arcpy.ListRasters('Conn_*')
                arcpy.MosaicToNewRaster_management(RastList, workDir, 'Conn', '', '8_BIT_UNSIGNED', 1, 1, '', '')
                reportFile.write("Mosaic all of the clipped GUIDOS output tiles into one raster.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

            """ Burn water into the Output Raster """
            watercon = arcpy.sa.Con(freqDir + '/LC', 10, 'Conn', 'value = 10')
            watercon.save('Conn_WithWat')
            reportFile.write("Using the original land cover, burn water pixels into raster using a conditional statement if Land Cover Value = 10; for true: 10; for false: GUIDOS raster.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

            """--------- Clip the EA Boundaries to the County Lines, if necessary ----- """
            if arcpy.Exists(str(freqDir) + '/Bnd_Cty') == False:
                """ Copy Counties to Frequent and Project """
                arcpy.MakeFeatureLayer_management(str(inDir) + '/Input.gdb/Counties_Alb', 'Cty')
                arcpy.SelectLayerByLocation_management('Cty', 'CONTAINS', 'BG_Alb', '', 'NEW_SELECTION')
                arcpy.FeatureClassToFeatureClass_conversion(str(inDir) + '/Input.gdb/Counties_Alb', str(freqDir), 'Counties_Alb')
                arcpy.SelectLayerByAttribute_management('Cty', 'CLEAR_SELECTION')
                descLC = arcpy.Describe(str(freqDir) + '/LC')
                arcpy.Project_management('Counties_Alb', 'Counties', descLC.spatialReference)

                """ Clip Boundary to County Lines """
                arcpy.Clip_analysis(str(freqDir) + '/Bnd', str(freqDir) + '/Counties', str(freqDir) + '/Bnd_Cty')
                reportFile.write("Clip the EnviroAtlas Community Boundary to the county lines for the community to limit the output to land area.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
                ReuseRF.write("Bnd_Cty--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            else:
                reportFile.write("Clip the EnviroAtlas Community Boundary to the county lines for the community to limit the output to land area.--Bnd_Cty" + '--\n')

        	"""-------- Check that the Analysis Area is covered by the LC -------------- """
        	""" Create a Polygon Version of the LC """
            if arcpy.Exists(freqDir + '/LC_Poly') == False:
                arcpy.env.snapRaster = str(freqDir) + '/LC'
                arcpy.env.extent = str(freqDir) + '/LC'
                ReC = arcpy.sa.Reclassify(str(freqDir) + '/LC', 'Value', arcpy.sa.RemapValue([[0,0],[10,1],[20,1],[21,1],[22,1],[30,1],[40,1],[52,1],[70,1],[80,1],[82,1],[91,1],[92,1]]))
                ReC.save(str(freqDir) + '/AreaIO')
                arcpy.RasterToPolygon_conversion(str(freqDir) + '/AreaIO', str(freqDir) + '/LC_Poly', 'SIMPLIFY')
                arcpy.env.extent = workDir + '/FWW_WB'
                arcpy.env.snapRaster = workDir + '/FWW_WB'

            """ Buffer the LC Polygon by -500m """
            if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
    			arcpy.Buffer_analysis(str(freqDir) + '/Bnd_Cty', str(freqDir) + '/Bnd_Cty_500m', '500 meters')
    			arcpy.EliminatePolygonPart_management(str(freqDir) + '/Bnd_Cty_500m', str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30', 'CONTAINED_ONLY')
    			arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
    			arcpy.Rename_management(str(freqDir) + '/Bnd_Cty_500m_EP', str(freqDir) + '/Bnd_Cty_500m')

            """ Identify whether LC is large enough """
            arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
            arcpy.MakeFeatureLayer_management(str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

            arcpy.SelectLayerByLocation_management('BC_500lyr', 'COMPLETELY_WITHIN', 'LClyr', '', 'NEW_SELECTION')
            bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
            arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')

            """ If the LC isn't large enough, edit erroneous BGS """
            if bigEnough == 0:
                arcpy.Clip_analysis(freqDir + '/Bnd_Cty', freqDir + '/LC_Poly', 'Bnd_Cty_LC')
                reportFile.write("Because the community boundary extends beyond the Land Cover, clip the boundary to the land cover.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

            """ Clip the Raster to Bnd_Cty """
            try:
                EbM = arcpy.sa.ExtractByMask('Conn_WithWat', 'Bnd_Cty_LC')
            except:
                EbM = arcpy.sa.ExtractByMask('Conn_WithWat', freqDir + '/Bnd_Cty')
            EbM.save('Conn_Bnd')
            reportFile.write("Extract by Mask the area of the projected raster that is within the clipped EnviroAtlas Community Boundary.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

            """ Add Colormap """
            arcpy.AddColormap_management('Conn_Bnd', '', str(inDir) + '/Templates/GUIDOS2.clr')

            """ Convert to TIFF for sharing """
            arcpy.CopyRaster_management('Conn_Bnd', splitDir + '/' + str(city) + '_Conn.tif')
            reportFile.write("Add a default GUIDOS colormap to the final raster from any of the original GUIDOS output tiles.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

            """ Convert to Web Mercator """
            prjfileWM = prjDir + '/WGS 1984 Web Mercator (auxiliary sphere).prj'
            if arcpy.Exists(freqDir + '/Bnd_WM') == False:
                arcpy.Project_management(freqDir + '/Bnd', freqDir + '/Bnd_WM', prjfileWM)
            arcpy.env.extent = freqDir + '/Bnd_WM'
            arcpy.ProjectRaster_management('Conn_Bnd', 'Conn_WM', prjfileWM)
            reportFile.write("Convert raster into TIFF format for distribution.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

            """ Copy to Final Directory """
            arcpy.CopyRaster_management('Conn_WM', finalDir + '/' + str(city) + '_Conn')
    ##            reportFile.write("Step 15--Copy to Final GDB--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

            print 'GUIDOS End Time: ' + time.asctime()

        else:
            print 'Some MSPA functions did not complete. Please run manually.'
        #-------------------------------------------------------------------------

        #-------- COMPLETE LOGFILES ---------------------------------------------
        reportFile.close()
        ReuseRF.close()

    #-----------------------------------------------------------------------------
    # END ANALYSIS
    #-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        reportFile.write("\nSomething went wrong.\n\n")
        reportFile.write("Pyton Traceback Message below:")
        reportFile.write(traceback.format_exc())
        reportFile.write("\nArcMap Error Messages below:")
        reportFile.write(arcpy.GetMessages(2))
        reportFile.write("\nArcMap Warning Messages below:")
        reportFile.write(arcpy.GetMessages(1))

        reportFile.write( "\n\nEnded at " + time.asctime() + '\n')
        reportFile.write("\n---End of Log File---\n")

        if reportFile:
            reportFile.close()
Example #13
0
def WVT(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_TreeWV.gdb')
    except:
        print 'TreeWV GDB already exists'
    workDir = str(workFld) + '/' + city + '_TreeWV.gdb'
    arcpy.env.workspace = workDir
    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
    """ Split Raster Directory """
    if os.path.isdir(str(workFld) + '/' + city + '_Split') == True:
        pass
    else:
        os.makedirs(str(workFld) + '/' + city + '_Split')
    splitDir = str(workFld) + '/' + city + '_Split'
    """ Set Workspace Environments """
    arcpy.env.workspace = workDir
    arcpy.env.scratchWorkspace = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_TreeWV_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        reportFile = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'
        """ Write out first line of report file """
        print 'Window Views of Trees Start Time: ' + time.asctime()
        reportFile.write(
            "Begin with EnviroAtlas 1-meter Land Cover for the EnviroAtlas community--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/LC'
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.overwriteOutput = True
        """-------- Reclassify LC into Binary Forest ----------------------------- """
        if arcpy.Exists(str(freqDir) + '/MForestIO') == False:
            outReclass = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 1], [52, 0], [70, 0],
                                     [80, 0], [82, 1], [91, 1], [92, 0]]))
            outReclass.save(str(freqDir) + '/MForestIO')
            reportFile.write(
                "Reclassify the Land Cover into a Forest binary REPLACE-MFE" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            print("Forest area reclassified to binary raster..." +
                  time.asctime())
            ReuseRF.write("MForestIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')

        else:
            print("Forest binary raster already exists..." + time.asctime())
            reportFile.write(
                "Reclassify the Land Cover into a Forest binary REPLACE-MFE--MForestIO"
                + '--\n')
        """-------- Create 50m Moving Window ------------------------------------- """
        MW = arcpy.sa.FocalStatistics(freqDir + '/MForestIO',
                                      arcpy.sa.NbrCircle(50, 'CELL'), 'SUM',
                                      'NODATA')
        MW.save('MFor_50C')
        print("Moving window complete..." + time.asctime())
        reportFile.write(
            "Run Focal Statistics on the Forest Binary Raster with a circular window of 50 meters and statistics = SUM.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Reclassify Moving Window into Trees/NoTrees ------------------- """
        ReC = arcpy.sa.Reclassify(
            'MFor_50C', 'Value',
            arcpy.sa.RemapRange([[0, 0.99999, 1], [0.99999, 10300, 0]]))
        ReC.save('NoForView')
        print(
            "Moving window completed and reclassified to tree / no trees..." +
            time.asctime())
        reportFile.write(
            "Reclassify the Focal Statistics into Forest (>0 -> 0) or No Forest (0 -> 1).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Split the Raster As Needs, Process Each Piece ----------------- """
        """ Check if the raster should be split """
        columns = arcpy.GetRasterProperties_management(
            'NoForView', 'COLUMNCOUNT').getOutput(0)
        xsplit = int(float(columns) / 20000) + 1
        rows = arcpy.GetRasterProperties_management('NoForView',
                                                    'ROWCOUNT').getOutput(0)
        ysplit = int(float(rows) / 20000) + 1
        """-------- If no split, run the analysis --------------------------------- """
        if xsplit * ysplit == 1:
            """ Convert Raster to Polygon """
            arcpy.RasterToPolygon_conversion('NoForView', 'NFVP_Whole',
                                             'NO_SIMPLIFY')
            """ Extract areas with no forest in 50m """
            arcpy.Select_analysis('NFVP_Whole', 'NFVS_Whole', 'gridcode=1')
            reportFile.write(
                "Convert the raster into a polygon and select the features where gridcode = 1.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Project into Albers for use with Dasymetric """
            arcpy.Project_management('NFVS_Whole', 'NFVA_Whole', prjfile)
            print("Raster small enough, carry on..." + time.asctime())
            reportFile.write("Convert the polygons into Albers projection.--" +
                             time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """-------- If split, run the analysis on each piece and recombine --------- """
        else:
            """ Delete the raster, if necessary """
            xy = (xsplit * ysplit)
            for rast in range(xy):
                try:
                    arcpy.Delete_management(splitDir + '/nfvsp_' + str(rast))
                except:
                    pass
            try:
                arcpy.Delete_management(splitDir + '/noforview')
            except:
                pass
            """ Split the Raster """
            arcpy.RasterToOtherFormat_conversion('NoForView', splitDir, 'GRID')
            print("Raster too big, splitting into " + str(xsplit) +
                  " rows and " + str(ysplit) + " columns..." + time.asctime())
            arcpy.SplitRaster_management(splitDir + '/NoForView', splitDir,
                                         'NFVSp_', 'NUMBER_OF_TILES', 'GRID',
                                         '',
                                         str(xsplit) + ' ' + str(ysplit))
            reportFile.write(
                "Split the raster into pieces for easier processing. The Python script determines the number of pieces based on the number of rows and columns in the raster where no piece can have a side larger than 20,000 cells--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
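            # Worked example of the tiling rule above (illustrative numbers only): a raster
            # with 45,000 columns and 18,000 rows gives
            #   xsplit = int(45000 / 20000.0) + 1 = 3
            #   ysplit = int(18000 / 20000.0) + 1 = 1
            # so the raster is split into 3 x 1 = 3 tiles, each well under 20,000 cells per side.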
            """ For each raster: """
            prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
            xy = (xsplit * ysplit)
            for Chunk in range(0, xy):
                try:
                    result = float(
                        arcpy.GetRasterProperties_management(
                            splitDir + '/NFVSp_' + str(Chunk),
                            'MEAN').getOutput(0))
                    """ If the raster piece has data: """
                    if (result > 0):
                        """ Set Environments """
                        arcpy.env.snapRaster = freqDir + '/MForestIO'
                        arcpy.env.extent = freqDir + '/MForestIO'
                        """ Copy the piece back to the Working Directory """
                        arcpy.RasterToOtherFormat_conversion(
                            splitDir + '/NFVSp_' + str(Chunk), workDir)
                        """ Convert Raster to Polygon """
                        arcpy.RasterToPolygon_conversion(
                            'NFVSp_' + str(Chunk), 'NFVP_' + str(Chunk),
                            'NO_SIMPLIFY')
                        """ Extract areas with no forest in 50m """
                        arcpy.Select_analysis('NFVP_' + str(Chunk),
                                              'NFVS_' + str(Chunk),
                                              'gridcode=1')
                        """ Project into Albers for use with Dasymetric """
                        arcpy.Project_management('NFVS_' + str(Chunk),
                                                 'NFVA_' + str(Chunk), prjfile)
                        print("Chunk " + str(Chunk) + " / " + str(xy) +
                              " processed..." + time.asctime())
                except:
                    pass
            reportFile.write(
                "For each piece, convert the raster into a polygon and select the features where gridcode = 1--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            reportFile.write(
                "For each piece, convert the polygons into Albers projection.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Combine the resulting polygons """
            NFVchunks = arcpy.ListFeatureClasses('NFVA_*')
            arcpy.Merge_management(NFVchunks, workDir + '/NFVA_Whole')
            print("All chunks remerged..." + time.asctime())
            reportFile.write(
                "Merge all of the projected polygons together.--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/Dasy'
        arcpy.env.extent = freqDir + '/Dasy'
        """-------- End of Split Processing ---------------------------------------- """
        """ Extract Dasymetric Pixels where there is no forest in 50m """
        EbM = arcpy.sa.ExtractByMask(freqDir + '/Dasy', 'NFVA_Whole')
        EbM.save('Pop_NoForView')
        reportFile.write(
            "Extract by Mask the EnviroAtlas Dasymetric (2011/October 2015) pixels within the polygons--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate the Extracted Dasy Population with Each CBG """
        arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                        'Pop_NoForView', 'BG_TWV', 'DATA',
                                        'SUM')
        reportFile.write(
            "Calculate Zonal Statistics as Table for the extracted dasymetrics with the zones being the 2010 block groups for the EnviroAtlas community.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Calculate Total Dasy Population, if necessary ------------------ """
        """ Use the existing data """
        fieldNames = [f.name for f in arcpy.ListFields(freqDir + '/BG_Alb')]
        if 'Dasy_Pop' in fieldNames:
            reportFile.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--Dasy_Pop"
                + '--\n')
            """ Create population data """
        else:
            arcpy.AddField_management(freqDir + '/BG_Alb', 'Dasy_Pop', 'LONG')
            arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                            freqDir + '/Dasy', freqDir + '/Dasy_ZS', '',
                                            'SUM')
            arcpy.JoinField_management(freqDir + '/BG_Alb', 'bgrp',
                                       freqDir + '/Dasy_ZS', 'bgrp', ['SUM'])
            arcpy.CalculateField_management(freqDir + '/BG_Alb', 'Dasy_Pop',
                                            '!SUM!', 'PYTHON_9.3')
            arcpy.DeleteField_management(freqDir + '/BG_Alb', ['SUM'])
            arcpy.JoinField_management(freqDir + '/BG', 'bgrp',
                                       freqDir + '/BG_Alb', 'bgrp',
                                       ['Dasy_Pop'])
            reportFile.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("Dasy_Pop--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        """-------- Create Final Table --------------------------------------------- """
        arcpy.TableToTable_conversion(freqDir + '/BG_Alb', workDir, 'TreeWV',
                                      '', 'bgrp')
        arcpy.DeleteField_management('TreeWV', [
            'PLx2_Pop', 'PLx2_Pct', 'SUM_HOUSIN', 'NonWhite', 'LandA_M',
            'Density', 'under_1', 'under_1pct', 'under_13', 'under_13pc',
            'over_70', 'over_70pct', 'Shape_Length', 'Shape_Leng',
            'NonWhite_Pop', 'NonWt_Pct', 'Area_M', 'Shape_Le_1', 'Shape_Area',
            'ALAND', 'NonWhite_P', 'H_Income_M', 'State'
        ])
        TreeView = 'TreeWV'
        reportFile.write(
            "Create a new table based on the EnviroAtlas community block groups table retaining the BGRP and Dasy_Pop fields--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add fields to new table """
        arcpy.AddField_management(TreeView, 'WVT_Pop', 'LONG')
        arcpy.AddField_management(TreeView, 'WVT_Pct', 'FLOAT', 5, 2)
        reportFile.write(
            "Add fields to the new table for WVT_Pop (long), WVT_Pct (float).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Join Each Table to the final table and WVW_Pop """
        arcpy.JoinField_management(TreeView, 'bgrp', 'BG_TWV', 'bgrp', ['SUM'])
        arcpy.CalculateField_management(TreeView, 'WVT_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.MakeTableView_management(TreeView, 'TreeView_Tbl')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION', 'SUM IS NULL')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop', 0,
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        arcpy.DeleteField_management('TreeView_Tbl', 'SUM')
        reportFile.write(
            "Join the zonal statistics table with the new table to calculate the new fields: WVT_Pop = zonal statistics.SUM; remove join--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate WVT_Pct """
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION', 'Dasy_Pop >0')
        arcpy.CalculateField_management(
            'TreeView_Tbl', 'WVT_Pct',
            '"%.2f" % (float(!WVT_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        reportFile.write(
            "Calculate field WVT_Pct = WVT_Pop / Dasy_Pop * 100 (limited to 2 decimal places).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
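        # The field-calculator expression above is plain Python; with hypothetical values it
        # behaves as follows (shown only to illustrate the two-decimal rounding):
        #   WVT_Pop, Dasy_Pop = 1234, 5678
        #   "%.2f" % (float(WVT_Pop) / float(Dasy_Pop) * 100)   # -> '21.73'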
        """ Calculate NULL values, where applicable """
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION',
                                                'Dasy_Pop = 0')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        arcpy.DeleteField_management('TreeView_Tbl', [
            'SUM_POP10', 'EAID', 'NonWhite', 'LandA_M', 'Density', 'Dasy_Pop',
            'SUM'
        ])
        print("Dasy raster summarized to BGs and stats calculated..." +
              time.asctime())
        reportFile.write(
            "Calculate fields where Dasy_Pop = 0: All Fields = -99999.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if arcpy.Exists(freqDir + '/LC_Poly') == False:
            arcpy.env.extent = freqDir + '/LC'
            arcpy.env.snapRaster = freqDir + '/LC'
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21,
                                                                1], [22, 1],
                                     [30, 1], [40, 1], [52, 1], [70, 1],
                                     [80, 1], [82, 1], [91, 1], [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO',
                str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly',
                str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP',
                str(freqDir) + '/LC_Poly')
        """ Buffer the LC Polygon by -500m """
        if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty',
                str(freqDir) + '/Bnd_Cty_500m', '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')
        """ Identify whether LC is large enough """
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')
        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            """ Identify BGs within 50m of the LC edge """
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus50', '-50 meters',
                'FULL', 'FLAT', 'ALL')
            arcpy.MakeFeatureLayer_management('LC_Poly_Minus50', 'Minus50')
            arcpy.MakeFeatureLayer_management(freqDir + '/BG', 'BG')

            arcpy.SelectLayerByLocation_management('BG', 'COMPLETELY_WITHIN',
                                                   'Minus50', '',
                                                   'NEW_SELECTION', 'INVERT')

            bgValue = float(arcpy.GetCount_management('BG').getOutput(0))
            print("LC extends beyond BG boundary, carry on..." +
                  time.asctime())
            """ For all BGs too close to the LC edge, assign both fields a value of -99998 """
            if bgValue > 0:
                bgrps = []
                cursor = arcpy.SearchCursor('BG')
                for row in cursor:
                    value = row.getValue('bgrp')
                    bgrps.append(value)
                bgrps = list(set(bgrps))
                expression = ''
                for bgrp in bgrps:
                    expression = expression + " OR bgrp = '" + str(bgrp) + "'"
                expression = expression[4:]
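                # With two hypothetical block groups the loop above builds
                #   " OR bgrp = '010010001001' OR bgrp = '010010001002'"
                # and the [4:] slice drops the leading " OR ", leaving a valid where clause:
                #   "bgrp = '010010001001' OR bgrp = '010010001002'"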
                arcpy.SelectLayerByAttribute_management(
                    'TreeView_Tbl', 'NEW_SELECTION', expression)
                arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop',
                                                '-99998', 'PYTHON_9.3')
                arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pct',
                                                '-99998', 'PYTHON_9.3')
                arcpy.SelectLayerByAttribute_management(
                    'TreeView_Tbl', 'CLEAR_SELECTION')
            print(
                "LC doesn't extend beyond BGs, removing border BGs from analysis..."
                + time.asctime())
            reportFile.write(
                "Calculate Field for BGs within 50m of the edge of the land cover, WVT_Pop and WVW_Pct = -99998.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create final table """
        arcpy.CopyRows_management('TreeView_Tbl', 'TreeWV_Fnl')
        try:
            arcpy.Delete_management(finalDir + '/' + str(city) + '_TreeWV')
        except:
            pass
        arcpy.TableToTable_conversion('TreeWV_Fnl', finalDir, city + '_TreeWV')
        allFields = [
            f.name for f in arcpy.ListFields(finalDir + '/' + city + '_TreeWV')
        ]
        for field in allFields:
            if field not in ['bgrp', 'OBJECTID', 'WVT_Pop', 'WVT_Pct']:
                arcpy.DeleteField_management(finalDir + '/' + city + '_TreeWV',
                                             [field])
        print 'Window Views of Trees End Time: ' + time.asctime() + '\n'
        reportFile.write(
            "Export the fields to be displayed in the EnviroAtlas to a final gdb table. WVT_Pop, WVT_Pct--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- COMPLETE LOGFILES ---------------------------------------------
        reportFile.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        reportFile.write("\nSomething went wrong.\n\n")
        reportFile.write("Pyton Traceback Message below:")
        reportFile.write(traceback.format_exc())
        reportFile.write("\nArcMap Error Messages below:")
        reportFile.write(arcpy.GetMessages(2))
        reportFile.write("\nArcMap Warning Messages below:")
        reportFile.write(arcpy.GetMessages(1))

        reportFile.write("\n\nEnded at " + time.asctime() + '\n')
        reportFile.write("\n---End of Log File---\n")

        if reportFile:
            reportFile.close()
Example #14
0
    outF = a * (Raster(ACC_BLC) ** b) * 100
    outF.save(WAT_FL)
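    # The line above applies a power-law (Leopold-type) relation between flow accumulation
    # and stream water energy level: level = a * accumulation**b * 100. As a purely
    # illustrative example with hypothetical coefficients a = 0.01 and b = 0.3, a cell with
    # a flow accumulation of 10000 gets a level of 0.01 * 10000**0.3 * 100, about 15.85.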

    #Assigning the same stream water energy level for each hydrologically connected cell
    outW = Watershed(FD, WAT_FL, "VALUE")
    outW.save(WAT_HGD)
    
    #outCon = SetNull (DEM_DIFF, 1,  "VALUE < %f" % bl_tresh )
    arcpy.AddMessage('- Delineating floodplain polygon...')
    #Subtracting the terrain elevation from the water energy levels
    outCon = Con(Raster(DEM_DIFF) <= Raster(WAT_HGD), 1)
    outCon.save(FPL_GRD)

    #Creating the polygon from the raster
    arcpy.RasterToPolygon_conversion(FPL_GRD, FPL1,"SIMPLIFY")
    arcpy.EliminatePolygonPart_management(FPL1, FPL2, "AREA", cellarea*10000, "", "CONTAINED_ONLY")
    arcpy.Dissolve_management(FPL2, FPL)

    #Assigning the Leopold parameters
    arcpy.AddField_management(FPL, "AREA", "float")
    arcpy.CalculateField_management(FPL, "AREA", "!shape.area@squaremeters!", "PYTHON")
    arcpy.AddField_management(FPL, "a", "float")
    arcpy.CalculateField_management(FPL, "a", "%f" %a, "PYTHON")
    arcpy.AddField_management(FPL, "b", "float")
    arcpy.CalculateField_management(FPL, "b", "%f" %b, "PYTHON")

    #Splitting the floodplain polygon for each stream order
    arcpy.Clip_analysis(WAT_SORD, FPL, FPL_ORD)
    arcpy.AddField_management(FPL_ORD, "AREA", "float")
    arcpy.CalculateField_management(FPL_ORD, "AREA", "!shape.area@squaremeters!", "PYTHON")
    
Example #15
0
#inFile = "C:\Users\hostx009\Documents\temp"
#outFile = "C:\\Users\\hostx009\\Documents\\temp\\temp.shp"

# Local variables:
temp1 = "C:\\Users\\hostx009\\Documents\\ArcGIS\\Default1.gdb\\polyMerge_temp1"
temp2 = "C:\\Users\\hostx009\\Documents\\ArcGIS\\Default1.gdb\\polyMerge_temp2"

arcpy.env.workspace = inFile
List = arcpy.ListFeatureClasses("*clean.shp")

# Process: Merge
arcpy.Merge_management(List, temp1)

# Process: Eliminate Polygon Part
arcpy.EliminatePolygonPart_management(temp1, temp2, "AREA",
                                      "5000 SquareMeters", "",
                                      "CONTAINED_ONLY")

# Process: add z field
arcpy.AddField_management(temp2, "z", "LONG", "", "", "", "", "NULLABLE",
                          "NON_REQUIRED", "")

# Process: Calculate Field
arcpy.CalculateField_management(temp2, "z", strata, "VB", "")

# Process: Polygon to raster
arcpy.PolygonToRaster_conversion(temp2, "z", outFile, "CELL_CENTER", "NONE",
                                 "1")

# # Process: Delete
arcpy.Delete_management(temp1, "")
Example #16
0
# Use cursor to populate parts and rings
fields = ["FID", "shape@", "parts", "rings"]
with arcpy.da.UpdateCursor(zones_eliminated2, fields) as cursor:
    for row in cursor:
        shape = row[1]
        parts = shape.partCount
        rings = shape.boundary().partCount

        row[2] = parts
        row[3] = rings

        cursor.updateRow(row)

# Eliminate polygon part
microzones_no_rings = arcpy.EliminatePolygonPart_management(
    zones_eliminated2, os.path.join(temp_dir, 'microzones_no_rings.shp'),
    'PERCENT', "", 50)

# Get filled zones
filled_zones = arcpy.MakeFeatureLayer_management(microzones_no_rings, 'zones')
query = """"rings" > 1"""
arcpy.SelectLayerByAttribute_management(filled_zones, "NEW_SELECTION", query)

# Erase zones with rings
microzones_rings_erased = arcpy.Erase_analysis(
    zones_eliminated2, filled_zones, os.path.join(temp_dir,
                                                  'zones_erased.shp'))

# add missing zones back
merged_zones = arcpy.Merge_management([microzones_rings_erased, filled_zones],
                                      os.path.join(temp_dir, 'zones_merged.shp'))  # output name assumed; the original snippet was truncated here
Example #17
0
def freq(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Report File Directory """
    try:
        os.makedirs(str(workFld) + '/Logs')
    except:
        pass
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_Freq.gdb')
    except:
        pass
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Current Workspace """
    workDir = freqDir
    """ Final Geodatabase """
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_Final.gdb')
    except:
        pass
    finDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Dasymetric Directory """
    dasyDir = str(inDir) + '/Input.gdb/Dasy_10232015'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    """ Set Workspace Environments """
    arcpy.env.workspace = workDir
    arcpy.env.scratchWorkspace = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_BG__' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        BGRF = open(reportfileName, 'w')

        tmpName = city + '_BG_Pop_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        BG_PopRF = open(reportfileName, 'w')

        tmpName = city + '_Bnd_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        BndRF = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'
        """ Write out first lines of report files """
        print 'Frequent Start Time: ' + time.asctime()
        BGRF.write(
            "Obtain 2010 Urban Areas Polygon File, 2000 Urban Areas Polygon File, 2010 Block Groups, and 2010 Blocks from the US Census Bureau along with associated population tables.--201203--\n"
        )
        BGRF.write(
            "Join the population tables with the associated blocks and block groups.--201203--\n"
        )
        BGRF.write(
            "Clip blocks to the 2010 Urban Area for the EnviroAtlas city.--201203--\n"
        )
        BGRF.write(
            "Summarize the block population by block group in a new table; urban areas are defined using blocks, so this will determine the amount of people within each block group who are within the defined urban area.--201203--\n"
        )
        BGRF.write(
            "Join the summarized block population table with the block groups polygon file.--201203--\n"
        )
        BGRF.write(
            "Calculate the percentage of the block group population that is within the urban area: [summarized block population by block group]/[total block group population] * 100--201203--\n"
        )
        BGRF.write(
            "Extract the block groups with greater than or equal to 50% of their population within the urban area to a new feature class.--201203--\n"
        )
        BGRF.write(
            "Append all block groups to the new feature class that will fill in any holes in the community boundary.--201203--\n"
        )
        BGRF.write(
            "Delete any block groups that only touch the main body of the community boundary at one corner or are islands set apart from the main body of the community boundary.--201203--\n"
        )
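        # The block-group selection steps logged above were performed by the analyst in 2012,
        # outside this function. A minimal, hedged sketch of the percentage step, assuming a
        # block-level layer already clipped to the urban area and a block-group layer with a
        # total-population field (all dataset and field names below are hypothetical):
        #
        #   arcpy.Statistics_analysis('Blocks_UA', 'BlkPop_byBG', [['POP10', 'SUM']], 'bgrp')
        #   arcpy.JoinField_management('BG_All', 'bgrp', 'BlkPop_byBG', 'bgrp', ['SUM_POP10'])
        #   arcpy.AddField_management('BG_All', 'UA_Pct', 'FLOAT')
        #   arcpy.CalculateField_management('BG_All', 'UA_Pct',
        #                                   '!SUM_POP10! / !POP10! * 100', 'PYTHON_9.3')
        #   arcpy.Select_analysis('BG_All', 'BG_EnviroAtlas', 'UA_Pct >= 50')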

        BG_PopRF.write(
            "Begin with EnviroAtlas community block groups.--201203--\n")
        BG_PopRF.write(
            "Append select census data from 2010 US Census SF1 Tables to block groups.--201203--\n"
        )

        BndRF.write(
            "Begin with the EnviroAtlas Community Block Groups.--201203--\n")
        BndRF.write(
            "Dissolve all the EnviroAtlas Community Block Groups into one polygon.--201203--\n"
        )

        #-------- COPY INPUT DATA --------------------------------------------
        """ Copy LC to Frequent if needed """
        if arcpy.Exists(str(workDir) + '/LC') == False:
            arcpy.CopyRaster_management(
                str(inDir) + '/LC/' + city + '_LC.tif',
                str(workDir) + '/LC', '', '', '', '', 'NONE', '', '', 'NONE')
        else:
            pass
        """ Set Environment Variables """
        arcpy.env.extent = 'LC'
        arcpy.env.snapRaster = 'LC'
        """ Copy BGs to Frequent if needed """
        if arcpy.Exists(str(workDir) + '/BG_Alb') == False:
            arcpy.FeatureClassToFeatureClass_conversion(
                str(inDir) + '/Bnd_Final.gdb/' + city + '_BG_Alb',
                str(workDir), 'BG_Alb')
            arcpy.DeleteField_management('BG_Alb', ['Include', 'PopWithin'])
        else:
            pass

        #-------- PROCESS BOUNDARIES -----------------------------------------
        """ Set Environment Variables """
        arcpy.env.extent = 'BG_Alb'
        arcpy.env.snapRaster = dasyDir
        """ Get Projection Information """
        descLC = arcpy.Describe(str(workDir) + '/LC')
        """ Project BG into UTM """
        arcpy.Project_management('BG_Alb', 'BG', descLC.spatialReference)
        """ Copy Counties to Frequent Dir and Project to UTM """
        arcpy.MakeFeatureLayer_management(
            str(inDir) + '/Input.gdb/Counties_Alb', 'Cty')
        arcpy.SelectLayerByLocation_management('Cty', 'CONTAINS', 'BG_Alb', '',
                                               'NEW_SELECTION')
        arcpy.FeatureClassToFeatureClass_conversion(
            str(inDir) + '/Input.gdb/Counties_Alb', str(workDir),
            'Counties_Alb')
        arcpy.SelectLayerByAttribute_management('Cty', 'CLEAR_SELECTION')
        arcpy.Project_management('Counties_Alb', 'Counties',
                                 descLC.spatialReference)
        """ Create Boundary and Buffer files """
        arcpy.Dissolve_management('BG_Alb', 'Bnd_Alb')
        arcpy.Dissolve_management('BG', 'Bnd')

        arcpy.Buffer_analysis('Bnd', 'Bnd_1km', '1 kilometer')
        arcpy.Buffer_analysis('Bnd', 'Bnd_5km', '5 kilometers')
        arcpy.Clip_analysis('Bnd', 'Counties', 'Bnd_Cty')
        arcpy.Buffer_analysis('Bnd_Cty', 'Bnd_Cty_500m', '500 meters')
        ReuseRF.write("Bnd_Cty--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Remove Holes from Buffer files """
        for buf in ('Bnd_1km', 'Bnd_5km', 'Bnd_Cty_500m'):
            arcpy.EliminatePolygonPart_management(buf, buf + '_EP', 'PERCENT',
                                                  '', '30', 'CONTAINED_ONLY')
            arcpy.Delete_management(buf)
            arcpy.Rename_management(buf + '_EP', buf)

        #-------- MANIPULATE RASTER INPUTS -------------------------------------------
        """ Set Environment Variables """
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.snapRaster = freqDir + '/LC'
        """ Create a polygon version of the LC Area """
        ReC = arcpy.sa.Reclassify(
            str(workDir) + '/LC', 'Value',
            arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21, 1], [22, 1],
                                 [30, 1], [40, 1], [52, 1], [70, 1], [80, 1],
                                 [82, 1], [91, 1], [92, 1]]))
        ReC.save('AreaIO')
        arcpy.RasterToPolygon_conversion(
            str(freqDir) + '/AreaIO',
            str(freqDir) + '/LC_Poly', 'SIMPLIFY')
        arcpy.EliminatePolygonPart_management(
            str(freqDir) + '/LC_Poly',
            str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5', 'CONTAINED_ONLY')
        arcpy.Delete_management(str(freqDir) + '/LC_Poly')
        arcpy.Rename_management(
            str(freqDir) + '/LC_Poly_EP',
            str(freqDir) + '/LC_Poly')
        """ Set Environments """
        arcpy.env.extent = 'BG_Alb'
        arcpy.env.snapRaster = dasyDir
        """ Extract the dasymetrics for the Atlas Area """
        arcpy.env.extent = 'Bnd_Alb'
        outExtractByMask = arcpy.sa.ExtractByMask(dasyDir, 'Bnd_Alb')
        outExtractByMask.save('Dasy')
        ReuseRF.write("Dasy--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create a raster with the same cells as the dasymetric but unique values """
        arcpy.RasterToPoint_conversion('Dasy', 'Dasy_Pts', 'VALUE')
        arcpy.PointToRaster_conversion('Dasy_Pts', 'pointid', 'Dasy_Cells', '',
                                       '', '30')
        ReuseRF.write("Dasy_Cells--" + time.strftime('%Y%m%d--%H%M%S') +
                      '--\n')
        """ Calculate Dasy_Pop """
        arcpy.sa.ZonalStatisticsAsTable('BG_Alb', 'bgrp', 'Dasy', 'Dasy_ZS',
                                        '', 'SUM')
        arcpy.AddField_management('BG_Alb', 'Dasy_Pop', 'LONG')
        arcpy.JoinField_management('BG_Alb', 'bgrp', 'Dasy_ZS', 'bgrp',
                                   ['SUM'])
        arcpy.CalculateField_management('BG_Alb', 'Dasy_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management('BG_Alb', ['SUM'])
        arcpy.JoinField_management('BG', 'bgrp', 'BG_Alb', 'bgrp',
                                   ['Dasy_Pop'])
        ReuseRF.write("Dasy_Pop--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add Field to BG to use as the value for rasterization """
        arcpy.AddField_management('BG', 'EAID', 'SHORT')
        arcpy.CalculateField_management(
            "BG", "EAID", "autoIncrement()", "PYTHON_9.3",
            "rec=0\\ndef autoIncrement():\\n global rec\\n pStart = 1 #adjust start value, if req'd \\n pInterval = 1 #adjust interval value, if req'd\\n if (rec == 0): \\n  rec = pStart \\n else: \\n  rec = rec + pInterval \\n return rec"
        )
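        # The code block passed to Calculate Field above is an escaped one-line string;
        # unescaped, it is equivalent to the following (a readable restatement only):
        #
        #   rec = 0
        #   def autoIncrement():
        #       global rec
        #       pStart = 1     # adjust start value, if req'd
        #       pInterval = 1  # adjust interval value, if req'd
        #       if rec == 0:
        #           rec = pStart
        #       else:
        #           rec = rec + pInterval
        #       return rec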
        """ Convert the block groups into raster format """
        arcpy.env.snapRaster = 'LC'
        arcpy.env.extent = 'LC'
        arcpy.PolygonToRaster_conversion('BG', 'EAID', 'BG_Rlc',
                                         'MAXIMUM_AREA', '', 1)
        ReuseRF.write("BG_Rlc--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- CREATE FINAL FILES ----------------------------------------------
        """ Create Final BG File """
        try:
            arcpy.Delete_management(finDir + '/' + str(city) + '_BG')
        except:
            pass
        arcpy.FeatureClassToFeatureClass_conversion('BG_Alb', finDir,
                                                    city + '_BG')
        allFields = [
            f.name for f in arcpy.ListFields(finDir + '/' + city + '_BG')
        ]
        for field in allFields:
            if field not in [
                    'bgrp', 'OBJECTID', 'Shape', 'Shape_Area', 'Shape_Length'
            ]:
                arcpy.DeleteField_management(finDir + '/' + city + '_BG',
                                             [field])
        BGRF.write(
            "Create a final version of the feature class for use in EnviroAtlas, removing all unnecessary attributes.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create Final Bnd File """
        try:
            arcpy.Delete_management(finDir + '/' + str(city) + '_Bnd')
        except:
            pass
        arcpy.FeatureClassToFeatureClass_conversion('Bnd_Alb', finDir,
                                                    city + '_Bnd')
        BndRF.write(
            "Copy polygon to final geodatabase for display in EnviroAtlas removing any unnecessary attributes.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create Final BG_Pop File """
        try:
            arcpy.Delete_management(finDir + '/' + str(city) + '_BG_Pop')
        except:
            pass

        arcpy.TableToTable_conversion('BG', finDir, city + '_BG_Pop')
        allFields = [
            f.name for f in arcpy.ListFields(finDir + '/' + city + '_BG_Pop')
        ]
        for field in allFields:
            if field not in [
                    'bgrp', 'OBJECTID', 'SUM_HOUSIN', 'SUM_POP10', 'under_1',
                    'under_1pct', 'under_13', 'under_13pc', 'over_70',
                    'over_70pct', 'NonWhite', 'NonWt_Pct', 'PLx2_Pop',
                    'PLx2_Pct'
            ]:
                arcpy.DeleteField_management(finDir + '/' + city + '_BG_Pop',
                                             [field])
        BG_PopRF.write(
            "Copy records to final table for display in EnviroAtlas, removing any unnecessary attributes.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        print 'Frequent End Time: ' + time.asctime() + '\n'

        #-------- COMPLETE LOGFILES ---------------------------------------------

        BGRF.close()
        BndRF.close()
        BG_PopRF.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------

    except:
        """ This part of the script executes if anything went wrong in the main script above """

        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        BGRF.write("\nSomething went wrong.\n\n")
        BGRF.write("Pyton Traceback Message below:")
        BGRF.write(traceback.format_exc())
        BGRF.write("\nArcMap Error Messages below:")
        BGRF.write(arcpy.GetMessages(2))
        BGRF.write("\nArcMap Warning Messages below:")
        BGRF.write(arcpy.GetMessages(1))

        BGRF.write("\n\nEnded at " + time.asctime() + '\n')
        BGRF.write("\n---End of Log File---\n")

        if BGRF:
            BGRF.close()
Example #18
0
def NrRd(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_NrRd.gdb')
    except:
        print 'NrRd GDB already exists'
    workDir = str(workFld) + '/' + city + '_NrRd.gdb'
    arcpy.env.workspace = workDir
    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    """ Input Roads Data """
    navDir = inDir + '/Input.gdb/Streets_1234_Alb'
    """ Set Workspace Environments """
    arcpy.env.scratchWorkspace = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_NrRd_Pop_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        popRF = open(reportfileName, 'w')

        tmpName = city + '_NrRd_PFor_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        pctRF = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'
        """ Write out first lines of report files """
        print 'Near Road Start Time: ' + time.asctime()
        popRF.write(
            "Begin with 2011 NavTeq Streets Layer and 1-Meter Land Cover Classification for the EnviroAtlas community created by the US EPA EnviroAtlas Team.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "Begin with 2011 NavTeq Streets Layer and 1-Meter Land Cover Classification for the EnviroAtlas community created by the US EPA EnviroAtlas Team.--ANALYST-TIME--\n"
        )

        popRF.write("Project NavTeq Streets layer into UTM.--ANALYST-TIME--\n")
        pctRF.write("Project NavTeq Streets layer into UTM.--ANALYST-TIME--\n")

        popRF.write(
            "Clip NavTeq Streets Layer to 1-km Buffer of the EnviroAtlas community boundary.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "Clip NavTeq Streets Layer to 1-km Buffer of the EnviroAtlas community boundary.--ANALYST-TIME--\n"
        )

        popRF.write(
            "Extract roads from NavTeq Streets where Func_Class = 1-4 to a new layer.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "Extract roads from NavTeq Streets where Func_Class = 1-4 to a new layer.--ANALYST-TIME--\n"
        )

        popRF.write(
            "Add Field to the new streets layer: LANES (double) and calculate where LANES = TO_LANES + FROM_LANES.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "Add Field to the new streets layer: LANES (double) and calculate where LANES = TO_LANES + FROM_LANES.--ANALYST-TIME--\n"
        )

        popRF.write(
            "For any records where LANES = 0, use Esri Aerial basemap to fill in correct lane value.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "For any records where LANES = 0, use Esri Aerial basemap to fill in correct lane value.--ANALYST-TIME--\n"
        )

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.snapRaster = freqDir + '/LC'
        Expression = 'Shape_Length <= 1050'
        """-------- Reclassify LC into Binary Forest ----------------------------- """
        if arcpy.Exists(str(freqDir) + '/MForestIO') == False:
            outReclass = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 1], [52, 0], [70, 0],
                                     [80, 0], [82, 1], [91, 1], [92, 0]]))
            outReclass.save(str(freqDir) + '/MForestIO')
            popRF.write(
                "Reclassify the Land Cover into Binary Forest. REPLACE-MFE--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            pctRF.write(
                "Reclassify the Land Cover into Binary Forest. REPLACE-MFE--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("MForestIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        else:
            popRF.write(
                "Reclassify the Land Cover into Binary Forest. REPLACE-MFE--MForestIO"
                + '--\n')
            pctRF.write(
                "Reclassify the Land Cover into Binary Forest. REPLACE-MFE--MForestIO"
                + '--\n')
        """-------- Create 29m Moving Window ------------------------------------- """
        outFocalStat = arcpy.sa.FocalStatistics(
            freqDir + '/MForestIO', arcpy.sa.NbrCircle(14.5, 'MAP'), 'SUM',
            'NODATA')
        outFocalStat.save('MFor_29C')
        popRF.write(
            "Run Focal Statistics on the Forest Binary Raster with a circular cell neighborhood with a radius of 14.5m in map units--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Run Focal Statistics on the Forest Binary Raster with a circular cell neighborhood with a radius of 14.5m in map units--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Figure out the correct UTM Zone """
        prjNumb = arcpy.Describe(str(freqDir) + '/LC').spatialReference.name
        prjNumb = prjNumb[-3:]
        prjfile = prjDir + '/NAD 1983 UTM Zone ' + prjNumb + '.prj'
        """ -------- Create Road Buffer Lines ----------------------------------"""
        """ Create Road Polygons """
        arcpy.CopyFeatures_management(
            str(inDir) + '/NavTeq_D.gdb/' + str(city) + '_NavTeq_D',
            'NavTeq_D')
        arcpy.AddField_management('NavTeq_D', 'HalfWidth', 'DOUBLE')
        arcpy.CalculateField_management('NavTeq_D', 'HalfWidth', '!Width! / 2',
                                        'PYTHON_9.3')
        popRF.write(
            "Add Field to streets layer: HALFWIDTH (double) and calculate where HALFWIDTH = LANES * 3.6576 / 2.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Add Field to streets layer: HALFWIDTH (double) and calculate where HALFWIDTH = LANES * 3.6576 / 2.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.Buffer_analysis('NavTeq_D', 'RoadEdge', 'HalfWidth', 'FULL',
                              'FLAT', 'ALL')
        popRF.write(
            "Buffer streets using the value in HALFWIDTH with options FULL, FLAT, ALL.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Buffer streets using the value in HALFWIDTH with options FULL, FLAT, ALL.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create road buffer """
        arcpy.Buffer_analysis('RoadEdge', 'RoadBuffer', '11.5 Meters', 'FULL',
                              'FLAT', 'ALL')
        popRF.write(
            "Rebuffer the buffered streets by 11.5 meters with options FULL, FLAT, ALL.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Rebuffer the buffered streets by 11.5 meters with options FULL, FLAT, ALL.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Convert the buffer into lines """
        arcpy.PolygonToLine_management('RoadBuffer', 'RdBuffLine')
        popRF.write(
            "Convert the resulting polygons into polylines - referred to as analysis lines.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Convert the resulting polygons into polylines - referred to as analysis lines.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Remove interior lines based on cut-off point """
        arcpy.MakeFeatureLayer_management('RdBuffLine', 'BuffLine_lyr')
        arcpy.SelectLayerByAttribute_management('BuffLine_lyr',
                                                'NEW_SELECTION', Expression)
        arcpy.DeleteFeatures_management('BuffLine_lyr')
        arcpy.CopyFeatures_management('BuffLine_lyr', 'BuffLineUse')
        popRF.write(
            "Delete analysis lines that are unnecessary for analysis, for example, lines in between two lanes of a divided highway and lines on the interior of a freeway ramp.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Delete analysis lines that are unnecessary for analysis, for example, lines in between two lanes of a divided highway and lines on the interior of a freeway ramp.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Calculate Forest Area -----------------------------"""
        """ Extract the tree values """
        outExtractByMask = arcpy.sa.ExtractByMask(workDir + '/MFor_29C',
                                                  'BuffLineUse')
        outExtractByMask.save('ForBuff')
        popRF.write(
            "Extract the Focal Statistics Raster using the analysis lines.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Extract the Focal Statistics Raster using the analysis lines.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- POPULATION ANALYSIS ---------------------------------------------
        """ Reclassify into sufficient and insufficent tree buffer. """
        outReclass2 = arcpy.sa.Reclassify(
            'ForBuff', 'Value',
            arcpy.sa.RemapRange([[0, 154, 1], [155, 620, 2]]))
        outReclass2.save('ForBinary')
        popRF.write(
            "Reclassify the extracted raster into above and below 25% tree cover: 0-154 = 1; 155-613 = 2.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create lines of sufficent and insufficient tree buffer """
        arcpy.RasterToPolygon_conversion('ForBinary', 'For_YN', 'NO_SIMPLIFY')
        popRF.write(
            "Convert the reclassified raster into a polygon WITHOUT simplifying.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.Intersect_analysis(['BuffLineUse', 'For_YN', freqDir + '/BG'],
                                 'Line_YN')
        popRF.write(
            "Intersect the analysis line with the polygons and the community block groups, splitting the analysis line into pieces of greater than and less than 25% tree cover within each block group.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.AddField_management('Line_YN', 'KMs', 'FLOAT')
        arcpy.CalculateField_management('Line_YN', 'KMs',
                                        '!shape.length@kilometers!',
                                        'PYTHON_9.3')
        popRF.write(
            "Add a new field to the analysis line: Length_KM (double) and calculate the geometry of the lines using length in kilometers.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calcualte statistics on road lenghts """
        arcpy.Select_analysis('Line_YN', 'Line_Y', '"gridcode" = 2')
        arcpy.Statistics_analysis('Line_Y', 'KMpBG_Y', [['KMs', 'SUM']],
                                  [['bgrp']])
        arcpy.Select_analysis('Line_YN', 'Line_N', '"gridcode" = 1')
        arcpy.Statistics_analysis('Line_N', 'KMpBG_N', [['KMs', 'SUM']],
                                  [['bgrp']])
        popRF.write(
            "Summarize the analysis line layer by block group and greater than vs less than 25% tree cover where the summary statistics is the sum of Length_KM.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create full buffer of roads for popualtion counts """
        arcpy.Buffer_analysis('Line_YN', 'YN_289L', '288.5 Meters', 'LEFT',
                              'FLAT', 'ALL')
        arcpy.Buffer_analysis('Line_YN', 'YN_11R', '11.5 Meters', 'RIGHT',
                              'FLAT', 'ALL')
        arcpy.Buffer_analysis('Line_YN', 'YN_14L', '14.5 Meters', 'LEFT',
                              'FLAT', 'ALL')
        arcpy.Merge_management(['YN_289L', 'YN_11R'], 'YN_300')
        popRF.write(
            "Buffer the analysis line twice: by 288.5m with LEFT, FLAT, ALL and by 11.5m with RIGHT, FLAT, ALL. Merge the two buffers together to create the population analysis zone.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create insufficient buffer area """
        arcpy.SplitLine_management('Line_N', 'Line_N_Split')
        arcpy.Buffer_analysis('Line_N_Split', 'N_289L_ND', '288.5 Meters',
                              'LEFT', 'FLAT', 'NONE')
        arcpy.Buffer_analysis('Line_N_Split', 'N_289L_D', '288.5 Meters',
                              'LEFT', 'FLAT', 'ALL')
        arcpy.Merge_management(['N_289L_D', 'N_289L_ND', 'YN_11R', 'YN_14L'],
                               'N_300_ND')
        arcpy.Dissolve_management('N_300_ND', 'N_300')
        popRF.write(
            "Buffer the analysis line twice again: by 14.5m with LEFT, FLAT, ALL and by 11.5m with RIGHT, FLAT, ALL. Select the analysis line pieces with grid_code = 1 and buffer by 288.5m with LEFT, FLAT, ALL. Merge the three buffers together to identify areas of less than 25% tree cover.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create sufficient buffer area """
        arcpy.Erase_analysis('YN_300', 'N_300', 'BuffSuff')
        popRF.write(
            "Erase the areas of less than 25% tree cover from the population analysis area to identify areas buffered by greater than 25% tree cover.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create real insufficient buffer area """
        arcpy.Erase_analysis('YN_300', 'BuffSuff', 'BuffInSuff')
        popRF.write(
            "Clip the area buffered by less than 25% tree cover to the population analysis zone for consistency's sake.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Convert sufficient and insufficient areas into Albers and rasters """
        prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
        arcpy.Project_management('BuffInSuff', 'BuffInSuff_Alb', prjfile)
        arcpy.Project_management('BuffSuff', 'BuffSuff_Alb', prjfile)
        popRF.write(
            "Project both the less than and greater than areas into Albers.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.AddField_management('BuffInSuff_Alb', 'InSuff', 'SHORT')
        arcpy.CalculateField_management('BuffInSuff_Alb', 'InSuff', '1',
                                        'PYTHON_9.3')
        arcpy.AddField_management('BuffSuff_Alb', 'Suff', 'SHORT')
        arcpy.CalculateField_management('BuffSuff_Alb', 'Suff', '1',
                                        'PYTHON_9.3')
        popRF.write(
            "Add a field to each polygon layer: Value (short) and calculate where Value=1.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/Dasy'
        arcpy.env.extent = freqDir + '/Dasy'
        """ Convert Rasters to Polygons """
        arcpy.PolygonToRaster_conversion('BuffInSuff_Alb', 'InSuff',
                                         'InSuff_R', 'Maximum_Area', '', 30)
        arcpy.PolygonToRaster_conversion('BuffSuff_Alb', 'Suff', 'Suff_R',
                                         'Maximum_Area', '', 30)
        popRF.write("Convert each polygon layer into a raster. --" +
                    time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Extract by Mask and Calculate Zonal Statistics for Insufficient and Sufficient Areas """
        for val in ('InSuff', 'Suff'):
            EbM = arcpy.sa.ExtractByMask(freqDir + '/Dasy', val + '_R')
            EbM.save(val + '_Pop')
            arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                            val + '_Pop', 'Pop_' + str(val),
                                            'DATA', 'SUM')
        popRF.write(
            "Extract by Mask the EnviroAtlas Dasymetric (2011/October 2015) within each of the rasterized zones.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        popRF.write(
            "Calculate Zonal Statistics as a Table for the two extracted dasymetric rasters with the zones being the 2010 block groups within the EnviroAtlas community boundary.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Calculate Total Dasy Population, if necessary ------------------ """
        """ Use the existing data """
        fieldNames = [f.name for f in arcpy.ListFields(freqDir + '/BG_Alb')]
        if 'Dasy_Pop' in fieldNames:
            popRF.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--Dasy_Pop"
                + '--\n')
            """ Create population data """
        else:
            arcpy.AddField_management(freqDir + '/BG_Alb', 'Dasy_Pop', 'LONG')
            arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                            freqDir + '/Dasy',
                                            freqDir + '/Dasy_ZS', '', 'SUM')
            arcpy.JoinField_management(freqDir + '/BG_Alb', 'bgrp',
                                       freqDir + '/Dasy_ZS', 'bgrp', ['SUM'])
            arcpy.CalculateField_management(freqDir + '/BG_Alb', 'Dasy_Pop',
                                            '!SUM!', 'PYTHON_9.3')
            arcpy.DeleteField_management(freqDir + '/BG_Alb', ['SUM'])
            arcpy.JoinField_management(freqDir + '/BG', 'bgrp',
                                       freqDir + '/BG_Alb', 'bgrp',
                                       ['Dasy_Pop'])
            popRF.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write('Dasy_Pop--' + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        """-------- Create Final Table --------------------------------------------- """
        arcpy.TableToTable_conversion(freqDir + '/BG', workDir, 'NrRd_Pop', '',
                                      'bgrp')
        arcpy.DeleteField_management('NrRd_Pop', [
            'NonWhite', 'PLx2_Pop', 'PLx2_Pct', 'SUM_HOUSIN', 'under_1',
            'under_1pct', 'under_13', 'under_13pc', 'over_70', 'over_70pct',
            'Shape_Length', 'Shape_Leng', 'NonWhite_Pop', 'NonWt_Pct',
            'Density', 'Shape_Le_1', 'Shape_Area', 'Black', 'Blackpct',
            'PopWithin', 'PctWithin', 'Include', 'City', 'Area', 'LandA_M',
            'LandA_M_1', 'NonWhite_P', 'H_Income_M', 'State'
        ])
        nrrdtbl = 'NrRd_Pop'
        popRF.write(
            "Create a new table based on the EnviroAtlas community block groups table retaining the BGRP and Dasy_Pop fields--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add fields to new table """
        arcpy.AddField_management(nrrdtbl, 'IBuff_Pop', 'LONG')
        arcpy.AddField_management(nrrdtbl, 'SBuff_Pop', 'LONG')
        arcpy.AddField_management(nrrdtbl, 'Buff_Pop', 'LONG')
        arcpy.AddField_management(nrrdtbl, 'IBuff_Pct', 'FLOAT', 5, 2)
        arcpy.AddField_management(nrrdtbl, 'SBuff_Pct', 'FLOAT', 5, 2)
        arcpy.AddField_management(nrrdtbl, 'Buff_Pct', 'FLOAT', 5, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_KMN', 'DOUBLE', 7, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_KMY', 'DOUBLE', 7, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_KMAll', 'DOUBLE', 7, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_PctSB', 'FLOAT', 5, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_PctIB', 'FLOAT', 5, 2)
        popRF.write(
            "Add fields to the new table for IBuff_Pop (long), SBuff_Pop (long), Buff_Pop (long), IBuff_Pct (float), SBuff_Pct (float), Buff_Pct (float), Lane_KMN (double), Lane_KMY (double), Lane_KMAll (double), Lane_PctSB (float), Lane_PctIB (float).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Join Each Table to the final table and calculate necessary records """
        arcpy.JoinField_management(nrrdtbl, 'bgrp', 'Pop_InSuff', 'bgrp',
                                   ['SUM'])
        arcpy.CalculateField_management(nrrdtbl, 'IBuff_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management(nrrdtbl, 'SUM')

        arcpy.JoinField_management(nrrdtbl, 'bgrp', 'Pop_Suff', 'bgrp',
                                   ['SUM'])
        arcpy.CalculateField_management(nrrdtbl, 'SBuff_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management(nrrdtbl, 'SUM')

        arcpy.JoinField_management(nrrdtbl, 'bgrp', 'KMpBG_N', 'bgrp',
                                   ['SUM_KMs'])
        arcpy.CalculateField_management(nrrdtbl, 'Lane_KMN', '!SUM_KMs!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management(nrrdtbl, 'SUM_KMs')

        arcpy.JoinField_management(nrrdtbl, 'bgrp', 'KMpBG_Y', 'bgrp',
                                   ['SUM_KMs'])
        arcpy.CalculateField_management(nrrdtbl, 'Lane_KMY', '!SUM_KMs!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management(nrrdtbl, 'SUM_KMs')
        popRF.write(
            "Join the zonal statistics and length statistics tables with the new table and calculate IBuff_Pop, SBuff_Pop, Lane_KMN, Lane_KMY. Remove Joins.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Fill Null Values with Zeros """
        arcpy.MakeTableView_management(nrrdtbl, 'NrRdTbl')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'SBuff_Pop IS NULL')
        arcpy.CalculateField_management('NrRdTbl', 'SBuff_Pop', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'IBuff_Pop IS NULL')
        arcpy.CalculateField_management('NrRdTbl', 'IBuff_Pop', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Lane_KMN IS NULL')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_KMN', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Lane_KMY IS NULL')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_KMY', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'CLEAR_SELECTION')
        """ Calculate additional fields. """
        arcpy.CalculateField_management('NrRdTbl', 'Buff_Pop',
                                        '!IBuff_Pop! + !SBuff_Pop!',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_KMAll',
                                        '!Lane_KMN! + !Lane_KMY!',
                                        'PYTHON_9.3')

        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Dasy_Pop > 0')
        arcpy.CalculateField_management(
            'NrRdTbl', 'IBuff_Pct',
            '"%.2f" % (float(!IBuff_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.CalculateField_management(
            'NrRdTbl', 'SBuff_Pct',
            '"%.2f" % (float(!SBuff_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.CalculateField_management(
            'NrRdTbl', 'Buff_Pct',
            '"%.2f" % (float(!Buff_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')

        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Lane_KMAll > 0')
        arcpy.CalculateField_management(
            'NrRdTbl', 'Lane_PctIB',
            '"%.2f" % (!Lane_KMN!/!Lane_KMAll! * 100)', 'PYTHON_9.3')
        arcpy.CalculateField_management(
            'NrRdTbl', 'Lane_PctSB',
            '"%.2f" % (!Lane_KMY!/!Lane_KMAll! * 100)', 'PYTHON_9.3')

        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Lane_KMAll = 0')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_PctIB', '0',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_PctSB', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'CLEAR_SELECTION')
        popRF.write(
            "Calculate remaining fields: Buff_Pop = IBuff_Pop + SBuff_Pop; IBuff_Pct = IBuff_Pop/Dasy_Pop*100; SBuff_Pct = SBuff_Pop/Dasy_Pop*100; Lane_KMAll = Lane_KMN + Lane_KMY; Lane_PctSB = Lane_KMY/Lane_KMAll*100; Lane_PctIB = Lane_KMN/Lane_KMAll*100. --"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Copy into Working Directory """
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Dasy_Pop = 0')
        arcpy.CalculateField_management('NrRdTbl', 'IBuff_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'SBuff_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'Buff_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'IBuff_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'SBuff_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'Buff_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'CLEAR_SELECTION')
        popRF.write(
            "Calculate Fields where Dasy_Pop = 0: IBuff_Pop, SBuff_Pop, Buff_Pop, IBuff_Pct, SBuff_Pct, Buff_Pct = -99999--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if arcpy.Exists(freqDir + '/LC_Poly') == False:
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21,
                                                                1], [22, 1],
                                     [30, 1], [40, 1], [52, 1], [70, 1],
                                     [80, 1], [82, 1], [91, 1], [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO',
                str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly',
                str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP',
                str(freqDir) + '/LC_Poly')
        """ Buffer the LC Polygon by -500m """
        if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
            arcpy.env.extent = freqDir + '/LC'
            arcpy.env.snapRaster = freqDir + '/LC'
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty',
                str(freqDir) + '/Bnd_Cty_500m', '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')
        """ Identify whether LC is large enough """
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')
        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            """ Identify BGs within 50m of the LC edge """
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus15', '-15 meters')
            arcpy.MakeFeatureLayer_management('LC_Poly_Minus15', 'Minus15')
            arcpy.MakeFeatureLayer_management(freqDir + '/BG', 'BG')

            arcpy.SelectLayerByLocation_management('BG', 'COMPLETELY_WITHIN',
                                                   'Minus15', '',
                                                   'NEW_SELECTION', 'INVERT')

            bgValue = float(arcpy.GetCount_management('BG').getOutput(0))
            """ For all BGs too close to the LC edge, assign both fields a value of -99998 """
            if bgValue > 0:
                bgrps = []
                cursor = arcpy.SearchCursor('BG')
                for row in cursor:
                    value = row.getValue('bgrp')
                    bgrps.append(value)
                bgrps = list(set(bgrps))
                expression = ''
                for bgrp in bgrps:
                    expression = expression + " OR bgrp = '" + str(bgrp) + "'"
                expression = expression[4:]
                arcpy.SelectLayerByAttribute_management(
                    'NrRdTbl', 'NEW_SELECTION', expression)
                for field in [
                        'IBuff_Pct', 'SBuff_Pct', 'Buff_Pct', 'IBuff_Pop',
                        'SBuff_Pop', 'Buff_Pop', 'Lane_PctIB', 'Lane_PctSB'
                ]:
                    arcpy.CalculateField_management('NrRdTbl', str(field),
                                                    '-99998', 'PYTHON_9.3')
                arcpy.SelectLayerByAttribute_management(
                    'NrRdTbl', 'CLEAR_SELECTION')
                popRF.write(
                    "Calculate Field for BGs within 50m of the edge of the land cover, all fields = -99998.--"
                    + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create final table """
        arcpy.CopyRows_management('NrRdTbl', 'NrRd_Pop_Final')
        try:
            arcpy.Delete_management(finalDir + '/' + city + '_NrRd_Pop')
        except:
            pass
        arcpy.TableToTable_conversion('NrRd_Pop_Final', finalDir,
                                      city + '_NrRd_Pop')
        allFields = [
            f.name
            for f in arcpy.ListFields(finalDir + '/' + city + '_NrRd_Pop')
        ]
        for field in allFields:
            if field not in [
                    'bgrp', 'OBJECTID', 'IBuff_Pop', 'SBuff_Pop', 'Buff_Pop',
                    'Buff_Pct', 'Lane_PctSB', 'Lane_PctIB'
            ]:
                arcpy.DeleteField_management(
                    finalDir + '/' + city + '_NrRd_Pop', [field])
        popRF.write(
            "Export the fields to be displayed in the EnviroAtlas to a final gdb table: IBuff_Pop, SBuff_Pop, Buff_Pop, Buff_Pct, Lane_PctSB, Lane_PctIB.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- FOUNDATIONAL LAYER ANALYSIS -----------------------------------
        """ Set Environments """
        arcpy.env.extent = workDir + '/MFor_29C'
        arcpy.env.snapRaster = workDir + '/MFor_29C'
        """-------- Reclassify Moving Window into Percentage Breaks ------------------- """
        outReclass2 = arcpy.sa.Reclassify(
            'ForBuff', 'Value',
            arcpy.sa.RemapRange([[0, 77, 12], [78, 154, 25], [155, 307, 50],
                                 [308, 460, 75], [461, 613, 100]]))
        outReclass2.save('For_5Cls')
        pctRF.write(
            "Reclassify the extracted raster into percentage classes: 0-77 = 12.5; 78-154 = 25; 155-307 = 50; 308-460 = 75; 461-613 = 100.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Convert to polygon """
        arcpy.RasterToPolygon_conversion('For_5Cls', 'For_5Poly',
                                         'NO_SIMPLIFY')
        pctRF.write(
            "Convert the reclassified raster into a polygon WITHOUT simplifying.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Join the Polygon with the Road Buffer lines """
        arcpy.Intersect_analysis(['BuffLineUse', 'For_5Poly'], 'Class5', 'ALL',
                                 '', 'LINE')
        pctRF.write(
            "Intersect the analysis line with the polygons, splitting the analysis line into pieces representing each percentage class.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.Clip_analysis('Class5', freqDir + '/Bnd_Cty', 'Class5_Bnd')
        pctRF.write(
            "Clip the analysis line to the EnviroAtlas community boundary and the county lines.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.Dissolve_management('Class5_Bnd', 'Class5_D', 'gridcode')
        pctRF.write(
            "Dissolve the analysis line based on the percentage classes.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.AddField_management('Class5_D', 'PctTree', 'FLOAT')
        codeblock = '''def CalPctTree(gc):
                if (gc == 12):
                    return "12.5"
                else:
                    return gc
                '''
        arcpy.CalculateField_management('Class5_D', 'PctTree',
                                        'CalPctTree(!gridcode!)', 'PYTHON_9.3',
                                        codeblock)
        arcpy.DeleteField_management('Class5_D', ['gridcode'])
        pctRF.write(
            "Add field to the analysis line: PctTree (float) and calculate where PctTree = gridcode--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ If the LC isn't large enough, delete erroneous line segments """
        if bigEnough == 0:
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus_15', '-15 meters')
            arcpy.Union_analysis(
                [str(freqDir) + '/Bnd_Cty', 'LC_Poly_Minus_15'],
                'LC_Minus_BndCty_Union_15', 'ONLY_FID')
            arcpy.Select_analysis(
                'LC_Minus_BndCty_Union_15', 'EdgeAffectedArea_15',
                'FID_Bnd_Cty > 0 AND FID_LC_Poly_Minus_15 = -1')

            arcpy.MakeFeatureLayer_management('Class5_D', 'Class5_lyr')
            arcpy.MakeFeatureLayer_management('EdgeAffectedArea_15', 'EEArea')
            arcpy.SelectLayerByLocation_management('Class5_lyr', 'INTERSECT',
                                                   'EEArea', '',
                                                   'NEW_SELECTION')
            arcpy.SelectLayerByAttribute_management('Class5_lyr',
                                                    'SWITCH_SELECTION')
            arcpy.CopyFeatures_management('Class5_lyr',
                                          'NrRd_PFor_EdgeCorrected')
            arcpy.SelectLayerByAttribute_management('Class5_lyr',
                                                    'CLEAR_SELECTION')
            pctRF.write(
                "Calculate Field for BGs within 50m of the edge of the land cover, all fields = -99998.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Project into Albers """
        prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
        try:
            arcpy.Project_management('NrRd_PFor_EdgeCorrected', 'NrRd_PFor',
                                     prjfile)
        except:
            arcpy.Project_management('Class5_D', 'NrRd_PFor', prjfile)
        pctRF.write("Project the analysis line into Albers--" +
                    time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create final feature class """
        try:
            arcpy.Delete_management(finalDir + '/' + city + '_NrRd_PFor')
        except:
            pass
        try:
            arcpy.FeatureClassToFeatureClass_conversion(
                'NrRd_PFor_EdgeCorrected', finalDir, city + '_NrRd_PFor')
        except:
            arcpy.FeatureClassToFeatureClass_conversion(
                'NrRd_PFor', finalDir, city + '_NrRd_PFor')
        pctRF.write(
            "Export the analysis line to a geodatabase for display in EnviroAtlas.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        print 'NrRd_PFor End Time: ' + time.asctime() + '\n'

        #-------- COMPLETE LOGFILES ---------------------------------------------
        pctRF.close()
        popRF.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        pctRF.write("\nSomething went wrong.\n\n")
        pctRF.write("Pyton Traceback Message below:")
        pctRF.write(traceback.format_exc())
        pctRF.write("\nArcMap Error Messages below:")
        pctRF.write(arcpy.GetMessages(2))
        pctRF.write("\nArcMap Warning Messages below:")
        pctRF.write(arcpy.GetMessages(1))

        pctRF.write("\n\nEnded at " + time.asctime() + '\n')
        pctRF.write("\n---End of Log File---\n")

        if pctRF:
            pctRF.close()
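
# A minimal usage sketch for the NrRd function defined above. The community
# code and directory paths are hypothetical placeholders for illustration only;
# the real values come from the EnviroAtlas processing workflow.

if __name__ == '__main__':
    city = 'Durham_NC'                # hypothetical community code
    inDir = 'C:/EnviroAtlas/Inputs'   # hypothetical input directory
    workFld = 'C:/EnviroAtlas/Work'   # hypothetical working folder
    NrRd(city, inDir, workFld)
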
Example #19
0
CA.AggregatePolygons(
    in_features=intermediates_outline[1],
    out_feature_class=intermediates_outline[2],
    aggregation_distance=250,
    minimum_area=0,
    minimum_hole_size="100000000000000000000000000000000000000000000",
    orthogonality_option="ORTHOGONAL",
    barrier_features="",
    out_table="")

print "Eliminating holes in aggregated polygon"
# Eliminate all holes in the aggregated polygon
arcpy.EliminatePolygonPart_management(
    in_features=intermediates_outline[2],
    out_feature_class=intermediates_outline[3],
    condition="AREA",
    part_area="100000000000000000000000000000000000000000000",
    part_area_percent="",
    part_option="ANY")

print "Dissolving aggregated polygon to remove leftover interior pieces"
arcpy.Dissolve_management(in_features=intermediates_outline[3],
                          out_feature_class=intermediates_outline[4])

print "Performing final aggregation to include floating peripheral features"
CA.AggregatePolygons(
    in_features=intermediates_outline[4],
    out_feature_class=taxlotOutline_loc,
    aggregation_distance=350,
    minimum_area=0,
    minimum_hole_size="100000000000000000000000000000000000000000000",
def createBoundaryFeatureClass(raster_footprint,
                               target_raster_boundary,
                               statistics_fields="",
                               alter_field_infos=None):
    a = datetime.datetime.now()
    aa = a

    raster_boundary_1 = "{}1".format(target_raster_boundary)
    deleteFileIfExists(raster_boundary_1, True)
    arcpy.Buffer_analysis(in_features=raster_footprint,
                          out_feature_class=raster_boundary_1,
                          buffer_distance_or_field="10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          method="PLANAR")
    arcpy.RepairGeometry_management(in_features=raster_boundary_1,
                                    delete_null="DELETE_NULL")
    deleteFields(raster_boundary_1)
    a = doTime(a, "\tBuffer out into {}".format(raster_boundary_1))

    raster_boundary_2 = "{}2".format(target_raster_boundary)
    deleteFileIfExists(raster_boundary_2, True)
    arcpy.AddMessage(
        "\tDissolving with statistics: {}".format(statistics_fields))
    arcpy.Dissolve_management(in_features=raster_boundary_1,
                              out_feature_class=raster_boundary_2,
                              dissolve_field=FIELD_INFO[ELEV_TYPE][0],
                              statistics_fields=statistics_fields)
    arcpy.RepairGeometry_management(in_features=raster_boundary_2,
                                    delete_null="DELETE_NULL")
    deleteFields(raster_boundary_2)
    a = doTime(a, "\tDissolved to {}".format(raster_boundary_2))

    deleteFileIfExists(raster_boundary_1, True)

    alterFields(alter_field_infos, raster_boundary_2)
    a = doTime(a, "\tAltered Fields on {}".format(raster_boundary_2))

    raster_boundary_3 = "{}3".format(target_raster_boundary)
    deleteFileIfExists(raster_boundary_3, True)
    arcpy.EliminatePolygonPart_management(in_features=raster_boundary_2,
                                          out_feature_class=raster_boundary_3,
                                          condition="AREA",
                                          part_area="10000 SquareMiles",
                                          part_area_percent="0",
                                          part_option="CONTAINED_ONLY")
    arcpy.RepairGeometry_management(in_features=raster_boundary_3,
                                    delete_null="DELETE_NULL")
    deleteFields(raster_boundary_3)
    a = doTime(a,
               "\tEliminated internal parts on {}".format(raster_boundary_3))

    # Don't delete raster boundary 2 because we need it later

    # JWS 4/26 - Bend Simplify -> Point Remove & 20 Meters -> 0.1 Meters
    raster_boundary_4 = "{}4".format(target_raster_boundary)
    deleteFileIfExists(raster_boundary_4, True)
    arcpy.SimplifyPolygon_cartography(in_features=raster_boundary_3,
                                      out_feature_class=raster_boundary_4,
                                      algorithm="POINT_REMOVE",
                                      tolerance="0.1 Meters",
                                      minimum_area="0 Unknown",
                                      error_option="RESOLVE_ERRORS",
                                      collapsed_point_option="NO_KEEP",
                                      in_barriers="")
    arcpy.RepairGeometry_management(in_features=raster_boundary_4,
                                    delete_null="DELETE_NULL")
    deleteFields(raster_boundary_4)
    a = doTime(a, "\tSimplified to {}".format(raster_boundary_4))

    deleteFileIfExists(raster_boundary_3, True)

    deleteFileIfExists(target_raster_boundary, True)
    arcpy.Buffer_analysis(in_features=raster_boundary_4,
                          out_feature_class=target_raster_boundary,
                          buffer_distance_or_field="-10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          method="PLANAR")
    arcpy.RepairGeometry_management(in_features=target_raster_boundary,
                                    delete_null="DELETE_NULL")
    deleteFields(target_raster_boundary)
    a = doTime(a, "\tBuffer back into {}".format(target_raster_boundary))

    deleteFileIfExists(raster_boundary_4, True)

    if alter_field_infos is not None and len(alter_field_infos) > 0:
        fields = ";".join([field[1] for field in alter_field_infos])
        arcpy.JoinField_management(in_data=target_raster_boundary,
                                   in_field="OBJECTID",
                                   join_table=raster_boundary_2,
                                   join_field="OBJECTID",
                                   fields=fields)
        # Utility.addToolMessages()
        a = doTime(
            a, "\tJoined {} with {}".format(target_raster_boundary,
                                            raster_boundary_2))

    deleteFileIfExists(raster_boundary_2, True)

    a = doTime(
        aa, "Dissolved raster footprints to dataset boundary {} ".format(
            target_raster_boundary))
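
# createBoundaryFeatureClass above relies on a doTime(start, message) helper
# that is not shown in this excerpt. A minimal sketch of what it presumably
# does -- report the elapsed time for a step and return a fresh timestamp --
# illustrative only, not the original implementation.

import datetime

import arcpy


def doTime(start, message):
    """Report the time elapsed since 'start' and return a new timestamp."""
    now = datetime.datetime.now()
    arcpy.AddMessage("{} ({} elapsed)".format(message, now - start))
    return now
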
print("Process: Extract Values to Points")
# Process: Extract Values to Points
arcpy.gp.ExtractValuesToPoints_sa(NHLDandBuffLakes_InsidePoint_shp, NHLD_DEMs_1_3_mosaic_img, NHLDandBuffLakes_InsidePoint_ElevVal_shp, "NONE", "VALUE_ONLY")

print("Process: Copy Features (_ElevVal)")
# Process: Copy Features
arcpy.CopyFeatures_management(NHLDandBuffLakes, NHLDandBuffLakes_ElevVal_shp, "", "0", "0", "0")

print("Process: Join Field (RASTERVALU)")
# Process: Join Field
arcpy.JoinField_management(NHLDandBuffLakes_ElevVal_shp, "Permanent_", NHLDandBuffLakes_InsidePoint_ElevVal_shp, "Permanent_", "RASTERVALU")

print("Process: Eliminate Polygon Part (_ElmIslands)")
# Process: Eliminate Polygon Part (Eliminate Lake Islands, also creates solid polygon without interior holes) (Lakes on islands within lakes will be removed in later script)
arcpy.EliminatePolygonPart_management(NHLDandBuffLakes_ElevVal_shp, NHLDandBuffLakes_ElevVal_ElmIslands_shp, "PERCENT", "", "99.9", "CONTAINED_ONLY")

print("Calc New Perim, Area, WALA after islands have been removed from the lake shapes")
#Add Field Perim_m_1
# Process: Add Field
arcpy.AddField_management(NHLDandBuffLakes_ElevVal_ElmIslands_shp, "Perim_m_1", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
#Calc Field Perim_m_1
arcpy.CalculateField_management(NHLDandBuffLakes_ElevVal_ElmIslands_shp, "Perim_m_1","!shape.geodesicLength@meters!","PYTHON_9.3")

#Add Field Area_m2_1
arcpy.AddField_management(NHLDandBuffLakes_ElevVal_ElmIslands_shp, "Area_m2_1", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
#Calc Field Area_m2_1
arcpy.CalculateField_management(NHLDandBuffLakes_ElevVal_ElmIslands_shp, "Area_m2_1","!shape.geodesicArea@squaremeters!","PYTHON_9.3")

#Add Field WALA_1
arcpy.AddField_management(NHLDandBuffLakes_ElevVal_ElmIslands_shp, "WALA_1", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
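
# The Add Field / Calculate Field pairs above repeat the same pattern for
# Perim_m_1, Area_m2_1, and WALA_1. A minimal convenience sketch that wraps
# the pattern, assuming the same arcpy environment; the helper name and the
# commented example call are illustrative only.


def add_and_calculate(fc, field_name, expression, field_type="DOUBLE"):
    """Add a field to fc (if not already present) and populate it with a Python expression."""
    import arcpy
    existing = [f.name for f in arcpy.ListFields(fc)]
    if field_name not in existing:
        arcpy.AddField_management(fc, field_name, field_type)
    arcpy.CalculateField_management(fc, field_name, expression, "PYTHON_9.3")


# Example (same geodesic perimeter calculation as above):
# add_and_calculate(NHLDandBuffLakes_ElevVal_ElmIslands_shp, "Perim_m_1",
#                   "!shape.geodesicLength@meters!")
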
Example #22
0
def createBoundaryFeatureClass(raster_footprint, target_raster_boundary, statistics_fields="", alter_field_infos=None):
    a = datetime.datetime.now()
    aa = a
    deleteFields(raster_footprint)

    lasd_boundary_0 = "{}0".format(target_raster_boundary)
    lasd_boundary_1 = "{}1".format(target_raster_boundary)

    deleteFileIfExists(lasd_boundary_0, True)
    deleteFileIfExists(lasd_boundary_1, True)

    arcpy.AddMessage("\tMultipart to Singlepart")
    arcpy.MultipartToSinglepart_management(in_features=raster_footprint, out_feature_class=lasd_boundary_0)
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_0, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_0)

    arcpy.AddMessage("\tBuffering")
    arcpy.Buffer_analysis(in_features=lasd_boundary_0, out_feature_class=lasd_boundary_1, buffer_distance_or_field="10 Meters", line_side="FULL", line_end_type="ROUND", dissolve_option="NONE", method="PLANAR")
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_1, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_1)

    deleteFileIfExists(lasd_boundary_0, True)

    lasd_boundary_2 = "{}2".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_2, True)
    arcpy.AddMessage("\tDissolving with statistics: {}".format(statistics_fields))
    arcpy.Dissolve_management(
        in_features=lasd_boundary_1,
        out_feature_class=lasd_boundary_2,
        statistics_fields=statistics_fields
        )
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_2, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_2)
    a = doTime(a, "\tDissolved to {}".format(lasd_boundary_2))


    if alter_field_infos is not None:
        for alter_field_info in alter_field_infos:
            try:
                alterField(lasd_boundary_2, alter_field_info[0], alter_field_info[1], alter_field_info[2])
            except:
                pass

        a = doTime(a, "\tRenamed summary fields")

    lasd_boundary_3 = "{}3".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_3, True)
    arcpy.EliminatePolygonPart_management(in_features=lasd_boundary_2, out_feature_class=lasd_boundary_3, condition="AREA", part_area="10000 SquareMiles", part_area_percent="0", part_option="CONTAINED_ONLY")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_3, delete_null="DELETE_NULL")
    deleteFileIfExists(lasd_boundary_1, True)
    deleteFields(lasd_boundary_3)
    lasd_boundary_4 = "{}4".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)
    arcpy.SimplifyPolygon_cartography(in_features=lasd_boundary_3, out_feature_class=lasd_boundary_4, algorithm="BEND_SIMPLIFY", tolerance="20 Meters", minimum_area="0 Unknown", error_option="RESOLVE_ERRORS", collapsed_point_option="NO_KEEP", in_barriers="")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_4, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_4)
    #try:
    #    arcpy.DeleteField_management(in_table=lasd_boundary_4, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
    #except:
    #    pass
    deleteFileIfExists(lasd_boundary_3, True)

    deleteFileIfExists(target_raster_boundary, True)
    arcpy.Buffer_analysis(in_features=lasd_boundary_4, out_feature_class=target_raster_boundary, buffer_distance_or_field="-10 Meters", line_side="FULL", line_end_type="ROUND", dissolve_option="ALL", method="PLANAR")
    arcpy.RepairGeometry_management(in_features=target_raster_boundary, delete_null="DELETE_NULL")
    deleteFields(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)

    if alter_field_infos is not None and len(alter_field_infos) > 0:
        fields = ";".join([field[1] for field in alter_field_infos])
        arcpy.JoinField_management(in_data=target_raster_boundary, in_field="OBJECTID", join_table=lasd_boundary_2, join_field="OBJECTID", fields=fields)
        Utility.addToolMessages()

    deleteFileIfExists(lasd_boundary_2, True)

    a = doTime(aa, "Dissolved las footprints to dataset boundary {} ".format(target_raster_boundary))
Example #23
0
    print ( "TauDEM; Alpha Angle = 23 degrees; Converted To Shapefile")

    # ------------------------------------------------------------------------------------------------------------------------------------------------------


    # --- Merging Data

    # --- Merge - Class 1
    arcpy.Merge_management(inputs="AvalanchePathClass1;Class1", output="Class1_Merge", field_mappings='Id "Id" true true false 10 Long 0 10 ,First,#,AvalanchePathClass1,Id,-1,-1,Class1,Id,-1,-1;gridcode "gridcode" true true false 10 Long 0 10 ,First,#,AvalanchePathClass1,gridcode,-1,-1,Class1,gridcode,-1,-1')

    # --- Dissolve - Class 1
    arcpy.Dissolve_management(in_features="Class1_Merge", out_feature_class="Class1_Dissolve", dissolve_field="gridcode", statistics_fields="", multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES")

    # --- Eliminate Polygon Part (All Clusters Smaller Than 10,000 Square Meters Are Eliminated Within The Polygon)
    arcpy.EliminatePolygonPart_management(in_features="Class1_Dissolve", out_feature_class="Class1_Eliminate", condition="AREA", part_area="10000 SquareMeters", part_area_percent="0", part_option="CONTAINED_ONLY")

    print ( "Class 1 Merged")

    # --- Merge - Class 2
    arcpy.Merge_management(inputs="AvalanchePathClass2;Class2", output="Class2_Merge", field_mappings='Id "Id" true true false 10 Long 0 10 ,First,#,AvalanchePathClass2,Id,-1,-1,Class2,Id,-1,-1;gridcode "gridcode" true true false 10 Long 0 10 ,First,#,AvalanchePathClass2,gridcode,-1,-1,Class2,gridcode,-1,-1')

    # --- Dissolve - Class 2
    arcpy.Dissolve_management(in_features="Class2_Merge", out_feature_class="Class2_Dissolve", dissolve_field="gridcode", statistics_fields="", multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES")

    # --- Eliminate Polygon Part (All Clusters Smaller Than 10,000 Square Meters Are Eliminated Within The Polygon)
    arcpy.EliminatePolygonPart_management(in_features="Class2_Dissolve", out_feature_class="Class2_Eliminate", condition="AREA", part_area="10000 SquareMeters", part_area_percent="0", part_option="CONTAINED_ONLY")

    print ( "Class 2 Merged")

    # --- Merge - Class 3
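
# The Merge / Dissolve / Eliminate Polygon Part sequence above is repeated once
# per avalanche class. A minimal sketch of the same sequence written as a loop,
# assuming the class layers follow the naming pattern shown; the field mappings
# from the original Merge calls are omitted for brevity, so this is an
# illustrative sketch rather than a drop-in replacement.

import arcpy

for i in (1, 2, 3):
    arcpy.Merge_management(
        inputs="AvalanchePathClass{0};Class{0}".format(i),
        output="Class{0}_Merge".format(i))
    arcpy.Dissolve_management(
        in_features="Class{0}_Merge".format(i),
        out_feature_class="Class{0}_Dissolve".format(i),
        dissolve_field="gridcode")
    # Eliminate all interior clusters smaller than 10,000 square meters
    arcpy.EliminatePolygonPart_management(
        in_features="Class{0}_Dissolve".format(i),
        out_feature_class="Class{0}_Eliminate".format(i),
        condition="AREA",
        part_area="10000 SquareMeters",
        part_area_percent="0",
        part_option="CONTAINED_ONLY")
    print("Class {0} Merged".format(i))
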
Example #24
0
def Schools(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_EduPts.gdb')
    except:
        print 'Schools GDB already exists'
    workDir = str(workFld) + '/' + city + '_EduPts.gdb'
    arcpy.env.workspace = workDir
    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    """ Input Directory """
    inDir = str(inDir) + '/Input.gdb'
    """ Set Workspace Environments """
    arcpy.env.workspace = workDir
    arcpy.env.scratchWorkspace = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_EduLowGS_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        reportFile = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'
        """ Write out first line of report file """
        print 'Schools Start Time: ' + time.asctime()
        state = city[-2:]
        if state in ['CO', 'IL', 'KS', 'MT', 'NE', 'NH', 'OR', 'WA']:
            reportFile.write(
                "Begin with 2011 HSIP (Homeland Security Infrastructure Program) point layers for public schools, and private schools where public and private schools have been merged into one K-12 layer. Also, begin with the 2014 HSIP day cares point layer.--201203--\n"
            )
        else:
            reportFile.write(
                "Begin with 2011 HSIP (Homeland Security Infrastructure Program) point layers for daycares, public schools, and private schools where public and private schools have been merged into one K-12 layer.--201203--\n"
            )
        reportFile.write(
            "Use spatial join to add the Census Block Group GeoID of each school to the school's attribute record--201203--\n"
        )

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.snapRaster = freqDir + '/LC'
        """-------- Prepare Daycare and K12 Points -------------------------------"""
        """ Clip the Daycare and K12 points to the city boundary """
        arcpy.Clip_analysis(inDir + '/Daycares', freqDir + '/Bnd_5km',
                            'Daycares_Alb')
        arcpy.Clip_analysis(inDir + '/K12', freqDir + '/Bnd_5km', 'K12_Alb')
        reportFile.write(
            "Clip each point layer to the EnviroAtlas community boundary.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Determine the Projection of the LC """
        descLC = arcpy.Describe(str(freqDir) + '/LC')
        """ Project the Daycare and K12 points into the LC's projection """
        arcpy.Project_management('Daycares_Alb', 'Daycares',
                                 descLC.spatialReference)
        arcpy.Project_management('K12_Alb', 'K12', descLC.spatialReference)
        reportFile.write(
            "Project each point layer into the projection of the land cover.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Prepare Land Cover -------------------------------------------"""
        """ Reclassify LC into Binary Green Space """
        if arcpy.Exists(str(freqDir) + '/GreenIO') == False:
            outReclass5 = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 1], [52, 1], [70, 1],
                                     [80, 1], [82, 1], [91, 1], [92, 1]]))
            outReclass5.save(str(freqDir) + '/GreenIO')
            reportFile.write(
                "Reclassify the 1-Meter EnviroAtlas Land Cover Classification for the EnviroAtlas community into Binary Green Space. REPLACE-GSE--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("GreenIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')

        else:
            reportFile.write(
                "Reclassify the 1-Meter EnviroAtlas Land Cover Classification for the EnviroAtlas community into Binary Green Space. REPLACE-GSE--GreenIO--"
                + '--\n')
        """ Moving Window for Schools - Greenspace, Circle 100 Meters """
        outFocalStat1 = arcpy.sa.FocalStatistics(
            str(freqDir) + '/GreenIO', arcpy.sa.NbrCircle(100, 'CELL'), 'SUM',
            'NODATA')
        outFocalStat1.save('Gre_100C')
        reportFile.write(
            "Run Focal Statistics on the Green Space Binary Raster with a circular window of 100 meters and statistics = SUM.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Analyze Green Space at School Locations -------------------"""
        """ Extract GS Values at Points """
        arcpy.sa.ExtractValuesToPoints('Daycares', 'Gre_100C', 'Day_Green',
                                       'NONE', 'VALUE_ONLY')
        arcpy.sa.ExtractValuesToPoints('K12', 'Gre_100C', 'K12_Green', 'NONE',
                                       'VALUE_ONLY')
        reportFile.write(
            "Extract Values to Points from the focal statistics raster to both the Daycare and K12 points with Census Block Group GeoIDs and append values to the point file--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add Field to Point Layers """
        arcpy.AddField_management('Day_Green', 'Green_Pct', 'DOUBLE')
        arcpy.AddField_management('K12_Green', 'Green_Pct', 'DOUBLE')
        """ Calculate Percent Greenspce """
        arcpy.CalculateField_management('Day_Green', 'Green_Pct',
                                        'float(!RASTERVALU!) /31417 *100',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('K12_Green', 'Green_Pct',
                                        'float(!RASTERVALU!) /31417 *100',
                                        'PYTHON_9.3')
        reportFile.write(
            "Add new field to each point layer: Green_Pct (float) and calculate where Green_Pct = RASTERVALU / 31417 * 100 (limited to 2 decimal places).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Count number of Schools per Block Group """
        arcpy.Statistics_analysis('Day_Green', 'Day_Num',
                                  [['CAPACITY', 'COUNT']], 'bgrp')
        arcpy.Statistics_analysis('K12_Green', 'K12_Num',
                                  [['ENROLLMENT', 'COUNT']], 'bgrp')
        """ Select low Greespace Schools and Count per Block Group """
        arcpy.Select_analysis('Day_Green', 'Day_Low', 'Green_Pct <= 25')
        arcpy.Statistics_analysis('Day_Low', 'Day_NumLow',
                                  [['CAPACITY', 'COUNT']], 'bgrp')

        arcpy.Select_analysis('K12_Green', 'K12_Low', 'Green_Pct <= 25')
        arcpy.Statistics_analysis('K12_Low', 'K12_NumLow',
                                  [['ENROLLMENT', 'COUNT']], 'bgrp')
        reportFile.write(
            "From each point layer, select records with Green_Pct <= 25, then summarize the count of selected schools by block group.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create final table """
        arcpy.TableToTable_conversion(freqDir + '/BG', workDir, 'EduPts', '',
                                      'bgrp')
        arcpy.DeleteField_management('EduPts', [
            'PLx2_Pop', 'PLx2_Pct', 'SUM_HOUSIN', 'SUM_POP10', 'under_1',
            'under_1pct', 'under_13', 'under_13pc', 'over_70', 'over_70pct',
            'Shape_Length', 'Shape_Leng', 'NonWhite', 'NonWt_Pct',
            'Shape_Le_1', 'Shape_Area', 'Density', 'LandA_M', 'EAID',
            'Dasy_Pop', 'State'
        ])
        reportFile.write(
            "Create a new table based on the EnviroAtlas community block groups table retaining the BGRP field.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add fields to new table """
        arcpy.AddField_management('EduPts', 'Day_Count', 'DOUBLE')
        arcpy.AddField_management('EduPts', 'Day_Low', 'DOUBLE')
        arcpy.AddField_management('EduPts', 'K12_Count', 'DOUBLE')
        arcpy.AddField_management('EduPts', 'K12_Low', 'DOUBLE')
        reportFile.write(
            "Add fields to the new table for K12_Count (short), K12_Low (short), Day_Count (short), and Day_Low (short).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Join Each Table to the final table and calculate necessary records """
        arcpy.JoinField_management('EduPts', 'bgrp', 'Day_Num', 'bgrp',
                                   ['FREQUENCY'])
        arcpy.CalculateField_management('EduPts', 'Day_Count', '!FREQUENCY!',
                                        'PYTHON')
        arcpy.DeleteField_management('EduPts', 'FREQUENCY')

        arcpy.JoinField_management('EduPts', 'bgrp', 'Day_NumLow', 'bgrp',
                                   ['FREQUENCY'])
        arcpy.CalculateField_management('EduPts', 'Day_Low', '!FREQUENCY!',
                                        'PYTHON')
        arcpy.DeleteField_management('EduPts', 'FREQUENCY')

        arcpy.JoinField_management('EduPts', 'bgrp', 'K12_Num', 'bgrp',
                                   ['FREQUENCY'])
        arcpy.CalculateField_management('EduPts', 'K12_Count', '!FREQUENCY!',
                                        'PYTHON')
        arcpy.DeleteField_management('EduPts', 'FREQUENCY')

        arcpy.JoinField_management('EduPts', 'bgrp', 'K12_NumLow', 'bgrp',
                                   ['FREQUENCY'])
        arcpy.CalculateField_management('EduPts', 'K12_Low', '!FREQUENCY!',
                                        'PYTHON')
        arcpy.DeleteField_management('EduPts', 'FREQUENCY')
        reportFile.write(
            "Join each of the summarized tables with the new table and calculate the corresponding field in the new table.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate NULL values, where applicable """
        arcpy.MakeTableView_management('EduPts', 'EduPtsTbl')
        arcpy.SelectLayerByAttribute_management('EduPtsTbl', 'NEW_SELECTION',
                                                'Day_Count IS NULL')
        arcpy.CalculateField_management('EduPtsTbl', 'Day_Count', '0',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('EduPtsTbl', 'Day_Low', '-99999',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('EduPtsTbl', 'CLEAR_SELECTION')
        arcpy.SelectLayerByAttribute_management('EduPtsTbl', 'NEW_SELECTION',
                                                'Day_Low IS NULL')
        arcpy.CalculateField_management('EduPtsTbl', 'Day_Low', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('EduPtsTbl', 'CLEAR_SELECTION')
        arcpy.SelectLayerByAttribute_management('EduPtsTbl', 'NEW_SELECTION',
                                                'K12_Count IS NULL')
        arcpy.CalculateField_management('EduPtsTbl', 'K12_Count', '0',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('EduPtsTbl', 'K12_Low', '-99999',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('EduPtsTbl', 'CLEAR_SELECTION')
        arcpy.SelectLayerByAttribute_management('EduPtsTbl', 'NEW_SELECTION',
                                                'K12_Low IS NULL')
        arcpy.CalculateField_management('EduPtsTbl', 'K12_Low', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('EduPtsTbl', 'CLEAR_SELECTION')
        reportFile.write(
            "Calculate fields where K12_Count = 0: K12_Low = -99999 and Day_Count = 0: Day_Low = -99999--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if arcpy.Exists(freqDir + '/LC_Poly') == False:
            arcpy.env.extent = freqDir + '/LC'
            arcpy.env.snapRaster = freqDir + '/LC'
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21,
                                                                1], [22, 1],
                                     [30, 1], [40, 1], [52, 1], [70, 1],
                                     [80, 1], [82, 1], [91, 1], [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO',
                str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly',
                str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP',
                str(freqDir) + '/LC_Poly')
        """ Buffer the LC Polygon by -500m """
        if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty',
                str(freqDir) + '/Bnd_Cty_500m', '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')
        """ Identify whether LC is large enough """
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
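        # bigEnough > 0 means the 500m-buffered city boundary lies completely
        # within the land cover polygon, so edge effects can be ignored below.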
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')
        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            """ Identify BGs within 50m of the LC edge """
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus100', '-100 meters')
            arcpy.MakeFeatureLayer_management('LC_Poly_Minus100', 'Minus100')
            arcpy.MakeFeatureLayer_management('Day_Low', 'D_L')
            arcpy.MakeFeatureLayer_management('K12_Low', 'K_L')

            arcpy.SelectLayerByLocation_management('D_L', 'WITHIN', 'Minus100',
                                                   '', 'NEW_SELECTION',
                                                   'INVERT')
            arcpy.SelectLayerByLocation_management('K_L', 'WITHIN', 'Minus100',
                                                   '', 'NEW_SELECTION',
                                                   'INVERT')

            dValue = float(arcpy.GetCount_management('D_L').getOutput(0))
            kValue = float(arcpy.GetCount_management('K_L').getOutput(0))
            """ For all BGs too close to the LC edge, assign both fields a value of -99998 """
            if dValue > 0:
                bgrps = []
                cursor = arcpy.SearchCursor('D_L')
                for row in cursor:
                    value = row.getValue('bgrp')
                    bgrps.append(value)
                bgrps = list(set(bgrps))
                expression = ''
                for bgrp in bgrps:
                    expression = expression + " OR bgrp = '" + str(bgrp) + "'"
                expression = expression[4:]
                arcpy.SelectLayerByAttribute_management(
                    'EduPtsTbl', 'NEW_SELECTION', expression)
                arcpy.CalculateField_management('EduPtsTbl', 'Day_Low',
                                                '-99998', 'PYTHON_9.3')
                arcpy.SelectLayerByAttribute_management(
                    'EduPtsTbl', 'CLEAR_SELECTION')

            if kValue > 0:
                bgrps = []
                cursor = arcpy.SearchCursor('K_L')
                for row in cursor:
                    value = row.getValue('bgrp')
                    bgrps.append(value)
                bgrps = list(set(bgrps))
                expression = ''
                for bgrp in bgrps:
                    expression = expression + " OR bgrp = '" + str(bgrp) + "'"
                expression = expression[4:]
                arcpy.SelectLayerByAttribute_management(
                    'EduPtsTbl', 'NEW_SELECTION', expression)
                arcpy.CalculateField_management('EduPtsTbl', 'K12_Low',
                                                '-99998', 'PYTHON_9.3')
                arcpy.SelectLayerByAttribute_management(
                    'EduPtsTbl', 'CLEAR_SELECTION')
            arcpy.SelectLayerByAttribute_management('D_L', 'CLEAR_SELECTION')
            arcpy.SelectLayerByAttribute_management('K_L', 'CLEAR_SELECTION')

            if kValue > 0 or dValue > 0:
                reportFile.write(
                    "Calculate Field for BGs within 50m of the edge of the land cover, All Fields = -99998.--"
                    + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create final table """
        arcpy.CopyRows_management('EduPtsTbl', 'EduLowGS')
        try:
            arcpy.Delete_management(finalDir + '/' + str(city) + '_EduLowGS')
        except:
            pass
        arcpy.TableToTable_conversion('EduLowGS', finalDir, city + '_EduLowGS')
        allFields = [
            f.name
            for f in arcpy.ListFields(finalDir + '/' + city + '_EduLowGS')
        ]
        for field in allFields:
            if field not in [
                    'bgrp', 'OBJECTID', 'Day_Count', 'Day_Low', 'K12_Count',
                    'K12_Low'
            ]:
                arcpy.DeleteField_management(
                    finalDir + '/' + city + '_EduLowGS', [field])

        reportFile.write(
            "Export the fields to be displayed in EnviroAtlas to a final gdb table: K12_Count, K12_Low, Day_Count, Day_Low.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        print 'Schools End Time: ' + time.asctime() + '\n'

        #-------- COMPLETE LOGFILES ---------------------------------------------
        reportFile.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        reportFile.write("\nSomething went wrong.\n\n")
        reportFile.write("Pyton Traceback Message below:")
        reportFile.write(traceback.format_exc())
        reportFile.write("\nArcMap Error Messages below:")
        reportFile.write(arcpy.GetMessages(2))
        reportFile.write("\nArcMap Warning Messages below:")
        reportFile.write(arcpy.GetMessages(1))

        reportFile.write("\n\nEnded at " + time.asctime() + '\n')
        reportFile.write("\n---End of Log File---\n")

        if reportFile:
            reportFile.close()
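The land cover coverage check above (RasterToPolygon followed by EliminatePolygonPart with the PERCENT condition, then Delete/Rename) is repeated verbatim in several of these EnviroAtlas metric scripts. A minimal sketch of that hole-filling step factored into a reusable helper is shown below; the function name, the 5-percent default, and the scratch workspace are illustrative assumptions rather than part of the original code.

import os
import arcpy


def fillSmallHoles(inRaster, outPoly, pctThreshold=5, scratchGDB="in_memory"):
    '''Convert a raster to polygons and drop interior parts smaller than
    pctThreshold percent of their containing polygon (a sketch of the pattern
    used in the coverage checks above; names and defaults are illustrative).'''
    tmpPoly = scratchGDB + os.sep + "RawPoly"
    # Raster -> polygon, simplifying edges as the original scripts do
    arcpy.RasterToPolygon_conversion(inRaster, tmpPoly, 'SIMPLIFY')
    # Remove contained parts (holes) below the percentage threshold
    arcpy.EliminatePolygonPart_management(tmpPoly, outPoly, 'PERCENT', '',
                                          str(pctThreshold), 'CONTAINED_ONLY')
    arcpy.Delete_management(tmpPoly)
    return outPoly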
Example #25
0
def createVectorBoundaryC(f_path, f_name, raster_props, stat_out_folder, vector_bound_path, minZ, maxZ, bound_path, elev_type):
    a = datetime.now()
    arcpy.AddMessage("\tCreating {} bound for '{}' using min z '{}' and max z'{}'".format(elev_type, f_path, minZ, maxZ))

    vector_1_bound_path = os.path.join(stat_out_folder, "B1_{}.shp".format(f_name))
    vector_2_bound_path = os.path.join(stat_out_folder, "B2_{}.shp".format(f_name))
    vector_3_bound_path = os.path.join(stat_out_folder, "B3_{}.shp".format(f_name))
    vector_4_bound_path = os.path.join(stat_out_folder, "B4_{}.shp".format(f_name))
    vector_5_bound_path = os.path.join(stat_out_folder, "B5_{}.shp".format(f_name))
    deleteFileIfExists(vector_bound_path, useArcpy=True)
    deleteFileIfExists(vector_1_bound_path, useArcpy=True)
    deleteFileIfExists(vector_2_bound_path, useArcpy=True)
    deleteFileIfExists(vector_3_bound_path, useArcpy=True)
    deleteFileIfExists(vector_4_bound_path, useArcpy=True)
    deleteFileIfExists(vector_5_bound_path, useArcpy=True)

    arcpy.RasterDomain_3d(in_raster=f_path, out_feature_class=vector_5_bound_path, out_geometry_type="POLYGON")
    Utility.addToolMessages()

    arcpy.MultipartToSinglepart_management(in_features=vector_5_bound_path, out_feature_class=vector_4_bound_path)
    Utility.addToolMessages()
    checkRecordCount(vector_4_bound_path)

    arcpy.EliminatePolygonPart_management(in_features=vector_4_bound_path, out_feature_class=vector_3_bound_path, condition="AREA", part_area="10000 SquareMiles", part_area_percent="0", part_option="CONTAINED_ONLY")
    Utility.addToolMessages()
    checkRecordCount(vector_3_bound_path)

    arcpy.SimplifyPolygon_cartography(
        in_features=vector_3_bound_path,
        out_feature_class=vector_2_bound_path,
        algorithm="POINT_REMOVE",
        tolerance="{} Meters".format(C_SIMPLE_DIST),
        minimum_area="0 Unknown",
        error_option="RESOLVE_ERRORS",
        collapsed_point_option="NO_KEEP",
        in_barriers=""
        )
    Utility.addToolMessages()
    checkRecordCount(vector_2_bound_path)

    arcpy.AddMessage('ZFlag: ' + arcpy.env.outputZFlag)
    arcpy.AddMessage('MFlag: ' + arcpy.env.outputMFlag)

    arcpy.Dissolve_management(in_features=vector_2_bound_path, out_feature_class=vector_1_bound_path, dissolve_field="", statistics_fields="", multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES")
    Utility.addToolMessages()
    checkRecordCount(vector_1_bound_path)

    deleteFields(vector_1_bound_path)

    record_count = checkRecordCount(vector_1_bound_path)
    footprint_area = 0
    for row in arcpy.da.SearchCursor(vector_1_bound_path, ["SHAPE@"]):  # @UndefinedVariable
        shape = row[0]
        footprint_area = shape.getArea("PRESERVE_SHAPE", "SQUAREMETERS")

    if footprint_area <= 0:
        arcpy.AddMessage("\tWARNGING: Area is 0 in {} '{}' bound '{}'".format(elev_type, f_path, vector_bound_path))

    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[PATH][0], field_alias=FIELD_INFO[PATH][1], field_type=FIELD_INFO[PATH][2], field_length=FIELD_INFO[PATH][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[NAME][0], field_alias=FIELD_INFO[NAME][1], field_type=FIELD_INFO[NAME][2], field_length=FIELD_INFO[NAME][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[AREA][0], field_alias=FIELD_INFO[AREA][1], field_type=FIELD_INFO[AREA][2], field_length=FIELD_INFO[AREA][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[ELEV_TYPE][0], field_alias=FIELD_INFO[ELEV_TYPE][1], field_type=FIELD_INFO[ELEV_TYPE][2], field_length=FIELD_INFO[ELEV_TYPE][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[RANGE][0], field_alias=FIELD_INFO[RANGE][1], field_type=FIELD_INFO[RANGE][2], field_length=FIELD_INFO[RANGE][3])

    deleteFields(vector_1_bound_path)

    arcpy.AddMessage(raster_props)
    for field_name in KEY_LIST:
        time.sleep(0.25)
        field_shpname = FIELD_INFO[field_name][0]
        field_alias = FIELD_INFO[field_name][1]
        field_type = FIELD_INFO[field_name][2]
        field_length = FIELD_INFO[field_name][3]
        field_value = raster_props[field_name]
        if field_type == "TEXT":
            if str(field_value).endswith('\\'):
                field_value = str(field_value)[0:-1]
            field_value = r'"{}"'.format(field_value)

        addField(in_table=vector_1_bound_path, field_name=field_shpname, field_alias=field_alias, field_type=field_type, field_length=field_length, expression=field_value)


    b_f_path, b_f_name = os.path.split(f_path)
    b_f_name = os.path.splitext(b_f_name)[0]
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[PATH][0], expression='"{}"'.format(b_f_path), expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[NAME][0], expression='"{}"'.format(b_f_name), expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[AREA][0], expression=footprint_area, expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[ELEV_TYPE][0], expression='"{}"'.format(elev_type), expression_type="PYTHON_9.3")
    try:
        z_expr = "!{}! - !{}!".format(FIELD_INFO[MAX][0], FIELD_INFO[MIN][0])
        arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[RANGE][0], expression=z_expr, expression_type="PYTHON_9.3")
    except:
        pass

    deleteFileIfExists(vector_bound_path, True)
    arcpy.Clip_analysis(in_features=vector_1_bound_path, clip_features=bound_path, out_feature_class=vector_bound_path, cluster_tolerance="")
    Utility.addToolMessages()
    checkRecordCount(vector_bound_path)

    deleteFields(vector_bound_path)

    #debug = False
    #try:
    #    debug = (str(f_path).find("alamazoo") >= 0)
    #except:
    #    debug = False
    #if not debug:
    deleteFileIfExists(vector_1_bound_path, useArcpy=True)
    deleteFileIfExists(vector_2_bound_path, useArcpy=True)
    deleteFileIfExists(vector_3_bound_path, useArcpy=True)
    deleteFileIfExists(vector_4_bound_path, useArcpy=True)
    deleteFileIfExists(vector_5_bound_path, useArcpy=True)
    #else:
    #    arcpy.AddMessage("\tleaving artifacts for {} '{}'".format(elev_type, vector_bound_path))

    doTime(a, "\tCreated BOUND {}".format(vector_bound_path))
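In these examples EliminatePolygonPart is driven in two condition modes: an absolute AREA threshold ("10000 SquareMiles" in the function above) and a PERCENT threshold (the LC_Poly and Bnd_Cty_500m coverage checks). The tool also supports combined conditions such as AREA_OR_PERCENT. The snippet below is a hypothetical side-by-side of the call signatures; the geodatabase paths are placeholders.

import arcpy

in_fc = r"C:\data\work.gdb\polys"  # placeholder input feature class
# Drop contained parts smaller than an absolute area
arcpy.EliminatePolygonPart_management(in_fc, r"C:\data\work.gdb\polys_area",
                                      "AREA", "10000 SquareMiles", "0",
                                      "CONTAINED_ONLY")
# Drop contained parts smaller than a percentage of the containing polygon
arcpy.EliminatePolygonPart_management(in_fc, r"C:\data\work.gdb\polys_pct",
                                      "PERCENT", "", "5", "CONTAINED_ONLY")
# Drop parts meeting either threshold, whether or not they are contained holes
arcpy.EliminatePolygonPart_management(in_fc, r"C:\data\work.gdb\polys_any",
                                      "AREA_OR_PERCENT", "90 SquareMeters",
                                      "1", "ANY")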
Example #26
0
def RB(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_RB.gdb')
    except:
        pass
    workDir = str(workFld) + '/' + city + '_RB.gdb'
    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    """ NHD Directory """
    NHDDir = str(inDir) + '/NHD.gdb/'
    """ Set Workspace Environments """
    arcpy.env.workspace = workDir
    arcpy.env.scratchWorkspace = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_RB_LC_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        rbLCRF = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'

        steps = []
        """ Write out first line of report file """
        print 'Riparian Buffer Start Time: ' + time.asctime()
        steps.append(
            "Begin with the High Resolution National Hydrography Dataset for REPLACE-STATE--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.snapRaster = freqDir + '/LC'
        """ Set Projection Files """
        prjNumb = arcpy.Describe(str(freqDir) + '/LC').spatialReference.name
        prjNumb = prjNumb[-3:]
        prjfile = prjDir + '/NAD 1983 UTM Zone ' + prjNumb + '.prj'
        """-------- PREPARE HYDROLINES ----------------------------- """
        """ Clip to 5km Boundary, Project, Clip to 1km Boundary """

        steps.append(
            "Extract the Flowlines, Waterbodies, and Areas for 5-km around the EnviroAtlas community boundary.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        steps.append(
            "Project the Flowlines, Waterbodies, and Areas into UTM Projection.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        steps.append(
            "Clip the Flowlines, Waterbodies, and Areas to the 1-km boundary of the EnviroAtlas community boundary.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        for h in ['Area', 'Flowline', 'Waterbody']:
            arcpy.MakeFeatureLayer_management(inDir + '/Input.gdb/States',
                                              'States')
            arcpy.SelectLayerByLocation_management('States', 'INTERSECT',
                                                   freqDir + '/Bnd_5km', '',
                                                   'NEW_SELECTION')
            AbStates = []
            cursor = arcpy.SearchCursor("States", "", "", "Abbrev", "")
            for row in cursor:
                ab = row.getValue("Abbrev")
                AbStates.append(str(ab))
            if len(AbStates) > 1:
                for st in AbStates:
                    try:
                        if st == 'IN':
                            st = 'Indiana'
                        NHDDir2 = NHDDir + str(st) + '/' + st
                        arcpy.Clip_analysis(
                            NHDDir2 + '_NHD' + str(h), freqDir + '/Bnd_5km',
                            workDir + '/' + str(h) + '_5km_' + str(st))
                    except:
                        arcpy.env.outputMFlag = "Disabled"
                        arcpy.env.outputZFlag = "Disabled"
                        if st == 'IN':
                            st = 'Indiana'
                        NHDDir2 = NHDDir + str(st) + '/' + st
                        arcpy.Clip_analysis(
                            NHDDir2 + '_NHD' + str(h), freqDir + '/Bnd_5km',
                            workDir + '/' + str(h) + '_5km_' + str(st))
                        arcpy.ClearEnvironment("outputMFlag")
                        arcpy.ClearEnvironment("outputZFlag")
                        steps.append(
                            "The NHD layer -- " + str(st) + "_NHD" + str(h) +
                            " -- reqiured disabling M/Z flags for clipping. Flags reset to defaults."
                        )
                feat = arcpy.ListFeatureClasses(h + '_5km_*')
                arcpy.Merge_management(feat, h + '_5km')
                descLC = arcpy.Describe(str(freqDir) + '/LC')
                arcpy.Project_management(h + '_5km', h + '_UTM',
                                         descLC.spatialReference)
                arcpy.Clip_analysis(h + '_UTM', freqDir + '/Bnd_1km',
                                    'City_' + h)

            else:
                state = city[-2:]
                NHDDir2 = NHDDir + str(state) + '/' + state
                arcpy.Clip_analysis(NHDDir2 + '_NHD' + str(h),
                                    freqDir + '/Bnd_5km', h + '_5km')
                descLC = arcpy.Describe(str(freqDir) + '/LC')
                arcpy.Project_management(h + '_5km', h + '_UTM',
                                         descLC.spatialReference)
                arcpy.Clip_analysis(h + '_UTM', freqDir + '/Bnd_1km',
                                    'City_' + h)
        """ Select the appropriate types of features from each feature class """
        arcpy.Select_analysis(
            'City_Area', 'Area_WLRSR',
            '"FType" = 484 OR "FType" = 398 OR "FType" = 431 OR "FType" = 460')
        steps.append(
            "Select by Attribute the Areas that are classified as Wash, Lock, Rapid, Stream/River (484, 398, 431, 460).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.Select_analysis(
            'City_Flowline', 'Flowline_SRCAP',
            '"FType" = 334 OR "FType" = 460 OR "FType" = 558')
        steps.append(
            "Select by Attributes the Flowlines that are classified as Stream/River and Connector (460, 334).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.Select_analysis('City_Flowline', 'Flowline_SRC',
                              '"FType" = 334 OR "FType" = 460')
        steps.append(
            "Select by Attributes the Flowlines that are classified as Stream/River, Connector, and Artificial Path (460, 334, 558).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.Select_analysis(
            'City_Waterbody', 'Waterbody_RLPIM',
            '"FType" = 436 OR "FType" = 390 OR "FType" = 378')
        steps.append(
            "Select by Attributes the Waterbodies that are classified as Reservoir, Lake/Pond, or Ice Mass (436, 390, 378).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Make Layer Files to use the Select by Location Tool """
        arcpy.MakeFeatureLayer_management('Area_WLRSR', 'Area_Lyr')
        arcpy.MakeFeatureLayer_management('Waterbody_RLPIM', 'Waterbody_Lyr')
        """ Select hydrologically connected waterbodies and areas and save as new feature classes """
        for t in ['Area', 'Waterbody']:
            arcpy.SelectLayerByLocation_management(t + '_Lyr', 'INTERSECT',
                                                   'Flowline_SRCAP')
            arcpy.CopyFeatures_management(t + '_Lyr', t + '_Conn')

        steps.append(
            "Select by Location from the selected features in Areas and Waterbodies the features that intersect with the Stream/River, Connection, Artificial Path Flowlines (hydrologically connected areas).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Run Block Group Summaries ----------------------------- """
        """ CREATE BUFFERS """
        for d in ['15', '50']:
            """ Create buffers """
            arcpy.Buffer_analysis('Flowline_SRC', 'Flowline_' + str(d),
                                  str(d) + ' Meters', 'FULL', 'ROUND', 'ALL')
            arcpy.Buffer_analysis('Waterbody_Conn', 'Waterbody_' + str(d),
                                  str(d) + ' Meters', 'OUTSIDE_ONLY', 'ROUND',
                                  'ALL')
            arcpy.Buffer_analysis('Area_Conn', 'Area_' + str(d),
                                  str(d) + ' Meters', 'OUTSIDE_ONLY', 'ROUND',
                                  'ALL')
            """ Merge Buffers """
            arcpy.Merge_management([
                'Flowline_' + str(d), 'Waterbody_' + str(d), 'Area_' + str(d)
            ], 'RB_' + str(d) + '_mess')
            """ Dissolve Buffers """
            arcpy.Dissolve_management('RB_' + str(d) + '_mess', 'RB_' + str(d))
            """ Intersect Buffers with BGs """
            arcpy.Intersect_analysis(['RB_' + str(d), freqDir + '/BG'],
                                     'RB_' + str(d) + '_BG', 'ALL')
            """ Tabulate the Land Cover Areas in the Buffers by BG """
            arcpy.sa.TabulateArea('RB_' + str(d) + '_BG', 'bgrp',
                                  freqDir + '/LC', 'Value',
                                  'LC_RB' + str(d) + '_tbl', 1)
            arcpy.CopyRows_management('LC_RB' + str(d) + '_tbl',
                                      'TA_' + str(d) + '_Copy')

        steps.append(
            "Buffer the Hydrologically Connected Waterbodies and Areas and the Flowlines (Stream/River, Connector) by 15 meters and 50 meters.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        steps.append(
            "Merge and dissolve the 50m buffers into one shapefile. Repeat for 15m buffers.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        steps.append(
            "Intersect each buffer area with the Census Block Groups for the EnviroAtlas community.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        steps.append(
            "Using the 1-Meter Land Cover Classification for the EnviroAtlas community, tabulate the area of each land cover class within the riparian buffers in each Census Block Group as well as the area of each land cover class within each whole Census Block Group.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Rename fields in the Tabulate Area Tables """
        for d in ['15', '50']:
            for type, val in [('UnC', 0), ('Water', 10), ('Imp', 20),
                              ('SB', 30), ('TF', 40), ('Shrub', 52),
                              ('GH', '70'), ('Ag', 80), ('Orch', 81),
                              ('WW', 91), ('EW', 92)]:
                try:
                    arcpy.AlterField_management('LC_RB' + str(d) + '_tbl',
                                                'VALUE_' + str(val), str(type))
                except:
                    arcpy.AddField_management('LC_RB' + str(d) + '_tbl',
                                              str(type), 'DOUBLE')
                    arcpy.CalculateField_management('LC_RB' + str(d) + '_tbl',
                                                    str(type), 0, 'PYTHON_9.3')
            """ Add Fields """
            for fld in ['_LArea', '_ImpM', '_ForM', '_VegM']:
                arcpy.AddField_management('LC_RB' + str(d) + '_tbl',
                                          'RB' + str(d) + str(fld), 'LONG')
            for fld in ['_LABGP', '_ImpP', '_ForP', '_VegP']:
                arcpy.AddField_management('LC_RB' + str(d) + '_tbl',
                                          'RB' + str(d) + str(fld), 'FLOAT')
            """ Calculate LC Combinations """
            arcpy.CalculateField_management(
                'LC_RB' + str(d) + '_tbl', 'RB' + str(d) + '_LArea',
                "!Imp! + !SB! + !TF! + !Shrub! + !GH! + !Ag! + !Orch! + !WW! + !EW!",
                'PYTHON_9.3')
            arcpy.CalculateField_management('LC_RB' + str(d) + '_tbl',
                                            'RB' + str(d) + '_ImpM', "!Imp!",
                                            'PYTHON_9.3')
            arcpy.CalculateField_management('LC_RB' + str(d) + '_tbl',
                                            'RB' + str(d) + '_ForM',
                                            "!TF! + !WW!", 'PYTHON_9.3')
            arcpy.CalculateField_management(
                'LC_RB' + str(d) + '_tbl', 'RB' + str(d) + '_VegM',
                "!TF! + !Shrub! + !GH! + !WW! + !EW!", 'PYTHON_9.3')
            """ Calcualte Percentages """
            arcpy.CalculateField_management(
                'LC_RB' + str(d) + '_tbl', 'RB' + str(d) + '_ImpP',
                '"%.2f" % ((float(!RB' + str(d) + '_ImpM!)/float(!RB' +
                str(d) + '_LArea!))*100)', 'PYTHON_9.3')
            arcpy.CalculateField_management(
                'LC_RB' + str(d) + '_tbl', 'RB' + str(d) + '_ForP',
                '"%.2f" % ((float(!RB' + str(d) + '_ForM!)/float(!RB' +
                str(d) + '_LArea!))*100)', 'PYTHON_9.3')
            arcpy.CalculateField_management(
                'LC_RB' + str(d) + '_tbl', 'RB' + str(d) + '_VegP',
                '"%.2f" % ((float(!RB' + str(d) + '_VegM!)/float(!RB' +
                str(d) + '_LArea!))*100)', 'PYTHON_9.3')

        steps.append(
            "In Tabulate Area Table for the 50 Meter buffers, add fields RB50_LArea, RB50_LABGP, RB50_ImpM, RB50_ForM, RB50_VegM, RB50_ImpP, RB50_ForP, RB50_VegP--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        steps.append(
            "Calculate fields: RB50_LArea = REPLACE-RBLA; RB50_ImpM = Impervious; RB50_ForM = REPLACE-RBRF; RB50_VegM = REPLACE-RBVG.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        steps.append(
            "Calculate fields: RB50_ImpP = RB50_ImpM/RB50_LArea*100; RB50_VegP = RB50_VegM/RB50_LArea*100; RB50_ForP = RB50_ForM/RB50_LArea*100. Limit all fields to 2 decimal places.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        steps.append(
            "Repeat previous 3 steps for 15 Meter Tabulate Area Table.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Copy Fields to Combined Table """
        arcpy.TableToTable_conversion(freqDir + '/BG', workDir,
                                      city + '_RipBuff_LC', '', 'bgrp')
        arcpy.DeleteField_management(city + '_RipBuff_LC', [
            'SUM_POP10', 'NonWhite', 'Black', 'Blackpct', 'PLx2_Pop',
            'PLx2_Pct', 'PopWithin', 'PctWithin', 'Area', 'SUM_HOUSIN',
            'under_1', 'under_1pct', 'under_13', 'under_13pc', 'over_70',
            'over_70pct', 'Shape_Length', 'Shape_Leng', 'NonWhite_Pop',
            'NonWt_Pct', 'Shape_Le_1', 'Shape_Area', 'Dasy_Pop', 'State'
        ])
        steps.append(
            "Create a new table based on the EnviroAtlas community block groups table retaining the BGRP and LandA_M fields. Join all calculated fields from both of the Tabulate Area Tables with the new BG table.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Join Fields to Final Table """
        arcpy.JoinField_management(city + '_RipBuff_LC', 'bgrp', 'LC_RB50_tbl',
                                   'bgrp', [
                                       'RB50_LArea', 'RB50_LABGP', 'RB50_ImpP',
                                       'RB50_ForP', 'RB50_VegP'
                                   ])
        arcpy.JoinField_management(city + '_RipBuff_LC', 'bgrp', 'LC_RB15_tbl',
                                   'bgrp', [
                                       'RB15_LArea', 'RB15_LABGP', 'RB15_ImpP',
                                       'RB15_ForP', 'RB15_VegP'
                                   ])
        """ Fix Null Values """
        arcpy.MakeTableView_management(city + '_RipBuff_LC', 'RipBuff_tbl')
        arcpy.SelectLayerByAttribute_management('RipBuff_tbl', 'NEW_SELECTION',
                                                'RB50_LArea IS NULL')
        arcpy.CalculateField_management('RipBuff_tbl', 'RB50_LArea', 0,
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('RipBuff_tbl',
                                                'CLEAR_SELECTION')
        arcpy.SelectLayerByAttribute_management('RipBuff_tbl', 'NEW_SELECTION',
                                                'RB15_LArea IS NULL')
        arcpy.CalculateField_management('RipBuff_tbl', 'RB15_LArea', 0,
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('RipBuff_tbl',
                                                'CLEAR_SELECTION')

        arcpy.CalculateField_management(
            'RipBuff_tbl', 'RB50_LABGP',
            '"%.2f" % ((float(!RB50_LArea!)/float(!LandA_M!)) * 100)',
            'PYTHON_9.3')
        arcpy.CalculateField_management(
            'RipBuff_tbl', 'RB15_LABGP',
            '"%.2f" % ((float(!RB15_LArea!)/float(!LandA_M!)) * 100)',
            'PYTHON_9.3')

        steps.append(
            "For all BGs with no riparian buffer area, calculate field RB50_LArea = 0. Calculate Field RB50_LABGP = RB50_LArea/LandA_M*100. Repeat for 15 meter buffers.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        arcpy.SelectLayerByAttribute_management('RipBuff_tbl', 'NEW_SELECTION',
                                                'RB50_LArea = 0')
        for fld in ['RB50_ImpP', 'RB50_ForP', 'RB50_VegP']:
            arcpy.CalculateField_management('RipBuff_tbl', fld, -99999,
                                            'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('RipBuff_tbl',
                                                'CLEAR_SELECTION')

        arcpy.SelectLayerByAttribute_management('RipBuff_tbl', 'NEW_SELECTION',
                                                'RB15_LArea = 0')
        for fld in ['RB15_ImpP', 'RB15_ForP', 'RB15_VegP']:
            arcpy.CalculateField_management('RipBuff_tbl', fld, -99999,
                                            'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('RipBuff_tbl',
                                                'CLEAR_SELECTION')
        steps.append(
            "Calculate fields where RB50_LArea = 0: RB50_ImpP, RB50_VegP, RB50_ForP = -99999; Repeat for RB15.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if arcpy.Exists(freqDir + '/LC_Poly') == False:
            arcpy.env.extent = freqDir + '/LC'
            arcpy.env.snapRaster = freqDir + '/LC'
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21,
                                                                1], [22, 1],
                                     [30, 1], [40, 1], [52, 1], [70, 1],
                                     [80, 1], [82, 1], [91, 1], [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO',
                str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly',
                str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP',
                str(freqDir) + '/LC_Poly')
        """ Buffer the LC Polygon by -500m """
        if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty',
                str(freqDir) + '/Bnd_Cty_500m', '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')
        """ Identify whether LC is large enough """
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')
        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            for width in [15, 50]:
                """ Identify BGs within half the analysis width of the LC edge """
                arcpy.Buffer_analysis(
                    str(freqDir) + '/LC_Poly',
                    'LC_Poly_Minus' + str(int(width / 2)),
                    '-' + str(width) + ' meters')
                arcpy.MakeFeatureLayer_management(
                    'LC_Poly_Minus' + str(int(width / 2)), 'Minus')
                arcpy.MakeFeatureLayer_management(freqDir + '/BG', 'BG')

                arcpy.SelectLayerByLocation_management('BG', 'WITHIN', 'Minus',
                                                       '', 'NEW_SELECTION',
                                                       'INVERT')

                bgValue = float(arcpy.GetCount_management('BG').getOutput(0))
                """ For all BGs too close to the LC edge, assign both fields a value of -99998 """
                if bgValue > 0:
                    bgrps = []
                    cursor = arcpy.SearchCursor('BG')
                    for row in cursor:
                        value = row.getValue('bgrp')
                        bgrps.append(value)
                    bgrps = list(set(bgrps))
                    expression = ''
                    for bgrp in bgrps:
                        expression = expression + " OR bgrp = '" + str(
                            bgrp) + "'"
                    expression = expression[4:]
                    arcpy.SelectLayerByAttribute_management(
                        'RipBuff_tbl', 'NEW_SELECTION', expression)
                    arcpy.CalculateField_management(
                        'RipBuff_tbl', 'RB' + str(width) + '_LArea', '-99998',
                        'PYTHON_9.3')
                    arcpy.CalculateField_management(
                        'RipBuff_tbl', 'RB' + str(width) + '_LABGP', '-99998',
                        'PYTHON_9.3')
                    arcpy.CalculateField_management(
                        'RipBuff_tbl', 'RB' + str(width) + '_ImpP', '-99998',
                        'PYTHON_9.3')
                    arcpy.CalculateField_management(
                        'RipBuff_tbl', 'RB' + str(width) + '_ForP', '-99998',
                        'PYTHON_9.3')
                    arcpy.CalculateField_management(
                        'RipBuff_tbl', 'RB' + str(width) + '_VegP', '-99998',
                        'PYTHON_9.3')
                    arcpy.SelectLayerByAttribute_management(
                        'RipBuff_tbl', 'CLEAR_SELECTION')
                steps.append(
                    "Calculate Field for BGs within " + str(width) +
                    "m of the edge of the land cover, all fields = -99998.--" +
                    time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Final Table """
        arcpy.CopyRows_management('RipBuff_tbl', 'RB_LC')
        try:
            arcpy.Delete_management(finalDir + '/' + str(city) + '_RB_LC')
        except:
            pass
        arcpy.TableToTable_conversion('RB_LC', finalDir, city + '_RB_LC')
        allFields = [
            f.name for f in arcpy.ListFields(finalDir + '/' + city + '_RB_LC')
        ]
        for field in allFields:
            if field not in [
                    'bgrp', 'OBJECTID', 'RB50_LArea', 'RB50_LABGP',
                    'RB50_ImpP', 'RB50_ForP', 'RB50_VegP', 'RB15_LArea',
                    'RB15_LABGP', 'RB15_ImpP', 'RB15_ForP', 'RB15_VegP'
            ]:
                arcpy.DeleteField_management(finalDir + '/' + city + '_RB_LC',
                                             [field])

        steps.append(
            "Export the fields to be displayed in the EnviroAtlas to a final gdb table. RB15_LArea, RB15_LABGP, RB15_ImpP, RB15_VegP, RB15_ForP, RB50_LArea, RB50_LABGP, RB50_ImpP, RB50_VegP, RB50_ForP.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Write Steps to the Logfile """
        for step in steps:
            rbLCRF.write(step)

        print 'RB_LC End Time; RBXm_LC Start Time: ' + time.asctime()

        #-------- CREATE FOUNDATIONAL LAYERS -----------------------------
        foundsteps = steps[0:9]
        """-------- PREP HYDROLINES ------------------------------------- """
        """ Create lines mid-15m and 51 buffer to extract moving window along """
        for b in ['7.5', '25.5']:
            foundsteps = foundsteps[0:9]
            b2 = b[:-2]  # chop the .5 off
            """ Buffer Hydro Features """
            arcpy.Buffer_analysis('Flowline_SRC', 'Flowline_' + str(b2) + 'm',
                                  str(b) + ' Meters', 'FULL', 'FLAT', 'ALL')
            arcpy.Buffer_analysis('Area_Conn', 'Area_' + str(b2) + 'm',
                                  str(b) + ' Meters', 'FULL', 'FLAT', 'ALL')
            arcpy.Buffer_analysis('Waterbody_Conn',
                                  'Waterbody_' + str(b2) + 'm',
                                  str(b) + ' Meters', 'FULL', 'FLAT', 'ALL')
            foundsteps.append(
                "Buffer the Hydrologically Connected Waterbodies and Areas and the Flowlines (Stream/River, Connector) by 7.5 meters and 25.5 meters.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Merge and Dissolve """
            arcpy.Merge_management([
                'Flowline_' + str(b2) + 'm', 'Area_' + str(b2) + 'm',
                'Waterbody_' + str(b2) + 'm'
            ], 'RB_' + str(b2) + 'm_mess')
            arcpy.Dissolve_management('RB_' + str(b2) + 'm_mess',
                                      'RB_' + str(b2) + 'm')
            foundsteps.append(
                "Merge and dissolve all three 7.5m buffers into one feature. Repeat for 25.5m buffers.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Convert to polylines """
            arcpy.PolygonToLine_management('RB_' + str(b2) + 'm',
                                           'RB_' + str(b2) + 'm_line',
                                           'IGNORE_NEIGHBORS')
            foundsteps.append(
                "Convert each set of buffer polygons (7.5m and 25.5m, separately) into a polyline that represents the analysis line in the center of each size riparian buffer.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Create Land Cover Binaries ------------------------------------- """
        """ Riparian Forest """
        if arcpy.Exists(str(freqDir) + '/RipForIO') == False:
            outReclass4 = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 1], [52, 0], [70, 0],
                                     [80, 0], [82, 0], [91, 1], [92, 0]]))
            outReclass4.save(str(freqDir) + '/RipForIO')
            foundsteps.append(
                "Reclassify the Land Cover into Binary Forest. REPLACE-RFE--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("RipForIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        else:
            foundsteps.append(
                "Reclassify the Land Cover into Binary Forest. REPLACE-RFE--RipForIO--\n"
            )
        """ Vegetated Land """
        if arcpy.Exists(str(freqDir) + '/VegeIO') == False:
            outReclass5 = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 1], [52, 1], [70, 1],
                                     [80, 0], [82, 0], [91, 1], [92, 1]]))
            outReclass5.save(str(freqDir) + '/VegeIO')
            foundsteps.append(
                "Reclassify the Land Cover into Binary Vegetation. REPLACE-VGE--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("VegeIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        else:
            foundsteps.append(
                "Reclassify the Land Cover into Binary Vegetation. REPLACE-VGE--VegeIO--\n"
            )
        """ Water """
        if arcpy.Exists(str(freqDir) + '/WaterIO') == False:
            outReclass3 = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 0], [52, 0], [70, 0],
                                     [80, 0], [82, 0], [91, 0], [92, 0]]))
            outReclass3.save(str(freqDir) + '/WaterIO')
            foundsteps.append(
                "Reclassify the Land Cover into Binary Water. (Water - 10 = 1; All Else = 0.)--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("WaterIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        else:
            foundsteps.append(
                "Reclassify the Land Cover into Binary Water. (Water - 10 = 1; All Else = 0).--WaterIO--"
                + '\n')

        for i, div in [['15', 225], ['51', 2601]]:
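            # i is the moving window width in cells; div = i*i is the full
            # window cell count (15*15 = 225, 51*51 = 2601) referenced in the
            # land area step and log text below.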
            foundsteps = foundsteps[0:15]
            """ Run Moving Window Analyses """
            for bin, mw, name in [['VegeIO', 'Vege', 'Vegetation'],
                                  ['RipForIO', 'RFor', 'Forest'],
                                  ['WaterIO', 'Wat', 'Water']]:
                outFocalStat = arcpy.sa.FocalStatistics(
                    str(freqDir) + '/' + str(bin),
                    arcpy.sa.NbrRectangle(int(i), int(i), 'CELL'), 'SUM',
                    'NODATA')
                outFocalStat.save(str(mw) + '_' + str(i) + 'R')
                foundsteps.append(
                    "Run Focal Statistics on the " + str(name) +
                    " binary with rectangular neighborhoods of " + str(i) +
                    'x' + str(i) + " cells.--" +
                    time.strftime('%Y%m%d--%H%M%S') + '--\n')

            half = (int(i) / 2)
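            # Python 2 integer division: 15/2 = 7 and 51/2 = 25, matching the
            # RB_7m_line and RB_25m_line analysis lines created earlier.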
            """ Create a surface that represents percent of forest per land area within each moving window """
            outMinus = (int(i) * int(i)) - arcpy.sa.Raster('Wat_' + str(i) + 'R')
            outMinus.save('LA_' + str(i))
            foundsteps.append(
                "Create a land area raster for the " + str(i) +
                "m moving windows by subtracting the water moving window rasters from "
                + str(div) + ", respectively.--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ If the LC isn't large enough, delete erroneous line segments """
            if bigEnough == 0:
                arcpy.Buffer_analysis(
                    str(freqDir) + '/LC_Poly',
                    'LC_Poly_Minus_' + str(half + 1),
                    '-' + str(half + 1) + ' meters')
                arcpy.Union_analysis([
                    str(freqDir) + '/Bnd_Cty', 'LC_Poly_Minus_' + str(half + 1)
                ], 'LC_Minus_BndCty_Union_' + str(half + 1), 'ONLY_FID')
                arcpy.Select_analysis(
                    'LC_Minus_BndCty_Union_' + str(half + 1),
                    'EdgeAffectedArea_' + str(half + 1),
                    'FID_Bnd_Cty > 0 AND FID_LC_Poly_Minus_' + str(half + 1) +
                    ' = -1')

            for lc, name in [['Vege', 'Vegetated Land'], ['RFor', 'Forest']]:
                foundsteps = foundsteps[0:19]
                """ Divide the LC Moving Window by the Land Area Moving Window """
                outDivide = arcpy.sa.Float(
                    arcpy.sa.Raster(lc + '_' + str(i) + 'R')) / arcpy.sa.Float(
                        arcpy.sa.Raster('LA_' + str(i)))
                outDivide.save('Rat_' + lc + str(i))
                foundsteps.append(
                    "Divide the " + str(name) +
                    " moving window raster by the new land area raster.--" +
                    time.strftime('%Y%m%d--%H%M%S') + '--\n')
                """ Multiply by 100 """
                # Note JPL 8/1/2018: probably don't need to save this layer #
                outTimes = arcpy.sa.Raster('Rat_' + lc + str(i)) * 100
                outTimes.save('P_' + lc + str(i))
                foundsteps.append(
                    "Multiply the ratio raster by 100 to obtain a percent " +
                    str(name) + " raster.--" +
                    time.strftime('%Y%m%d--%H%M%S') + '--\n')
                """ Extract moving window pixels along mid-buffer lines """
                outExtractByMask1 = arcpy.sa.ExtractByMask(
                    'P_' + lc + str(i), 'RB_' + str(half) + 'm_line')
                outExtractByMask1.save(lc + '_RB' + str(i))
                foundsteps.append("Extract the percent " + str(name) +
                                  " raster along the " + str(i) +
                                  "m analysis line--" +
                                  time.strftime('%Y%m%d--%H%M%S') + '--\n')
                """ Reclassify extracted pixels into manageable groups """
                outReclass = arcpy.sa.Reclassify(
                    lc + '_RB' + str(i), 'Value',
                    arcpy.sa.RemapRange([[0, 20, 20], [20, 40, 40],
                                         [40, 60, 60], [60, 80, 80],
                                         [80, 100, 100]]))
                outReclass.save(lc + str(i) + '_ReC')
                foundsteps.append(
                    "Reclassify the extracted raster into percentage classes 0-20:20; 20-40:40; 40-60:60; 60-80:80; 80-100:100.--"
                    + time.strftime('%Y%m%d--%H%M%S') + '--\n')
                """ Convert to Polygons """
                arcpy.RasterToPolygon_conversion(lc + str(i) + '_ReC',
                                                 lc + str(i) + '_Poly',
                                                 'NO_SIMPLIFY')
                foundsteps.append(
                    "Convert the reclassified rasters into polygons without simplifying.--"
                    + time.strftime('%Y%m%d--%H%M%S') + '--\n')
                """ Intersect Polygon Areas with Original Buffer Lines to Create Foundational Lines """
                arcpy.Intersect_analysis(
                    [lc + str(i) + '_Poly', 'RB_' + str(half) + 'm_line'],
                    lc + '_' + str(half) + 'm_Line', 'ALL', '', 'LINE')
                foundsteps.append(
                    "Spatially join the polygons with the analysis line to add percent class breaks along the analysis line.--"
                    + time.strftime('%Y%m%d--%H%M%S') + '--\n')
                """ Clip to Bnd """
                arcpy.Clip_analysis(lc + '_' + str(half) + 'm_Line',
                                    freqDir + '/Bnd',
                                    'RB' + str(i) + 'm_' + lc + '_UTM')
                foundsteps.append(
                    "Clip the new analysis lines to the EnviroAtlas community boundary.--"
                    + time.strftime('%Y%m%d--%H%M%S') + '--\n')
                """ Convert to Albers """
                prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
                arcpy.Project_management('RB' + str(i) + 'm_' + lc + '_UTM',
                                         'RB' + str(i) + 'm_' + lc, prjfile)
                foundsteps.append("Project lines into Albers.--" +
                                  time.strftime('%Y%m%d--%H%M%S') + '--\n')
                """ Change field name """
                if lc == 'Vege':
                    arcpy.AlterField_management('RB' + str(i) + 'm_' + lc,
                                                'gridcode', 'PVege')

                elif lc == 'RFor':
                    arcpy.AlterField_management('RB' + str(i) + 'm_' + lc,
                                                'gridcode', 'PFor')
                """ If the LC isn't large enough, delete erroneous line segments """
                if bigEnough == 0:
                    arcpy.MakeFeatureLayer_management(
                        'RB' + str(i) + 'm_' + lc, 'RB_lyr')
                    arcpy.MakeFeatureLayer_management(
                        'EdgeAffectedArea_' + str(half + 1), 'EEArea')
                    arcpy.SelectLayerByLocation_management(
                        'RB_lyr', 'INTERSECT', 'EEArea', '', 'NEW_SELECTION')
                    arcpy.SelectLayerByAttribute_management(
                        'RB_lyr', 'SWITCH_SELECTION')
                    arcpy.CopyFeatures_management(
                        'RB_lyr', 'RB' + str(i) + 'm_' + lc + '_EdgeEffect')
                    arcpy.SelectLayerByAttribute_management(
                        'RB_lyr', 'CLEAR_SELECTION')
                """ Dissolve Lines """
                try:
                    if lc == 'Vege':
                        arcpy.Dissolve_management(
                            'RB' + str(i) + 'm_' + lc + '_EdgeEffect',
                            city + '_RB' + str(i) + 'm_' + lc + '_D', 'PVege')
                    elif lc == 'RFor':
                        arcpy.Dissolve_management(
                            'RB' + str(i) + 'm_' + lc + '_EdgeEffect',
                            city + '_RB' + str(i) + 'm_' + lc + '_D', 'PFor')
                except:
                    if lc == 'Vege':
                        arcpy.Dissolve_management(
                            'RB' + str(i) + 'm_' + lc,
                            city + '_RB' + str(i) + 'm_' + lc + '_D', 'PVege')
                    elif lc == 'RFor':
                        arcpy.Dissolve_management(
                            'RB' + str(i) + 'm_' + lc,
                            city + '_RB' + str(i) + 'm_' + lc + '_D', 'PFor')
                """ Convert to final shapefiles """
                try:
                    arcpy.Delete_management(finalDir + '/' + str(city) +
                                            '_RB' + str(i) + 'm_' + lc)
                except:
                    pass

                arcpy.FeatureClassToFeatureClass_conversion(
                    city + '_RB' + str(i) + 'm_' + lc + '_D', finalDir,
                    city + '_RB' + str(i) + 'm_' + lc)
                foundsteps.append(
                    "Export the analysis lines to a geodatabase for display in EnviroAtlas.--"
                    + time.strftime('%Y%m%d--%H%M%S') + '--\n')

                tmpName = city + '_RB' + str(i) + 'm_' + str(
                    lc) + '_' + time.strftime('%Y%m%d_%H-%M')
                reportfileName = reportfileDir + '/' + tmpName + '.txt'
                reportFile = open(reportfileName, 'w')

                for step in foundsteps:
                    reportFile.write(step)

                reportFile.close()

                print 'Finished with RB' + str(i) + 'm_' + str(
                    lc) + ': ' + time.asctime()
        print 'RB End Time: ' + time.asctime() + '\n'

        #-------- COMPLETE LOGFILES ---------------------------------------------
        rbLCRF.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        rbLCRF.write("\nSomething went wrong.\n\n")
        rbLCRF.write("Pyton Traceback Message below:")
        rbLCRF.write(traceback.format_exc())
        rbLCRF.write("\nArcMap Error Messages below:")
        rbLCRF.write(arcpy.GetMessages(2))
        rbLCRF.write("\nArcMap Warning Messages below:")
        rbLCRF.write(arcpy.GetMessages(1))

        rbLCRF.write("\n\nEnded at " + time.asctime() + '\n')
        rbLCRF.write("\n---End of Log File---\n")

        if rbLCRF:
            rbLCRF.close()
Example #27
0
def ShrinkWrap(inFeats,
               dilDist,
               outFeats,
               smthMulti=8,
               scratchGDB="in_memory"):
    # Parse dilation distance, and increase it to get smoothing distance
    smthMulti = float(smthMulti)
    origDist, units, meas = multiMeasure(dilDist, 1)
    smthDist, units, smthMeas = multiMeasure(dilDist, smthMulti)

    # Parameter check
    if origDist <= 0:
        arcpy.AddError(
            "You need to enter a positive, non-zero value for the dilation distance"
        )
        raise arcpy.ExecuteError

    #tmpWorkspace = arcpy.env.scratchGDB
    #arcpy.AddMessage("Additional critical temporary products will be stored here: %s" % tmpWorkspace)

    # Set up empty trashList for later garbage collection
    trashList = []

    # Declare path/name of output data and workspace
    drive, path = os.path.splitdrive(outFeats)
    path, filename = os.path.split(path)
    myWorkspace = drive + path
    Output_fname = filename

    # Process:  Create Feature Class (to store output)
    arcpy.CreateFeatureclass_management(myWorkspace, Output_fname, "POLYGON",
                                        "", "", "", inFeats)

    # Process:  Clean Features
    #cleanFeats = tmpWorkspace + os.sep + "cleanFeats"
    cleanFeats = scratchGDB + os.sep + "cleanFeats"
    CleanFeatures(inFeats, cleanFeats)
    trashList.append(cleanFeats)

    # Process:  Dissolve Features
    #dissFeats = tmpWorkspace + os.sep + "dissFeats"
    # Writing to disk in hopes of stopping geoprocessing failure
    #arcpy.AddMessage("This feature class is stored here: %s" % dissFeats)
    dissFeats = scratchGDB + os.sep + "dissFeats"
    arcpy.Dissolve_management(cleanFeats, dissFeats, "", "", "SINGLE_PART", "")
    trashList.append(dissFeats)

    # Process:  Generalize Features
    # This should prevent random processing failures on features with many vertices, and also speed processing in general
    arcpy.Generalize_edit(dissFeats, "0.1 Meters")

    # Process:  Buffer Features
    #arcpy.AddMessage("Buffering features...")
    #buffFeats = tmpWorkspace + os.sep + "buffFeats"
    buffFeats = scratchGDB + os.sep + "buffFeats"
    arcpy.Buffer_analysis(dissFeats, buffFeats, meas, "", "", "ALL")
    trashList.append(buffFeats)

    # Process:  Explode Multiparts
    #explFeats = tmpWorkspace + os.sep + "explFeats"
    # Writing to disk in hopes of stopping geoprocessing failure
    #arcpy.AddMessage("This feature class is stored here: %s" % explFeats)
    explFeats = scratchGDB + os.sep + "explFeats"
    arcpy.MultipartToSinglepart_management(buffFeats, explFeats)
    trashList.append(explFeats)

    # Process:  Get Count
    numWraps = (arcpy.GetCount_management(explFeats)).getOutput(0)
    arcpy.AddMessage(
        'Shrinkwrapping: There are %s features after consolidation' % numWraps)

    # Loop through the exploded buffer features
    counter = 1
    with arcpy.da.SearchCursor(explFeats, ["SHAPE@"]) as myFeats:
        for Feat in myFeats:
            arcpy.AddMessage('Working on shrink feature %s' % str(counter))
            featSHP = Feat[0]
            tmpFeat = scratchGDB + os.sep + "tmpFeat"
            arcpy.CopyFeatures_management(featSHP, tmpFeat)
            trashList.append(tmpFeat)

            # Process:  Repair Geometry
            arcpy.RepairGeometry_management(tmpFeat, "DELETE_NULL")

            # Process:  Make Feature Layer
            arcpy.MakeFeatureLayer_management(dissFeats, "dissFeatsLyr", "",
                                              "", "")
            trashList.append("dissFeatsLyr")

            # Process: Select Layer by Location (Get dissolved features within each exploded buffer feature)
            arcpy.SelectLayerByLocation_management("dissFeatsLyr", "INTERSECT",
                                                   tmpFeat, "",
                                                   "NEW_SELECTION")

            # Process:  Coalesce features (expand)
            coalFeats = scratchGDB + os.sep + 'coalFeats'
            Coalesce("dissFeatsLyr", smthMeas, coalFeats, scratchGDB)
            # Increasing the dilation distance improves smoothing and reduces the "dumbbell" effect.
            trashList.append(coalFeats)

            # Eliminate gaps
            noGapFeats = scratchGDB + os.sep + "noGapFeats"
            arcpy.EliminatePolygonPart_management(coalFeats, noGapFeats,
                                                  "PERCENT", "", 99,
                                                  "CONTAINED_ONLY")

            # Process:  Append the final geometry to the ShrinkWrap feature class
            arcpy.AddMessage("Appending feature...")
            arcpy.Append_management(noGapFeats, outFeats, "NO_TEST", "", "")

            counter += 1
            del Feat

    # Cleanup
    if scratchGDB == "in_memory":
        garbagePickup(trashList)

    return outFeats
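
A minimal invocation sketch for ShrinkWrap, assuming the helper functions it relies on (multiMeasure, CleanFeatures, Coalesce, garbagePickup) are importable alongside it; the paths and the 250-meter dilation distance below are hypothetical.

# Hypothetical inputs for illustration only
in_patches = r"C:\GIS\work.gdb\habitatPatches"
out_wrap = r"C:\GIS\work.gdb\habitatPatches_wrap"
ShrinkWrap(in_patches, "250 Meters", out_wrap, smthMulti=8)
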
def aggregate_watersheds(watersheds_fc,
                         nhd_gdb,
                         eligible_lakes,
                         output_fc,
                         mode=['interlake', 'cumulative']):
    """Creates a feature class with all the aggregated upstream watersheds for all
    eligible lakes (>4ha and certain FCodes) in this subregion."""
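    # Note on mode (inferred from the branches below): pass either 'interlake',
    # which uses the junctions of other lakes >= 10 ha as barriers so each trace
    # stops at those lakes, or 'cumulative', which traces the full upstream
    # network; the list default appears only to document the two options.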
    arcpy.env.workspace = 'in_memory'

    # names
    huc4_code = re.search('\d{4}', os.path.basename(nhd_gdb)).group()
    nhd_waterbody = os.path.join(nhd_gdb, 'NHDWaterbody')
    hydro_net_junctions = os.path.join(nhd_gdb, 'Hydrography',
                                       'HYDRO_NET_Junctions')
    hydro_net = os.path.join(nhd_gdb, 'Hydrography', 'HYDRO_NET')

    # get this hu4
    wbd_hu4 = os.path.join(nhd_gdb, "WBD_HU4")
    field_name = (arcpy.ListFields(wbd_hu4, "HU*4"))[0].name
    whereClause4 = """{0} = '{1}'""".format(
        arcpy.AddFieldDelimiters(nhd_gdb, field_name), huc4_code)
    arcpy.Select_analysis(wbd_hu4, "hu4", whereClause4)

    # make layers for upcoming spatial selections
    # and fcs in memory
    arcpy.MakeFeatureLayer_management(hydro_net_junctions, "junctions")
    arcpy.MakeFeatureLayer_management(watersheds_fc, 'watersheds')

    all_lakes = eligible_lakes
    arcpy.MakeFeatureLayer_management(all_lakes, "all_lakes_lyr")
    ##    arcpy.SelectLayerByLocation_management("all_lakes_lyr", "INTERSECT", "hu8")
    arcpy.CopyFeatures_management("all_lakes_lyr", 'eligible_lakes')

    # ten ha lakes and junctions
    if mode == 'interlake':
        tenha_where_clause = """"AreaSqKm" >= .1"""
        arcpy.Select_analysis("eligible_lakes", 'tenha_lakes',
                              tenha_where_clause)
        arcpy.MakeFeatureLayer_management('tenha_lakes', 'tenha_lakes_lyr')
        arcpy.SelectLayerByLocation_management('junctions',
                                               'INTERSECT',
                                               'tenha_lakes',
                                               search_distance="1 Meters")
        arcpy.CopyFeatures_management('junctions', 'tenha_junctions')
        arcpy.MakeFeatureLayer_management('tenha_junctions',
                                          'tenha_junctions_lyr')
    # for each lake, calculate its interlake watershed in the upcoming block
    prog_count = int(arcpy.GetCount_management('eligible_lakes').getOutput(0))
    counter = 0

    with arcpy.da.SearchCursor('eligible_lakes',
                               ["Permanent_Identifier"]) as cursor:
        for row in cursor:
            counter += 1
            if counter % 50 == 0:
                print("{0} out of {1} lakes completed.".format(
                    counter, prog_count))
            id = row[0]
            where_clause = """"{0}" = '{1}'""".format("Permanent_Identifier",
                                                      id)
            arcpy.MakeFeatureLayer_management('eligible_lakes', "this_lake",
                                              where_clause)
            arcpy.SelectLayerByLocation_management("junctions",
                                                   "INTERSECT",
                                                   "this_lake",
                                                   search_distance="1 Meters")
            count_jxns = int(
                arcpy.GetCount_management('junctions').getOutput(0))
            if count_jxns == 0:
                arcpy.SelectLayerByLocation_management('watersheds',
                                                       'CONTAINS', 'this_lake')
            else:
                arcpy.CopyFeatures_management("junctions", 'this_lake_jxns')
                if mode == 'interlake':
                    arcpy.SelectLayerByLocation_management(
                        'tenha_junctions_lyr', 'ARE_IDENTICAL_TO',
                        'this_lake_jxns')
                    arcpy.SelectLayerByAttribute_management(
                        'tenha_junctions_lyr', 'SWITCH_SELECTION')
                    arcpy.CopyFeatures_management('tenha_junctions_lyr',
                                                  'other_tenha_junctions')
                    arcpy.SelectLayerByLocation_management(
                        'tenha_lakes_lyr',
                        'INTERSECT',
                        'other_tenha_junctions',
                        search_distance='1 Meters')
                    arcpy.TraceGeometricNetwork_management(
                        hydro_net,
                        "upstream",
                        'this_lake_jxns',
                        "TRACE_UPSTREAM",
                        in_barriers='other_tenha_junctions')
                elif mode == 'cumulative':
                    arcpy.TraceGeometricNetwork_management(
                        hydro_net, "upstream", 'this_lake_jxns',
                        "TRACE_UPSTREAM")
                arcpy.SelectLayerByLocation_management("watersheds",
                                                       "CONTAINS",
                                                       "upstream/NHDFlowline")
                arcpy.SelectLayerByLocation_management(
                    "watersheds",
                    'CROSSED_BY_THE_OUTLINE_OF',
                    'upstream/NHDFlowline',
                    selection_type="ADD_TO_SELECTION")
                watersheds_count = int(
                    arcpy.GetCount_management("watersheds").getOutput(0))
                if watersheds_count == 0:
                    arcpy.SelectLayerByLocation_management(
                        'watersheds', 'CONTAINS', 'this_lake')

            # Sometimes when the trace stops at 10-ha lake, that shed(s)
            # gets selected. Remove them with the tenha_lakes_lyr
            # that already has only OTHER lakes selected
            # using other_tenha_junctions causes some stuff to be picked up
            # that shouldn't be when junctions are right on boundaries
            if mode == 'interlake':
                arcpy.SelectLayerByLocation_management(
                    "watersheds",
                    "CONTAINS",
                    "tenha_lakes_lyr",
                    selection_type="REMOVE_FROM_SELECTION")
            arcpy.Dissolve_management("watersheds", "this_watershed")
            arcpy.AddField_management("this_watershed",
                                      'Permanent_Identifier',
                                      'TEXT',
                                      field_length=255)
            arcpy.CalculateField_management("this_watershed",
                                            "Permanent_Identifier",
                                            """'{}'""".format(id), "PYTHON")
            arcpy.Erase_analysis('this_watershed', 'this_lake',
                                 'lakeless_watershed')
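            # The quoted "output_fc" below is an in_memory intermediate; the
            # output_fc parameter receives the final clipped result at the end.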

            if not arcpy.Exists("output_fc"):
                arcpy.CopyFeatures_management('lakeless_watershed',
                                              "output_fc")
                # to avoid append mismatch due to permanent_identifier
                cu.lengthen_field("output_fc", 'Permanent_Identifier', 255)
            else:
                arcpy.Append_management('lakeless_watershed', "output_fc",
                                        'NO_TEST')
            for item in [
                    'this_lake', 'this_watershed', 'this_lake_jxns',
                    'upstream', 'lakeless_watershed', 'other_tenha_junctions'
            ]:
                try:
                    arcpy.Delete_management(item)
                except:
                    continue

    arcpy.EliminatePolygonPart_management("output_fc", "output_hole_remove",
                                          "AREA", "3.9 Hectares", "0",
                                          "CONTAINED_ONLY")
    arcpy.Clip_analysis("output_hole_remove", "hu4", output_fc)
    arcpy.Delete_management('output_fc')
    arcpy.ResetEnvironments()
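
The where clauses above are built with arcpy.AddFieldDelimiters, which wraps the field name in whatever delimiters the target workspace requires (double quotes for file geodatabases and shapefiles, square brackets for personal geodatabases). A small sketch with a hypothetical geodatabase path and field name:

import arcpy

gdb = r"C:\GIS\NHDH0405.gdb"  # hypothetical path
field_name = "HUC4"           # hypothetical; the function above looks it up with ListFields
where = """{0} = '{1}'""".format(arcpy.AddFieldDelimiters(gdb, field_name), "0405")
# For a file geodatabase this yields: "HUC4" = '0405'
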
Example #29
0
# mergeandeliminate.py
import arcpy, os
mem = "in_memory"
arcpy.env.workspace = mem
infolder = arcpy.GetParameterAsText(0)
outname = arcpy.GetParameterAsText(1)
fc_paths = []
for root, dirs, files in arcpy.da.Walk(infolder):
    for name in files:
        fc_paths.append(os.path.join(root, name))
# Any input that carries NHD_ID can seed the field map
fc = fc_paths[1]

fms = arcpy.FieldMappings()
fm = arcpy.FieldMap()
fm.addInputField(fc, "NHD_ID")
fms.addFieldMap(fm)
arcpy.Merge_management(fc_paths, "merge", fms)
arcpy.EliminatePolygonPart_management("merge", outname, "AREA", "3.9 Hectares",
                                      "0", "CONTAINED_ONLY")
Example #30
0
def ImpP(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld),
                                       str(city) + '_ImpProx.gdb')
    except:
        pass
    workGDB = str(workFld) + '/' + str(city) + '_ImpProx.gdb'
    """ Window Views of Water Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld),
                                       str(city) + '_WaterWV.gdb')
    except:
        pass
    rgGDB = str(workFld) + '/' + str(city) + '_WaterWV.gdb'
    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    """ Split Raster Directory """
    if os.path.isdir(str(workFld) + '/' + city + '_Split') == True:
        pass
    else:
        os.makedirs(str(workFld) + '/' + city + '_Split')
    splitDir = str(workFld) + '/' + city + '_Split'
    """ Set Workspace Environments """
    arcpy.env.workspace = workGDB
    arcpy.env.scratchWorkspace = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_ImpProx_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        reportFile = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'
        """ Write out first line of report file """
        print 'Impervious Proximity Start Time: ' + time.asctime()
        reportFile.write(
            "Begin with EnviroAtlas 1-meter Land Cover for the EnviroAtlas community--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.extent = str(freqDir) + '/LC'
        arcpy.env.snapRaster = str(freqDir) + '/LC'
        """ Set Projection Files """
        prjfileALB = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
        prjNumb = arcpy.Describe(str(freqDir) + '/LC').spatialReference.name
        prjNumb = prjNumb[-3:]
        prjfileUTM = prjDir + '/NAD 1983 UTM Zone ' + prjNumb + '.prj'
        """-------- Reclassify LC into Binary Impervious ----------------------------- """
        if arcpy.Exists(freqDir + '/ImpIO') == False:
            #Run the Imp Space reclassification
            outReclass5 = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 1], [21,
                                                                1], [22, 1],
                                     [30, 0], [40, 0], [52, 0], [70, 0],
                                     [80, 0], [82, 0], [91, 0], [92, 0]]))
            outReclass5.save(freqDir + '/ImpIO')
            del outReclass5
            reportFile.write(
                "Reclassify the Land Cover into Binary Impervious. (Impervious - 20 = 1; All Else = 0)--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("ImpIO--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        else:
            reportFile.write(
                "Reclassify the Land Cover into Binary Impervious. (Impervious - 20 = 1; All Else = 0)--ImpIO"
                + '--\n')
        """-------- Create 1001m Moving Window ------------------------------------- """
        outFocalStat4 = arcpy.sa.FocalStatistics(
            freqDir + '/ImpIO', arcpy.sa.NbrRectangle(1001, 1001, 'CELL'),
            'SUM', 'NODATA')
        outFocalStat4.save('Imp_1001R')
        arcpy.CalculateStatistics_management('Imp_1001R')
        del outFocalStat4
        print("1001m moving winow complete... " + time.asctime())
        reportFile.write(
            "Compute Focal Statistics on the Impervious Binary with 1001 pixel wide by 1001 pixel high rectangular window with statistics type = sum.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Reclassify Moving Window into 20% Breaks ----------------------- """
        outReclass3 = arcpy.sa.Reclassify(
            'Imp_1001R', 'Value',
            arcpy.sa.RemapRange([[0, 200400.2, 20], [200400.2, 400800.4, 40],
                                 [400800.4, 601200.6, 60],
                                 [601200.6, 801600.80, 80],
                                 [801600.8, 1002001, 100]]), 'NODATA')
        outReclass3.save('Imp_Pct')
        del outReclass3
        print("1001m moving winow reclassification complete... " +
              time.asctime())
        reportFile.write(
            "Reclassify the Focal Statistics into 5 classes at 20% intervals.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if arcpy.Exists(freqDir + '/LC_Poly') == False:
            arcpy.env.extent = freqDir + '/LC'
            arcpy.env.snapRaster = freqDir + '/LC'
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21,
                                                                1], [22, 1],
                                     [30, 1], [40, 1], [52, 1], [70, 1],
                                     [80, 1], [82, 1], [91, 1], [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            del ReC
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO',
                str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly',
                str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP',
                str(freqDir) + '/LC_Poly')
            """ Buffer the LC Polygon by -500m """
        if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty',
                str(freqDir) + '/Bnd_Cty_500m', '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')
            """ Identify whether LC is large enough """
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
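        # bigEnough is the count of buffered boundary features that fall
        # completely within the land cover polygon; 0 means the land cover does
        # not fully cover the buffered boundary, which triggers the edge-effect
        # handling further down.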
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')
        """ Create the Water Binary, if necessary """
        if arcpy.Exists(freqDir + '/WaterIO') == True:
            reportFile.write(
                "Create a water binary from the 1-M EnviroAtlas Land Cover. (Water - 10 = 1; All Else = 0).--WaterIO"
                + '--\n')
        else:
            outReclass3 = arcpy.sa.Reclassify(
                freqDir + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 0], [52, 0], [70, 0],
                                     [80, 0], [82, 0], [91, 0], [92, 0]]))
            outReclass3.save(freqDir + '/WaterIO')
            del outReclass3
            reportFile.write(
                "Create a water binary from the 1-M EnviroAtlas Land Cover. (Water - 10 = 1; All Else = 0.)--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("WaterIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        """ Create the Water Region Group, if necessary """
        if arcpy.Exists(rgGDB + '/WatIO_300') == True:
            reportFile.write(
                "Run the region group tool to group waterbodies together with options number of neighbors to use: EIGHT, zone grouping method: WITHIN, and ADD_LINK (add link field to output).--WaterRG"
                + '--\n')
        else:
            RG = arcpy.sa.RegionGroup(freqDir + '/WaterIO', 'EIGHT', 'WITHIN',
                                      'ADD_LINK')
            RG.save(rgGDB + '/WatIO_300')
            del RG
            arcpy.AddField_management(rgGDB + '/WatIO_300', 'Count_2',
                                      'DOUBLE')
            arcpy.CalculateField_management(rgGDB + '/WatIO_300', 'Count_2',
                                            '!COUNT!', 'PYTHON')
            reportFile.write(
                "Run the region group tool to group waterbodies together with options number of neighbors to use: EIGHT, zone grouping method: WITHIN, and ADD_LINK (add link field to output).--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("WaterRG--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        """ Limit the WatIO_300 to the Extent of Imp_Pct """
        ##        if bigEnough == 0:
        ##            EbM = arcpy.sa.ExtractByMask(rgGDB + '/WatIO_300', 'Imp_Pct')
        ##            EbM.save('WatRG_Lim_1001')
        ##            del EbM
        ##            print("Extract by mask complete... " + time.asctime())
        ##            reportFile.write("Restrict the extent of the water bodies raster to that of the percent impervious raster.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        if bigEnough == 0:
            desc = arcpy.Describe('Imp_Pct')
            rectangle = "%s %s %s %s" % (desc.extent.XMin, desc.extent.YMin,
                                         desc.extent.XMax, desc.extent.YMax)
            IP01 = arcpy.sa.Reclassify('Imp_Pct', 'Value',
                                       arcpy.sa.RemapRange([[0, 100, 1]]),
                                       'NODATA')
            arcpy.RasterToPolygon_conversion(IP01, 'Imp_Pct_poly',
                                             "NO_SIMPLIFY", "Value")
            del IP01
            arcpy.Clip_management(rgGDB + '/WatIO_300', rectangle,
                                  'WatRG_Lim_1001', 'Imp_Pct_poly', '#',
                                  'ClippingGeometry')
            reportFile.write(
                "Restrict the extent of the water bodies raster to that of the percent impervious raster.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Burn water into the Imp_Pct """
        if bigEnough == 0:
            burnwater = arcpy.sa.Con('WatRG_Lim_1001', '-99999', 'Imp_Pct',
                                     'Count_2 > 299 AND LINK = 1')
        else:
            burnwater = arcpy.sa.Con(rgGDB + '/WatIO_300', '-99999', 'Imp_Pct',
                                     'Count_2 > 299 AND LINK = 1')
        burnwater.save('Imp_Pct_Wat')
        del burnwater
        print("Water burned into Imp_Pct... " + time.asctime())
        reportFile.write(
            "Using the region group output, burn water bodies over 300m2 into the impervious percent raster using a conditional statement if Region Group Count > 299 AND Link = 1; for true: -99999; for false: impervious percent raster--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Convert to Esri GRID format """
        try:
            arcpy.Delete_management(splitDir + '/imp_pct_wat')
        except:
            pass
        arcpy.RasterToOtherFormat_conversion('Imp_Pct_Wat', splitDir, 'GRID')
        reportFile.write("Convert the raster into Esri GRID format--" +
                         time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Split the Raster As Needs, Process Each Piece ----------------- """
        """ Check if the raster should be split """
        columns = arcpy.GetRasterProperties_management(
            'Imp_Pct_Wat', 'COLUMNCOUNT').getOutput(0)
        xsplit = int(float(columns) / 40000) + 1
        rows = arcpy.GetRasterProperties_management('Imp_Pct_Wat',
                                                    'ROWCOUNT').getOutput(0)
        ysplit = int(float(rows) / 40000) + 1
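        # Tiling arithmetic: int(columns / 40000) + 1 tiles across and
        # int(rows / 40000) + 1 tiles down; for example, 65,000 columns would
        # give xsplit = 2.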
        """--------- Clip the EA Boundaries to the County Lines, if necessary ----- """
        if arcpy.Exists(str(freqDir) + '/Bnd_Cty') == False:
            """ Copy Counties to Frequent and Project """
            arcpy.MakeFeatureLayer_management(
                str(inDir) + '/Input.gdb/Counties_Alb', 'Cty')
            arcpy.SelectLayerByLocation_management('Cty', 'CONTAINS', 'BG_Alb',
                                                   '', 'NEW_SELECTION')
            # Export only the selected counties into the Frequent geodatabase
            arcpy.FeatureClassToFeatureClass_conversion('Cty', str(freqDir),
                                                        'Counties_Alb')
            arcpy.SelectLayerByAttribute_management('Cty', 'CLEAR_SELECTION')
            descLC = arcpy.Describe(str(freqDir) + '/LC')
            arcpy.Project_management(
                str(freqDir) + '/Counties_Alb',
                str(freqDir) + '/Counties', descLC.spatialReference)
            """ Clip Boundary to County Lines """
            arcpy.Clip_analysis(
                str(freqDir) + '/Bnd',
                str(freqDir) + '/Counties',
                str(freqDir) + '/Bnd_Cty')
            reportFile.write(
                "Clip the EnviroAtlas Community Boundary to the county lines for the community to limit the output to land area.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("Bnd_Cty--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        else:
            reportFile.write(
                "Clip the EnviroAtlas Community Boundary to the county lines for the community to limit the output to land area.--Bnd_Cty"
                + '--\n')
            """-------- If no split, run the analysis --------------------------------- """
        if xsplit * ysplit == 1:
            """ Convert Raster to Polygon """
            arcpy.RasterToPolygon_conversion('Imp_Pct_Wat', 'Poly_Imp',
                                             'NO_SIMPLIFY')
            reportFile.write(
                "Convert the raster into a polygon feature class.--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Clip the polygons to Bnd_Cty """
            arcpy.Clip_analysis('Poly_Imp', freqDir + '/Bnd_Cty', 'Clip_Imp')
            reportFile.write(
                "Clip the polygon feature class to the clipped EnviroAtlas community boundary.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Dissolve the polygons """
            arcpy.Dissolve_management('Clip_Imp', 'ImpDiss', 'gridcode')
            reportFile.write(
                "Dissolve the clipped polygons based on grid_code.--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """-------- If split, run the analysis on each piece and recombine --------- """
        else:
            """ Delete the raster, if necessary """
            xy = (xsplit * ysplit)
            for rast in range(xy):
                try:
                    arcpy.Delete_management(splitDir + '/impp_' + str(rast))
                except:
                    pass
            """ Split the Raster """
            print("Raster too big, splitting into " + str(xy) + " pieces... " +
                  time.asctime())
            arcpy.SplitRaster_management(splitDir + '/imp_pct_wat', splitDir,
                                         'impp_', 'NUMBER_OF_TILES', 'GRID',
                                         '',
                                         str(xsplit) + ' ' + str(ysplit))
            print("Raster split complete... " + time.asctime())
            reportFile.write(
                "Split the raster into pieces of no more than 40,000x40,000 pixels.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ For each raster: """
            for Chunk in range(0, xy):
                try:
                    result = float(
                        arcpy.GetRasterProperties_management(
                            splitDir + '/impp_' + str(Chunk),
                            'MEAN').getOutput(0))
                    """ If the raster piece has data: """
                    if result != 0:
                        """ Set Environments """
                        arcpy.env.snapRaster = 'Imp_1001R'
                        arcpy.env.extent = 'Imp_1001R'
                        """ Copy the piece back to the Working Directory """
                        arcpy.RasterToOtherFormat_conversion(
                            splitDir + '/impp_' + str(Chunk), workGDB)
                        """ Convert Raster to Polygon """
                        arcpy.RasterToPolygon_conversion(
                            'impp_' + str(Chunk), 'ImpPoly_' + str(Chunk),
                            'NO_SIMPLIFY')
                        """ Clip the polygons to Bnd_Cty """
                        arcpy.Clip_analysis('ImpPoly_' + str(Chunk),
                                            freqDir + '/Bnd_Cty',
                                            'ImpClip_' + str(Chunk))
                        """ Dissolve the polygons """
                        arcpy.Dissolve_management('ImpClip_' + str(Chunk),
                                                  'ImpD1_' + str(Chunk),
                                                  'gridcode')
                        print("Processed Chunk " + str(Chunk) + " / " +
                              str(xy) + "..." + time.asctime())
                    else:
                        pass
                except:
                    pass
            reportFile.write(
                "Convert each of the raster pieces into a polygon feature class.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            reportFile.write(
                "Clip the polygon feature classes to the clipped EnviroAtlas community boundary.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            reportFile.write("Dissolve each piece based on grid_code.--" +
                             time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Merge the polygons back together """
            fcList = arcpy.ListFeatureClasses('ImpD1*')
            arcpy.Merge_management(fcList, 'ImpDiss')
            reportFile.write("Merge the pieces back together.--" +
                             time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Dissolve the pieces again """
        arcpy.Dissolve_management('ImpDiss', 'ImpProx_UTM', 'gridcode')
        reportFile.write("Dissolve again based on grid_code.--" +
                         time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Change gridcode to ImpProxP """
        arcpy.AlterField_management('ImpProx_UTM', 'gridcode', 'ImpProxP')
        reportFile.write("Rename field 'gridcode' into field 'ImpProxP'--" +
                         time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            ##            """ Extract the area where the boundary extends beyond the analysis area """
            ##            arcpy.Erase_analysis(freqDir + '/Bnd_Cty', 'ImpProx_UTM', 'EE_Area')
            ##            arcpy.AddField_management('EE_Area', 'ImpProxP', 'LONG')
            ##            arcpy.CalculateField_management('EE_Area', 'ImpProxP', -99998, 'PYTHON_9.3')
            ##            """ Append the non-analyzed area onto the main output """
            ##            arcpy.Append_management('EE_Area', 'ImpProx_UTM')
            ##            """ Dissolve the output, again """
            ##            arcpy.Dissolve_management('ImpProx_UTM', 'ImpProx_EE_D', 'ImpProxP')
            ##            reportFile.write("Due to the extent of the Land Cover, the analysis area is smaller than the EnviroAtlas Community Boundary. Extract the area within the boundary that is not within the analysis area, add field 'ImpProxP' = -99998 and append on to the output file. Dissolve based on ImpProxP.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Buffer each GreClip feature by 1 cm and erase from Bnd_Cty """
            fcList = arcpy.ListFeatureClasses('ImpClip*')
            for idx, fc in enumerate(fcList):
                arcpy.Buffer_analysis(fc, (fc + "_1cmbuff"), "1 Centimeters",
                                      "FULL", "ROUND", "ALL")
                #arcpy.Buffer_analysis(fc + "_inbuff", fc + "outbuff", "1 Centimeter", "FULL", "ROUND", "ALL")
                if idx == 0:
                    arcpy.Erase_analysis(freqDir + '/Bnd_Cty',
                                         (fc + "_1cmbuff"),
                                         'EE_Area_' + str(idx))
                else:
                    arcpy.Erase_analysis('EE_Area_' + str(idx - 1),
                                         fc + "_1cmbuff",
                                         'EE_Area_' + str(idx))
                a = idx
                print("Erased chunk " + str(idx + 1) + " / " +
                      str(len(fcList)) + "..." + time.asctime())
            arcpy.Buffer_analysis('EE_Area_' + str(a), 'EE_Area_2cmbuff',
                                  '2 Centimeters', "FULL", "ROUND", "ALL")
            arcpy.Clip_analysis('EE_Area_2cmbuff', freqDir + '/Bnd_Cty',
                                'EE_Area')
            arcpy.AddField_management('EE_Area', 'ImpProxP', 'LONG')
            arcpy.CalculateField_management('EE_Area', 'ImpProxP', -99998,
                                            'PYTHON_9.3')
            """ Append the non-analyzed area onto the main output """
            arcpy.Append_management('EE_Area', 'ImpProx_UTM')
            """ Dissolve the output, again """
            arcpy.Dissolve_management('ImpProx_UTM', 'ImpProx_EE_D',
                                      'ImpProxP')
            print("Big enough workflow complete... " + time.asctime())
            reportFile.write(
                "Due to the extent of the Land Cover, the analysis area is smaller than the EnviroAtlas Community Boundary. Extract the area within the boundary that is not within the analysis area, add field 'ImpProxP' = -99998 and append on to the output file. Dissolve based on ImpProxP.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Project the output """
        try:
            arcpy.Project_management('ImpProx_EE_D', city + '_ImpProx',
                                     prjfileALB)
        except:
            arcpy.Project_management('ImpProx_UTM', city + '_ImpProx',
                                     prjfileALB)
        reportFile.write("Project the feature class into Albers.--" +
                         time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add to Final Directory """
        arcpy.FeatureClassToFeatureClass_conversion(city + '_ImpProx',
                                                    finalDir,
                                                    city + '_ImpProx')
        reportFile.write("Copy the feature class to the final geodatabase.--" +
                         time.strftime('%Y%m%d--%H%M%S') + '--\n')
        print 'Impervious Proximity End Time: ' + time.asctime() + '\n'

        #-------- COMPLETE LOGFILES ---------------------------------------------
        reportFile.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        reportFile.write("\nSomething went wrong.\n\n")
        reportFile.write("Pyton Traceback Message below:")
        reportFile.write(traceback.format_exc())
        reportFile.write("\nArcMap Error Messages below:")
        reportFile.write(arcpy.GetMessages(2))
        reportFile.write("\nArcMap Warning Messages below:")
        reportFile.write(arcpy.GetMessages(1))

        reportFile.write("\n\nEnded at " + time.asctime() + '\n')
        reportFile.write("\n---End of Log File---\n")

        if reportFile:
            reportFile.close()