Example #1
0
def check_area(input_file, workspace):
    """Sanity-check polygon areas for *input_file*.

    Re-projects the input into a cylindrical equal-area projection, runs
    Calculate Areas, and sums both the original AREA field and the newly
    computed F_AREA field (converted from square meters to km^2).

    Returns a list of three strings: [original sum, recomputed sum,
    difference between the two].
    """
    # Project to an equal-area CRS so the recomputed areas are meaningful.
    reprojected = workspace + '\\Reproject.shp'
    projection = os.path.dirname(os.path.abspath(
        __file__)) + '\\projection\\Cylindrical_Equal_Area_world.prj'
    ARCPY.Project_management(input_file, reprojected, projection)

    area_shapefile = workspace + '\\Area_Shapefile.shp'
    ARCPY.CalculateAreas_stats(reprojected, area_shapefile)

    area_before = 0
    area_after = 0
    for record in ARCPY.SearchCursor(area_shapefile):
        area_before += record.AREA
        area_after += record.F_AREA / 1000000  # m^2 -> km^2

    # Remove the intermediate shapefiles.
    ARCPY.Delete_management(reprojected)
    ARCPY.Delete_management(area_shapefile)

    return [str(area_before), str(area_after), str(area_before - area_after)]
Example #2
0
def filter_polygon(state, extract_comb):
    """Convert *extract_comb* to polygons and attach polygon areas.

    Writes two shapefiles under the state's output folder: poly_1.shp
    (raw raster-to-polygon result, no simplification, dissolved on VALUE)
    and poly_2.shp (same polygons with an F_AREA field added).
    """
    state_dir = constants.out_dir + os.sep + state + os.sep
    raw_polygons = state_dir + 'poly_1.shp'
    polygons_with_area = state_dir + 'poly_2.shp'

    arcpy.RasterToPolygon_conversion(extract_comb, raw_polygons, "NO_SIMPLIFY",
                                     "VALUE")
    arcpy.CalculateAreas_stats(raw_polygons, polygons_with_area)
def Read_Attribute_Table_To_EXCEL(featureClass, hotorcold):
    '''
    To write the attribute table of feature class into excel file
    :param featureClass: feature class name of input
    :param hotorcold: select hot or cold spot analysis (also used as the
        worksheet name and the output file stem "<hotorcold>.xls")
    :return: None; writes one row of names, one row of areas, and a SUM
        column, then deletes the temporary "<featureClass>Area" output
    '''
    # Derive a temporary feature class that carries the F_AREA field.
    arcpy.CalculateAreas_stats(featureClass, featureClass + "Area")
    wb = xlwt.Workbook()
    ws = wb.add_sheet(hotorcold)
    ws.write(0, 0, featureClass + "Area")
    ws.write(1, 0, "Area")
    col = 1
    total = 0  # renamed from ``sum`` to avoid shadowing the builtin
    # ``with`` guarantees the cursor (and its dataset locks) are released.
    with arcpy.da.SearchCursor(featureClass + "Area",
                               ['name', 'F_AREA']) as rows:
        for row in rows:
            ws.write(0, col, row[0].encode('utf-8'))
            ws.write(1, col, row[1])
            total += row[1]
            col += 1
    ws.write(0, col, "SUM")
    ws.write(1, col, total)
    wb.save(hotorcold + '.xls')
    # Remove the temporary area feature class.
    if arcpy.Exists(featureClass + "Area"):
        arcpy.Delete_management(featureClass + "Area")
    print("Complete reading name and area to excel file!")
Example #4
0
def check_area(input_file, workspace):
    """Compare original and recomputed polygon area sums.

    Projects *input_file* into a cylindrical equal-area projection, runs
    Calculate Areas, and totals the pre-existing AREA field alongside the
    freshly computed F_AREA field (scaled from m^2 to km^2). Intermediate
    shapefiles are removed before returning.

    Returns [str(original sum), str(recomputed sum), str(difference)].
    """
    from utilities.projection import cylindrical_equal_area

    # Project to Equal Area so the recomputed areas are meaningful.
    reprojected = workspace + '\\Reproject.shp'
    arcpy.Project_management(input_file, reprojected, cylindrical_equal_area())

    area_shapefile = workspace + '\\Area_Shapefile.shp'
    arcpy.CalculateAreas_stats(reprojected, area_shapefile)

    sum_before, sum_after = 0, 0
    for record in arcpy.SearchCursor(area_shapefile):
        sum_before += record.AREA
        sum_after += record.F_AREA / 1000000  # square meters -> km^2

    # Clean up the intermediate outputs.
    arcpy.Delete_management(reprojected)
    arcpy.Delete_management(area_shapefile)

    return [str(sum_before), str(sum_after), str(sum_before - sum_after)]
Example #5
0
    def mu_maker(self, h_raster, u_raster, full_out_ras_name,
                 full_out_shp_name, *mu):
        """Create a morphological-unit (MU) raster and polygon shapefile.

        Combines the depth and velocity rasters (via ``self.calculate_mu``)
        into an MU raster, saves it, converts it to polygons with areas
        attached, and labels each polygon with its MU name in a new
        "MorphUnit" text field.
        """
        # h_raster: STR - full path to depth raster
        # u_raster: STR - full path to velocity raster
        # full_out_ras_name: STR - full path of the results raster name
        # full_out_shp_name: STR - full path of the result shapefile name
        # mu = LIST(STR) - (optional) - restricts analysis to a list of morphological units according to mu.xlsx

        # start with raster calculations
        self.logger.info("Raster Processing    --- --- ")
        self.license_state = arcpy.CheckOutExtension(
            'Spatial')  # check out license
        arcpy.gp.overwriteOutput = True
        arcpy.env.workspace = self.path
        arcpy.env.extent = "MAXOF"

        try:
            self.mu_names = mu[
                0]  # limit mu analysis to optional list, if provided
        except:
            # NOTE(review): bare except is meant to swallow the IndexError
            # raised when no optional *mu argument was passed, but it would
            # also hide any other error.
            pass

        out_ras = self.calculate_mu(h_raster, u_raster)

        try:
            self.logger.info(" > Saving Raster ...")
            out_ras.save(full_out_ras_name)
            self.logger.info("   * OK")
        except:
            # Save failures are only logged; processing continues and the
            # raster-to-polygon step below will then fail on the missing file.
            self.logger.info("ERROR: Could not save MU raster.")
        arcpy.CheckInExtension('Spatial')  # release license
        self.logger.info("Raster Processing OK     --- \n")

        self.logger.info("Shapefile Processing --- --- ")
        self.logger.info(" > Converting mu raster to shapefile ...")
        # Temporary name "<out>1.shp" sits next to the requested output.
        temporary_shp = full_out_shp_name.split(".shp")[0] + "1.shp"
        arcpy.RasterToPolygon_conversion(arcpy.Raster(full_out_ras_name),
                                         temporary_shp, 'NO_SIMPLIFY')

        self.logger.info(" > Calculating Polygon areas ...")
        arcpy.CalculateAreas_stats(temporary_shp, full_out_shp_name)

        self.logger.info("   * OK - Removing remainders ...")
        arcpy.Delete_management(temporary_shp)

        self.logger.info(" > Adding MU field ...")
        arcpy.AddField_management(full_out_shp_name,
                                  "MorphUnit",
                                  "TEXT",
                                  field_length=50)
        # Map each polygon's gridcode back to its MU name by inverting the
        # name->number mapping in self.mu_names_number.
        expression = "the_dict[!gridcode!]"
        codeblock = "the_dict = " + str(
            dict(
                zip(self.mu_names_number.values(),
                    self.mu_names_number.keys())))
        arcpy.CalculateField_management(full_out_shp_name, "MorphUnit",
                                        expression, "PYTHON", codeblock)
        self.logger.info("Shapefile Processing OK  --- ")
def get_area(feature_class):
    '''Return the total area of a feature class.

    Runs Calculate Areas into a scratch feature class and sums the
    resulting "F_AREA" field over every row.

    :param feature_class: the input polygon feature class
    :return: the summed F_AREA value (0 if the input has no features)
    '''

    temp_fc = generate_gdb_filename(return_full=True)
    arcpy.CalculateAreas_stats(feature_class, temp_fc)
    area_field = "F_AREA"  # this is hardcoded, but now guaranteed because it is added to a copy and the field is updated if it already exists

    total_area = 0
    area_curs = arcpy.SearchCursor(temp_fc)
    try:
        for row in area_curs:
            total_area += row.getValue(area_field)
    finally:
        # Release the cursor even on error. The previous ``del row`` after
        # the loop raised NameError whenever the feature class was empty;
        # the loop variable itself needs no explicit deletion.
        del area_curs

    return total_area
def raster_to_polygon (feature, raster, workspace, raster_scaling = 1000):
    """Convert raster to a features class, clip it to an input feature and
    calculate the area of each polygon. This new feature class is then 
    returned for calculating statistics. """
    
    # Scale the subset DEM and temporarily save it to file. If it is not
    # saved an error is sometimes thrown when converting to polygon.
    # There is no good reason for this VAT error.
    rand_id = str(random.randrange(10000, 999999))
    subset_name = workspace + '\\raster_to_poly_' + rand_id + '.img'
    # Scale and round to int (the +0.5 implements round-half-up for
    # non-negative values).
    subset = spatial.Int(spatial.Raster(raster) * raster_scaling + 0.5)
    subset.save(subset_name)

    # NOTE(review): the polygon output path is subset_name — the same .img
    # path the scaled raster was just saved to. Presumably the tool
    # overwrites it (or appends its own extension); confirm this is intended.
    polygon = ARCPY.RasterToPolygon_conversion(subset, subset_name, "NO_SIMPLIFY")
    clipped = ARCPY.Clip_analysis(polygon, feature.shape, 'in_memory\\clip_' + rand_id)
    # The returned feature class carries an F_AREA field per polygon.
    feature = ARCPY.CalculateAreas_stats(clipped, 'in_memory\\area_'+ rand_id)
    
    # Remove the intermediates; only the area feature class survives.
    ARCPY.Delete_management(subset)
    ARCPY.Delete_management(polygon)
    ARCPY.Delete_management(clipped)
    
    return feature
    
    
Example #8
0
        sys.exit()

    if Want_CloudRemoval != 'True':
        mask_dir = os.makedirs(workspace + 'empty')
    import DebrisMap
    DebrisMap.DebrisMap(workspace, data_dir, landsat, shp_dir, mask_dir,
                        A_remove, A_fill, Want_CloudRemoval)
    finddeb = arcpy.ListFeatureClasses('*MERGED*')
    debarea = workspace + finddeb[0]
    del finddeb
    if Want_CloudRemoval != 'True':
        del mask_dir
        arcpy.Delete_management(workspace + 'empty')

##-----------------------------------------  CliffProcessingSegments  ------------------------------------------------
# Total debris area in m2: Calculate Areas adds an F_AREA field, which is
# read back with a search cursor. The loop keeps only the last row's value,
# so this assumes the merged debris layer holds a single feature —
# TODO confirm.
arcpy.CalculateAreas_stats(debarea, 'debareaMeters.shp')
rows = arcpy.SearchCursor('debareaMeters.shp')
for row in rows:
    debarea_m2 = row.getValue("F_AREA")
del row, rows
arcpy.Delete_management('debareaMeters.shp')
# Strip the last folder from the workspace path: str.count('') returns
# len(str) + 1, so the slice removes the final path component
# (split("\\")[-2]) together with its trailing backslash.
workspaceSplit = workspace.split("\\")[-2]
workspace = workspace[:-workspaceSplit.count('')]
workspace = workspace + 'CliffProcessingSegments\\'
fishnetRes = L_t  #name follows Herreid and Pellicciotti, 2018
lookDistance = n_c  #name follows Herreid and Pellicciotti, 2018
try:
    os.makedirs(workspace)
    env.workspace = workspace
except:
    # NOTE(review): bare except treats every failure as "already exists".
    print "Cliff segmentation workspace cannot be created. It may already exist."
Example #9
0
    def glacier_debris(band_4, band_5, glacier_outline, out_dir):
        """Derive a debris-cover map for a glacier from Landsat bands 4 and 5.

        Masks both bands to the glacier outline (optionally cloud-masked
        first), thresholds the band4/band5 ratio into a level-1 raster, then
        cleans the resulting debris polygons: polygons <= A_remove m2 are
        deleted and holes <= A_fill m2 are filled. Relies on enclosing-scope
        settings: Want_CloudRemoval, mask_dir, threshold, Lband, Hband,
        A_remove, A_fill.
        """
        print 'Running glacier_debris'
        if Want_CloudRemoval == 'True':
            # Apply the per-scene cloud mask before clipping to the glacier.
            outExtractByMask = ExtractByMask(
                band_4,
                mask_dir + '\\' + band_4.split('\\')[-1].split('_b')[0][0:16] +
                band_4.split('\\')[-1].split('_b')[0][17:21] + 'mask.shp')
            outExtractByMask.save('del_nodatagone4.TIF')
            outExtractByMask = ExtractByMask(
                band_5,
                mask_dir + '\\' + band_4.split('\\')[-1].split('_b')[0][0:16] +
                band_4.split('\\')[-1].split('_b')[0][17:21] + 'mask.shp')
            outExtractByMask.save('del_nodatagone5.TIF')
            outExtractByMask = ExtractByMask('del_nodatagone4.TIF',
                                             glacier_outline)
            outExtractByMask.save('del_mask4.TIF')
            outExtractByMask = ExtractByMask('del_nodatagone5.TIF',
                                             glacier_outline)
            outExtractByMask.save('del_mask5.TIF')
            print 'extract'
        else:
            outExtractByMask = ExtractByMask(band_4, glacier_outline)
            outExtractByMask.save('del_mask4.TIF')
            outExtractByMask = ExtractByMask(band_5, glacier_outline)
            outExtractByMask.save('del_mask5.TIF')
            print 'extract'
        #Convert Raster to float for decimal threshold values
        arcpy.RasterToFloat_conversion('del_mask4.TIF', 'del_band_4a.flt')
        arcpy.RasterToFloat_conversion('del_mask5.TIF', 'del_band_5a.flt')
        arcpy.Divide_3d('del_band_4a.flt', 'del_band_5a.flt',
                        'del_division.TIF')
        print 'division'
        # Cells with band4/band5 ratio above the threshold become NoData;
        # what remains is classified as debris.
        outSetNull = SetNull('del_division.TIF', 'del_division.TIF',
                             'VALUE > ' + str(threshold))

        #path to results folder, for loops add a counter if images are from the same year and day
        # Encodes scene metadata parsed from the band-4 filename: year, day,
        # Landsat number, band pair, threshold, and cleanup parameters.
        result_name = glacier_outline.split('.shp')[0].split(
            '\\'
        )[-1] + '_' + band_4.split('\\')[-1][9:13] + 'y' + band_4.split(
            '\\')[-1][13:16] + 'd' + '_L' + band_4.split(
                '\\')[-1][2:3] + '_' + Lband.split('_')[-1][1:2] + Hband.split(
                    '_')[-1][1:2] + 'b' + str(int(
                        threshold *
                        100)) + 't' + str(A_remove) + 'r' + str(A_fill) + 'f'
        result_path = out_dir + glacier_outline.split('.shp')[0].split(
            '\\'
        )[-1] + '_' + band_4.split('\\')[-1][9:13] + 'y' + band_4.split(
            '\\')[-1][13:16] + 'd' + '_L' + band_4.split(
                '\\')[-1][2:3] + '_' + Lband.split('_')[-1][1:2] + Hband.split(
                    '_')[-1][1:2] + 'b' + str(int(
                        threshold *
                        100)) + 't' + str(A_remove) + 'r' + str(A_fill) + 'f'

        # Append a run counter (1-6) so repeat runs over the same scene do
        # not overwrite earlier results. (str(...) == 'True' is a roundabout
        # truth test on the membership check.)
        if str(result_name + '1.shp' in os.listdir(out_dir)) == 'True':
            result_path = result_path + '2'
        elif str(result_name + '2.shp' in os.listdir(out_dir)) == 'True':
            result_path = result_path + '3'
        elif str(result_name + '3.shp' in os.listdir(out_dir)) == 'True':
            result_path = result_path + '4'
        elif str(result_name + '4.shp' in os.listdir(out_dir)) == 'True':
            result_path = result_path + '5'
        elif str(result_name + '5.shp' in os.listdir(out_dir)) == 'True':
            result_path = result_path + '6'
        else:
            result_path = result_path + '1'

        result_file = result_path + '.TIF'
        print 'result file: ' + result_file

        outSetNull.save(result_file)
        print 'Level 1 product produced'

        #Float raster to integer
        outInt = Int(result_file)
        outInt.save('del_result_file_int.TIF')
        # Set local variables
        inRaster = 'del_result_file_int.TIF'
        outPolygons = 'del_debris.shp'
        field = 'VALUE'
        arcpy.RasterToPolygon_conversion(inRaster, outPolygons, 'NO_SIMPLIFY',
                                         field)
        print 'to polygon'

        #Process: Dissolve. need to create "value" row where all elements=0
        arcpy.AddField_management('del_debris.shp', 'value', 'SHORT', 1, '',
                                  '', '', '', '')
        arcpy.Dissolve_management('del_debris.shp', 'del_debris_dissolve.shp',
                                  'value')
        print 'dissolve'
        # Run the tool to create a new fc with only singlepart features
        arcpy.MultipartToSinglepart_management('del_debris_dissolve.shp',
                                               'del_explode.shp')
        print 'explode'
        # Process: Calculate polygon area (km2)
        arcpy.CalculateAreas_stats('del_explode.shp', 'del_area.shp')
        arcpy.MakeFeatureLayer_management('del_area.shp', 'tempLayer')
        # Execute SelectLayerByAttribute to determine which features to delete
        expression = 'F_AREA <=' + str(A_remove)  # m2
        arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION',
                                                expression)
        arcpy.DeleteFeatures_management('tempLayer')
        print 'Shapes with an area <= ' + str(
            A_remove) + ' m2 removed; ' + str(
                A_remove / 900) + ' pixles, if 30m pixels'
        #Delete polygons < xx m2
        arcpy.Delete_management('tempLayer')
        print 'tempLayer deleted'
        result_file2 = result_path + '.shp'
        print 'Level 2 result file: ' + result_file2
        #Process: aggrigate (distance=1 m minimum area=0 minimum hole size=xx m: )
        CA.AggregatePolygons('del_area.shp', result_file2, 1, 0, A_fill,
                             'NON_ORTHOGONAL')
        print 'holes with an area <= ' + str(
            A_fill) + ' m2 filled/merged with debris polygon; ' + str(
                A_fill / 900) + ' pixles, if 30m pixels'

        # All intermediates were named 'del_*' so they can be swept here.
        rasterList = arcpy.ListRasters('*del*')
        for raster in rasterList:
            arcpy.Delete_management(raster)

        fcList = arcpy.ListFeatureClasses('*del*')
        for fc in fcList:
            arcpy.Delete_management(fc)

        print 'intermediate files deleted'
        print 'level 2 product produced'
def connect_clusters(linkTable):
    """Group core fragments into clusters by Euclidean distance.

    Copies the core feature class, assigns a cluster ID per fragment, then
    iterates over links, merging any two clusters whose fragments are
    closer than cfg.MAXEUCDIST. Writes dissolved cluster shapefiles (with
    areas) to SCRATCHDIR, a final per-core shapefile with cluster ID and
    cluster area to PROJECTDIR, and an updated link table CSV.
    """
    # CUSTOM Fragment connecting code
    try:
        clusterFC = path.join(
            cfg.SCRATCHDIR,
            "Cores_Grouped_dist" + str(int(cfg.MAXEUCDIST)) + ".shp")
        arcpy.CopyFeatures_management(cfg.COREFC, clusterFC)

        gprint('Running custom fragment connecting code.')
        numLinks = linkTable.shape[0]

        # Seed every fragment's cluster ID with its own core ID.
        cluster_ID = 'clus' + str(int(cfg.MAXEUCDIST))
        if arcpy.ListFields(clusterFC, cluster_ID):
            arcpy.DeleteField_management(clusterFC, cluster_ID)
        arcpy.AddField_management(clusterFC, cluster_ID, "LONG")

        rows = arcpy.UpdateCursor(clusterFC)
        row = next(rows)
        while row:
            # linkCoords indices
            fragID = row.getValue(cfg.COREFN)
            row.setValue(cluster_ID, fragID)
            # NOTE(review): classic arcpy cursors document this method as
            # updateRow(); the capitalized spelling relies on the legacy
            # geoprocessor's case-insensitive dispatch — confirm.
            rows.UpdateRow(row)
            row = next(rows)
        del row, rows

        linkTable[:, cfg.LTB_CLUST1] = linkTable[:, cfg.LTB_CORE1]
        linkTable[:, cfg.LTB_CLUST2] = linkTable[:, cfg.LTB_CORE2]

        #if frags less than cutoff set cluster_ID equal.
        for x in range(0, numLinks):
            gprint("link #" + str(x + 1))
            # Set newfragmentID of 2nd fragment to that of frag 1
            frag1ID = linkTable[x, cfg.LTB_CLUST1]
            frag2ID = linkTable[x, cfg.LTB_CLUST2]
            if frag1ID == frag2ID:
                continue
            eucDist = linkTable[x, cfg.LTB_EUCDIST]

            if eucDist < cfg.MAXEUCDIST:
                gprint("Joining fragments " + str(frag1ID) + " and " +
                       str(frag2ID) + " separated by distance " + str(eucDist))
                # update linktable to new fragment ID in cluster field
                rows = npy.where(linkTable[:, cfg.LTB_CLUST1] == frag2ID)
                linkTable[rows, cfg.LTB_CLUST1] = frag1ID
                rows = npy.where(linkTable[:, cfg.LTB_CLUST2] == frag2ID)
                linkTable[rows, cfg.LTB_CLUST2] = frag1ID
                del rows

                # update shapefile to new fragment ID in cluster_ID field
                rows = arcpy.UpdateCursor(clusterFC)
                row = next(rows)
                while row:
                    if row.getValue(cluster_ID) == frag2ID:
                        row.setValue(cluster_ID, frag1ID)
                    rows.UpdateRow(row)
                    row = next(rows)
                del row, rows

        gprint('Done Joining.  Creating output shapefiles.')

        coreBaseName = path.splitext(path.basename(cfg.COREFC))[0]

        # Dissolve fragments by cluster ID, then attach F_AREA per cluster.
        outputFN = coreBaseName + "_Cluster" + str(int(
            cfg.MAXEUCDIST)) + "_dissolve.shp"
        outputShapefile = path.join(cfg.SCRATCHDIR, outputFN)
        arcpy.Dissolve_management(clusterFC, outputShapefile, cluster_ID)
        outputFN = coreBaseName + "_Cluster" + str(int(
            cfg.MAXEUCDIST)) + "_dissolve_area.shp"
        coreFCWithArea = path.join(cfg.SCRATCHDIR, outputFN)
        arcpy.CalculateAreas_stats(outputShapefile, coreFCWithArea)

        outputFN = coreBaseName + "_Cluster" + str(int(
            cfg.MAXEUCDIST)) + ".shp"
        clusterFCFinal = path.join(cfg.PROJECTDIR, outputFN)
        arcpy.CopyFeatures_management(clusterFC, clusterFCFinal)

        # Update final core featureclass with cluster ID and area
        arcpy.AddField_management(clusterFCFinal, cluster_ID, "LONG")
        arcpy.AddField_management(clusterFCFinal, "clust_area", "DOUBLE")

        #run through rows- get cluster id, then get area from coreFCwitharea using searchcursor

        rows = arcpy.UpdateCursor(clusterFCFinal)
        row = next(rows)
        while row:
            # linkCoords indices
            clustID = row.getValue(cluster_ID)
            # Linear scan of the dissolved layer for the matching cluster.
            rows2 = arcpy.SearchCursor(coreFCWithArea)
            row2 = next(rows2)
            while row2:
                if row2.getValue(cluster_ID) == clustID:
                    fArea = "F_AREA"
                    clustArea = row2.getValue(fArea)
                    break
                row2 = next(rows2)
            # NOTE(review): if no dissolved row matches clustID, clustArea is
            # unbound on the first pass (NameError) or stale on later passes.
            row.setValue("clust_area", clustArea)
            rows.UpdateRow(row)
            row = next(rows)
        del row, rows, row2, rows2
        gprint('Cores with cluster ID and cluster area written to: ' +
               clusterFCFinal)

        outlinkTableFile = path.join(cfg.DATAPASSDIR, 'linktable_clusters.csv')
        gprint('Writing ' + outlinkTableFile)
        lu.write_link_table(linkTable, outlinkTableFile)

        ##########################################################

    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 2. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
Example #11
0
    arcpy.CopyFeatures_management("SH_all_islands_dissolved by spp and island",
                                  'sp_CH_data')

    #select species point data
    spp_pt_data = "%sSp_point_data2.shp" % (datadir)

    if os.path.exists(spp_pt_data):
        point_filtered = 1

        #REMOVE LARGE POLYGONS BASED ON POINT DATA
        arcpy.MultipartToSinglepart_management(
            "sp_CH_data",
            "sp_CH_data_multi")  #make each polygon a distinct feature
        n_original_polygons = arcpy.GetCount_management("sp_CH_data_multi")
        n_original_polygons = int(n_original_polygons[0])
        arcpy.CalculateAreas_stats(
            "sp_CH_data_multi", "sp_CH_data_multi_w_area")  #calc polygon areas
        #select only large polygons
        expr = '"F_AREA" > 1000000'  #select polygons greater than 1 sq km
        arcpy.SelectLayerByAttribute_management("sp_CH_data_multi_w_area", "",
                                                expr)
        arcpy.CopyFeatures_management(
            "sp_CH_data_multi_w_area", "sp_CH_data_multi_w_area_large"
        )  #Save large polygons selected as separate layer
        arcpy.DeleteFeatures_management(
            "sp_CH_data_multi_w_area"
        )  #temporarily deletes large polygons from species distribution map

        #determine which large polygons have points in them
        arcpy.SelectLayerByLocation_management("sp_CH_data_multi_w_area_large",
                                               "INTERSECT", spp_pt_data, "",
                                               "")
Example #12
0
# Drop join fields that should not appear in the spatial-join output.
x2 = fieldmappings.findFieldMapIndex("FID_xian2_")
fieldmappings.removeFieldMap(x2)
x3 = fieldmappings.findFieldMapIndex("Id")
fieldmappings.removeFieldMap(x3)

# Part 5: join attributes onto polygons that share a boundary segment.
t5 = time.time()
arcpy.SpatialJoin_analysis(to_polygon, id_features, spatial_join5, "#", "#", fieldmappings, "SHARE_A_LINE_SEGMENT_WITH")
elapsed5 = (time.time()-t5)

print("finish part 5------------------")
print("time:",elapsed5)

time.sleep(2)


# 6 calculate
out_calculate = "finally_areas.shp"
t6 = time.time()
try:
	# Process: Calculate Areas...
	arcpy.CalculateAreas_stats(spatial_join5, out_calculate)
except:
	# If an error occurred when running the tool, print out the error message.
	# NOTE(review): bare except; Python 2 print statement below.
	print arcpy.GetMessages()
Example #13
0
#Solve
arcpy.na.Solve(Treatment_SA, "SKIP")
#CHECK FOR ERRORS

#Save Injection transformers which were restricted by existing transformers
Injection_TX_restricted = arcpy.SelectData_management(Treatment_SA,
                                                      "Facilities")
Injection_TX_restricted = arcpy.SelectLayerByAttribute_management(
    Injection_TX_restricted, "NEW_SELECTION", '"Status"= 3')
arcpy.MakeFeatureLayer_management(Injection_TX_restricted,
                                  "Injection_TX_restricted")

#Export polygon layer
Treatment_Polygons = arcpy.SelectData_management(Treatment_SA, "Polygons")
Treatment_Polygons = Treatment_Polygons.getOutput(0)
# Attach an F_AREA field so undersized service areas can be filtered below.
Treatment_Polygons_withArea = arcpy.CalculateAreas_stats(
    Treatment_Polygons, "Treatment_Polygons")

#arcpy.MakeFeatureLayer_management(Treatment_Polygons_withArea,"Treatment_Polygons_Standard",'"F_AREA" >= 10000' )
arcpy.MakeFeatureLayer_management(Treatment_Polygons_withArea,
                                  "Treatment_Polygons_Small",
                                  '"F_AREA" <= 10000')

##T3##
#It is possible that the resulting polygon from the previous step is <10,000 sq meters or
#if an injection TX is completely engulfed in existing TX SA.
#In this case, a new SA is drawn extending 150m from the injection TX in all possible directions.
#This was the case with 3 treatment sites in Dansoman.

# NOTE(review): this compares GetCount's string output lexicographically
# ("N" > "0"); it works for non-negative counts, but int(...[0]) > 0 is safer.
if arcpy.management.GetCount('Treatment_Polygons_Small')[0] > "0":
    Injection_TX_Alt = arcpy.SelectLayerByLocation_management(
        Injection_TX, "INTERSECT", 'Treatment_Polygons_Small', "15 Meters")
#Here a new layer of private PARCELS and of public-domain (DOMPUB) land is created.
#In the public-domain layer, features smaller than 1 m2 (noise) are removed.
arcpy.Erase_analysis("RUSTICO\\PARCELA.shp", "URBANO\\PARCELA.shp",
                     "AUXILIAR\\RECORTE.shp")

arcpy.Merge_management(["URBANO\\PARCELA.shp", "AUXILIAR\\RECORTE.shp"],
                       "AUXILIAR\\PARCELA_.shp")

# TIPO='X' selects the public-domain parcels.
clause = "TIPO='X'"
arcpy.Select_analysis("AUXILIAR\\PARCELA_.shp", "AUXILIAR\\DOMPUB_.shp",
                      clause)

arcpy.MultipartToSinglepart_management("AUXILIAR\\DOMPUB_.shp",
                                       "AUXILIAR\\DOMPUB_2.shp")
# Add F_AREA so sliver features (<= 1 m2) can be filtered out next.
arcpy.CalculateAreas_stats("AUXILIAR\\DOMPUB_2.shp", "AUXILIAR\\DOMPUB_3.shp")
clause = "F_AREA > 1"
arcpy.Select_analysis("AUXILIAR\\DOMPUB_3.shp", "AUXILIAR\\DOMPUB_4.shp",
                      clause)
arcpy.Dissolve_management("AUXILIAR\\DOMPUB_4.shp", "AUXILIAR\\DOMPUB_5.shp")
arcpy.MultipartToSinglepart_management("AUXILIAR\\DOMPUB_5.shp", "DOMPUB")

# Everything that is not public domain becomes the private parcel layer.
clause = "TIPO<>'X'"
arcpy.Select_analysis("AUXILIAR\\PARCELA_.shp", "PARCELA.shp", clause)

#Equivalence to the cadastre's building-height descriptions.
#Height categories vary between municipalities.
arcpy.Merge_management(["URBANO\\CONSTRU.shp", "RUSTICO\\CONSTRU.shp"],
                       "AUXILIAR\\CONSTRU_.shp")

clause = "NUMSYMBOL <13"
Example #15
0
import arcpy

# Script-tool wrapper: read the input and output feature classes from the
# tool parameters and run Calculate Areas, which writes a copy with an
# F_AREA field attached.
fc = arcpy.GetParameterAsText(0)
fcout = arcpy.GetParameterAsText(1)

arcpy.CalculateAreas_stats(fc, fcout)

arcpy.AddMessage('-------------------')

# NOTE(review): F_AREA is in the feature class's coordinate-system units;
# the "Sq Meters" label assumes a projected CRS in meters — confirm.
arcpy.AddMessage('Area in Sq Meters')
arcpy.AddMessage('-------------------')
                
                      Existing_TX_Polygons_300m, "", "25 Meters")

#Solve
arcpy.na.Solve(Control_SA, "SKIP")
#CHECK FOR ERRORS

#Save Injection transformers which were restricted by existing transformers
Control_restricted = arcpy.SelectData_management(Control_SA, "Facilities")
Control_restricted = arcpy.SelectLayerByAttribute_management(
    Control_restricted, "NEW_SELECTION", '"Status"= 3')
arcpy.MakeFeatureLayer_management(Control_restricted, "Control_restricted")

#Calculate polygon areas
Control_Polygons = arcpy.SelectData_management(Control_SA, "Polygons")
Control_Polygons = Control_Polygons.getOutput(0)
# Attach an F_AREA field so undersized service areas can be filtered below.
Control_Polygons_withArea = arcpy.CalculateAreas_stats(Control_Polygons,
                                                       "Control_Polygons")

#arcpy.MakeFeatureLayer_management(Control_Polygons_withArea,"Control_Polygons_Standard",'"F_AREA" >= 10000' )
arcpy.MakeFeatureLayer_management(Control_Polygons_withArea,
                                  "Control_Polygons_Small",
                                  '"F_AREA" <= 10000')

# NOTE(review): lexicographic string comparison of GetCount's output
# ("N" > "0"); works for non-negative counts, but int(...[0]) > 0 is safer.
if arcpy.management.GetCount('Control_Polygons_Small')[0] > "0":
    # Re-draw a 150 m service area around control points whose polygon
    # came out too small.
    Control_Points_Alt = arcpy.SelectLayerByLocation_management(
        'Control_Points', "INTERSECT", 'Control_Polygons_Small', "15 Meters")
    arcpy.MakeFeatureLayer_management(Control_Points_Alt, "Control_Points_Alt")
    #Injection_TX_Alt = Injection_TX_Alt.getOutput(0)
    Control_Alt_SA = arcpy.na.MakeServiceAreaLayer(
        network, district + "_Control_Alt_SA", "Length", "TRAVEL_FROM", "150",
        "DETAILED_POLYS", "NO_MERGE", "DISKS", "NO_LINES", "NON_OVERLAP",
        "NO_SPLIT", Feeders, "", "", "", "TRIM_POLYS", distance_from_line)
    def execute(self, parameters, messages):
        """Generate a grid of analysis cells around an island coastline.

        Pipeline: buffer the input coastline, convert the buffer polygon
        boundaries to lines, simplify them, place points along the inner
        and outer rings, connect each inner point to its nearest outer
        point, and union the resulting connector lines with the ring
        polygons to cut the annulus into grid cells.  The final products
        are ``grid_file.shp`` (World Mercator) and ``grid_file_wgs.shp``
        (input coordinate system), each with a GRID_ID field.

        Parameters (by index):
            0 input_line       -- coastline polyline feature class
            1 output_directory -- folder that receives all outputs
            2 distance         -- buffer distance in meters
            3 simp_distance    -- line simplification tolerance in meters
            4 points_distance  -- spacing of grid points along the line
            5 clean_up         -- if truthy, delete intermediate shapefiles

        NOTE(review): relies on module-level imports of ``os``, ``arcpy``
        and ``numpy as np`` - confirm they exist at file top.
        """
        arcpy.env.overwriteOutput = True
        arcpy.AddMessage(
            "Species distribution patterns around islands, generate a grid for analysis"
        )
        # Echo every tool parameter into the geoprocessing messages for
        # traceability.
        for param in parameters:
            arcpy.AddMessage("Parameter: %s = %s" %
                             (param.name, param.valueAsText))

        # See http://resources.arcgis.com/en/help/main/10.2/index.html#//018z00000063000000
        input_line = parameters[0].valueAsText
        output_directory = parameters[1].valueAsText
        distance = parameters[2].value
        simp_distance = parameters[3].value
        points_distance = parameters[4].value
        clean_up = parameters[5].value

        # 0 Describe files to set coordinate systems
        # All outputs inherit the input's spatial reference by default.
        desc_input = arcpy.Describe(input_line)
        coord_system = desc_input.spatialReference
        arcpy.env.outputCoordinateSystem = coord_system

        # 1 Make output director and output files they do not exist
        if not os.path.exists(output_directory):
            os.makedirs(output_directory)

        arcpy.env.workspace = output_directory

        # 2 Buffer at distance around line
        arcpy.Buffer_analysis(
            in_features=input_line,
            out_feature_class=os.path.join(output_directory, "buffer.shp"),
            buffer_distance_or_field=str(distance) + " Meters",
            line_side="FULL",
            line_end_type="ROUND",
            dissolve_option="NONE",
            dissolve_field="",
            method="PLANAR")

        # 3 Convert polygon to Line
        def polys_to_lines(fc, new_fc):
            # Copy each polygon's boundary (with its attributes) into a
            # new polyline feature class.
            # From http://gis.stackexchange.com/questions/129662/creating-lines-from-polygon-borders-with-polygon-attributes-using-arcgis-arcpy
            path, name = os.path.split(new_fc)
            sm = 'SAME_AS_TEMPLATE'
            arcpy.CreateFeatureclass_management(path, name, 'POLYLINE', fc, sm,
                                                sm, fc)

            # All attribute fields except the system OID/geometry fields.
            fields = [
                f.name for f in arcpy.ListFields(new_fc)
                if f.type not in ('OID', 'Geometry')
            ]

            # get attributes
            with arcpy.da.SearchCursor(fc, ['SHAPE@'] + fields) as rows:
                values = [(r[0].boundary(), ) + tuple(r[1:]) for r in rows]

            # insert rows
            with arcpy.da.InsertCursor(new_fc, ['SHAPE@'] + fields) as irows:
                for vals in values:
                    irows.insertRow(vals)
            return new_fc

        polys_to_lines(os.path.join(output_directory, "buffer.shp"),
                       os.path.join(output_directory, "lines.shp"))

        # Separate the inner and outer buffer rings into individual
        # features.
        arcpy.MultipartToSinglepart_management(
            in_features=os.path.join(output_directory, "lines.shp"),
            out_feature_class=os.path.join(output_directory,
                                           "lines_explode.shp"))

        arcpy.SimplifyLine_cartography(
            in_features=os.path.join(output_directory, "lines_explode.shp"),
            out_feature_class=os.path.join(output_directory,
                                           "lines_explode_simplify.shp"),
            algorithm="BEND_SIMPLIFY",
            tolerance=str(simp_distance) + " Meters",
            error_resolving_option="FLAG_ERRORS",
            collapsed_point_option="KEEP_COLLAPSED_POINTS",
            error_checking_option="CHECK")

        # 4 Create points along the line

        arcpy.CreateFeatureclass_management(output_directory,
                                            "points_line.shp", 'POINT')

        # Project to World Mercator so distances along the line are in
        # meters for the point-placement step.
        arcpy.Project_management(
            in_dataset=os.path.join(output_directory,
                                    "lines_explode_simplify.shp"),
            out_dataset=os.path.join(output_directory,
                                     "lines_explode_simplify_proj.shp"),
            out_coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]",
            transform_method="",
            in_coor_system=coord_system,
            preserve_shape="NO_PRESERVE_SHAPE",
            max_deviation="",
            vertical="NO_VERTICAL")

        arcpy.CreateFeatureclass_management(output_directory,
                                            "points_line_outer.shp", 'POINT')
        arcpy.CreateFeatureclass_management(output_directory,
                                            "points_line_inner.shp", 'POINT')

        def points_along_line(line_lyr, pnt_layer, pnt_dist, inn_out):
            # Place a point every `pnt_dist` meters along either the inner
            # or outer ring.  Assumes FID 0 is the outer ring and FID 1
            # the inner ring - TODO confirm this ordering is guaranteed.
            # From https://geonet.esri.com/thread/95549

            search_cursor = arcpy.da.SearchCursor(line_lyr, ['SHAPE@', 'FID'])
            insert_cursor = arcpy.da.InsertCursor(pnt_layer, 'SHAPE@')

            for row in search_cursor:
                if inn_out == "inner":
                    if row[1] == 1:
                        for dist in range(0, int(row[0].length),
                                          int(pnt_dist)):
                            point = row[0].positionAlongLine(dist).firstPoint
                            insert_cursor.insertRow([point])
                elif inn_out == "outer":
                    if row[1] == 0:
                        for dist in range(0, int(row[0].length),
                                          int(pnt_dist)):
                            point = row[0].positionAlongLine(dist).firstPoint
                            insert_cursor.insertRow([point])

        # Grid seed points at the user-requested spacing on the inner ring.
        points_along_line(
            os.path.join(output_directory, "lines_explode_simplify_proj.shp"),
            os.path.join(output_directory, "points_line.shp"), points_distance,
            "inner")

        # Dense (1 m) point coverage of both rings, used later to build the
        # ring polygons and to find nearest-neighbour connections.
        points_along_line(
            os.path.join(output_directory, "lines_explode_simplify_proj.shp"),
            os.path.join(output_directory, "points_line_outer.shp"), 1,
            "outer")

        points_along_line(
            os.path.join(output_directory, "lines_explode_simplify_proj.shp"),
            os.path.join(output_directory, "points_line_inner.shp"), 1,
            "inner")

        # Drop the last seed point - presumably it coincides with (or sits
        # too close to) the first point on the closed ring; confirm.
        rows_list = [
            row for row in arcpy.da.SearchCursor(
                os.path.join(output_directory, "points_line.shp"), "FID")
        ]
        delete_cursor = arcpy.da.UpdateCursor(
            os.path.join(output_directory, "points_line.shp"), "FID")
        delete_value = rows_list[-1][0]

        for row in delete_cursor:
            if row[0] == delete_value:
                delete_cursor.deleteRow()

        # The point shapefiles were created without a spatial reference;
        # tag them with the World Mercator system they were generated in.
        arcpy.DefineProjection_management(
            os.path.join(output_directory, "points_line.shp"),
            coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]"
        )

        arcpy.DefineProjection_management(
            os.path.join(output_directory, "points_line_outer.shp"),
            coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]"
        )

        arcpy.DefineProjection_management(
            os.path.join(output_directory, "points_line_inner.shp"),
            coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]"
        )

        # Copy of the seed points back in the input coordinate system.
        arcpy.Project_management(
            os.path.join(output_directory, "points_line.shp"),
            out_dataset=os.path.join(output_directory, "points_line_wgs.shp"),
            out_coor_system=coord_system,
            transform_method="",
            in_coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]",
            preserve_shape="NO_PRESERVE_SHAPE",
            max_deviation="",
            vertical="NO_VERTICAL")

        def points_to_polygon_line(output_directory, points):
            # Rebuild one polygon and one polyline feature from the dense
            # ring points, in their original order ("p2pl_pol_"/"p2pl_li_"
            # prefixed outputs).
            sr = arcpy.Describe(os.path.join(output_directory,
                                             points)).spatialReference

            coords = np.array(list(
                list(x) for x in
                arcpy.da.FeatureClassToNumPyArray(os.path.join(
                    output_directory, points), ["SHAPE@X", "SHAPE@Y"],
                                                  "",
                                                  sr,
                                                  explode_to_points=True)),
                              dtype="float64")

            arcpy.CreateFeatureclass_management(output_directory,
                                                "p2pl_li_" + points,
                                                "Polyline")
            arcpy.CreateFeatureclass_management(output_directory,
                                                "p2pl_pol_" + points,
                                                "Polygon")

            pnt = arcpy.Point()
            ary = arcpy.Array()

            # Single polygon from all ring points.
            cur = arcpy.InsertCursor(
                os.path.join(output_directory, "p2pl_pol_" + points))
            for coord in coords:
                pnt.X = coord[0]
                pnt.Y = coord[1]
                ary.add(pnt)
            polygon = arcpy.Polygon(ary)
            feat = cur.newRow()
            feat.shape = polygon
            cur.insertRow(feat)

            del cur, pnt, ary, coord, polygon, feat

            pnt = arcpy.Point()
            ary = arcpy.Array()

            # Single polyline from the same points.
            cur = arcpy.InsertCursor(
                os.path.join(output_directory, "p2pl_li_" + points))
            for coord in coords:
                pnt.X = coord[0]
                pnt.Y = coord[1]
                ary.add(pnt)
            line = arcpy.Polyline(ary)
            feat = cur.newRow()
            feat.shape = line
            cur.insertRow(feat)
            del cur, pnt, ary, coord, line, feat

            arcpy.DefineProjection_management(
                os.path.join(output_directory, "p2pl_li_" + points),
                coor_system=
                "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]"
            )

            arcpy.DefineProjection_management(
                os.path.join(output_directory, "p2pl_pol_" + points),
                coor_system=
                "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]"
            )

            return

        points_to_polygon_line(output_directory, "points_line_inner.shp")

        points_to_polygon_line(output_directory, "points_line_outer.shp")

        # 5 Generate perpendicular lines
        # For each seed point on the inner ring, draw a segment to its
        # nearest point on the outer ring (nearest-neighbour via numpy).
        desc = arcpy.Describe(os.path.join(output_directory,
                                           "points_line.shp"))
        sr = arcpy.SpatialReference(desc.spatialReference.factoryCode)

        dests = np.array(list(
            list(x) for x in arcpy.da.FeatureClassToNumPyArray(
                os.path.join(output_directory, "points_line_outer.shp"),
                ["SHAPE@X", "SHAPE@Y"], "", sr, True)),
                         dtype="float64")

        arcpy.CreateFeatureclass_management(output_directory,
                                            "intersect_lines.shp", "Polyline")

        search_cursor = arcpy.da.SearchCursor(
            os.path.join(output_directory, "points_line.shp"),
            ['SHAPE@X', 'SHAPE@Y'])

        for row in search_cursor:
            origin = np.array((row[0], row[1]), dtype="float64")
            deltas = dests - origin
            distances = np.hypot(deltas[:, 0], deltas[:, 1])
            min_dist = np.min(distances)
            wh = np.where(distances == min_dist)
            closest = dests[wh[0]]

            cur = arcpy.InsertCursor(
                os.path.join(output_directory, "intersect_lines.shp"))

            # Build array of start/ends
            line_array = arcpy.Array()
            # start point
            start = arcpy.Point()
            (start.ID, start.X, start.Y) = (1, origin.item(
                (0)), origin.item((1)))
            line_array.add(start)
            # end point
            end = arcpy.Point()
            (end.ID, end.X, end.Y) = (2, closest.item(
                (0, 0)), closest.item((0, 1)))
            line_array.add(end)
            # write our fancy feature to the shapefile
            feat = cur.newRow()
            feat.shape = line_array
            cur.insertRow(feat)
            # yes, this shouldn't really be necessary...
            line_array.removeAll()
            del origin

        arcpy.DefineProjection_management(
            os.path.join(output_directory, "intersect_lines.shp"),
            coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]"
        )

        # Union the inner and outer ring polygons; the overlap structure
        # lets us isolate the annulus between the rings.
        arcpy.Union_analysis(in_features=[
            os.path.join(output_directory, "p2pl_pol_points_line_inner.shp"),
            os.path.join(output_directory, "p2pl_pol_points_line_outer.shp")
        ],
                             out_feature_class=os.path.join(
                                 output_directory, "simplified_polygon.shp"),
                             join_attributes="ALL",
                             cluster_tolerance="",
                             gaps="GAPS")

        # Remove the inner (island) polygon so only the ring remains.
        # NOTE(review): assumes the unwanted piece always lands at FID 1 -
        # confirm this holds for all inputs.
        with arcpy.da.UpdateCursor(
                os.path.join(output_directory, "simplified_polygon.shp"),
                "FID") as cursor:
            for row in cursor:
                if row[0] == 1:
                    cursor.deleteRow()

        # Buffer the connector lines a hair's width so the union below can
        # split the ring polygon along them.
        arcpy.Buffer_analysis(in_features=os.path.join(output_directory,
                                                       "intersect_lines.shp"),
                              out_feature_class=os.path.join(
                                  output_directory, "intersect_lines_buf.shp"),
                              buffer_distance_or_field="1 Centimeters",
                              line_side="FULL",
                              line_end_type="ROUND",
                              dissolve_option="NONE",
                              dissolve_field="",
                              method="PLANAR")

        arcpy.Union_analysis(in_features=[
            os.path.join(output_directory, "intersect_lines_buf.shp"),
            os.path.join(output_directory, "simplified_polygon.shp")
        ],
                             out_feature_class=os.path.join(
                                 output_directory,
                                 "intersect_lines_buffered_polygon.shp"),
                             join_attributes="ALL",
                             cluster_tolerance="",
                             gaps="GAPS")

        arcpy.Project_management(
            os.path.join(output_directory,
                         "intersect_lines_buffered_polygon.shp"),
            out_dataset=os.path.join(
                output_directory, "intersect_lines_buffered_polygon_proj.shp"),
            out_coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]",
            transform_method="",
            in_coor_system=coord_system,
            preserve_shape="NO_PRESERVE_SHAPE",
            max_deviation="",
            vertical="NO_VERTICAL")

        # Add F_AREA so the grid cells can be separated from the thin
        # connector-buffer slivers by size.
        arcpy.CalculateAreas_stats(
            Input_Feature_Class=os.path.join(
                output_directory, "intersect_lines_buffered_polygon_proj.shp"),
            Output_Feature_Class=os.path.join(
                output_directory,
                "intersect_lines_buffered_polygon_areas.shp"))

        polygon_sizes = []

        with arcpy.da.SearchCursor(
                os.path.join(output_directory,
                             "intersect_lines_buffered_polygon_areas.shp"),
                "F_Area") as cursor:
            for row in cursor:
                polygon_sizes.append(int(row[0]))

        polygon_sizes = sorted(polygon_sizes, key=int, reverse=True)

        # Keep only the largest feature (the multipart ring of grid
        # cells); everything smaller is a sliver artefact.
        with arcpy.da.UpdateCursor(
                os.path.join(output_directory,
                             "intersect_lines_buffered_polygon_areas.shp"),
                "F_Area") as cursor:
            for row in cursor:
                if int(row[0]) == int(polygon_sizes[0]):
                    pass
                else:
                    cursor.deleteRow()

        # Explode the surviving multipart feature into individual grid
        # cells.
        arcpy.MultipartToSinglepart_management(in_features=os.path.join(
            output_directory, "intersect_lines_buffered_polygon_areas.shp"),
                                               out_feature_class=os.path.join(
                                                   output_directory,
                                                   "grid_file1.shp"))

        arcpy.Project_management(
            os.path.join(output_directory, "grid_file1.shp"),
            out_dataset=os.path.join(output_directory, "grid_file.shp"),
            out_coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]",
            transform_method="",
            in_coor_system=coord_system,
            preserve_shape="NO_PRESERVE_SHAPE",
            max_deviation="",
            vertical="NO_VERTICAL")

        # Give each grid cell a GRID_ID (copied from its FID below).
        arcpy.AddField_management(in_table=os.path.join(
            output_directory, "grid_file.shp"),
                                  field_name="GRID_ID",
                                  field_type="LONG",
                                  field_precision="",
                                  field_scale="",
                                  field_length="",
                                  field_alias="",
                                  field_is_nullable="NULLABLE",
                                  field_is_required="NON_REQUIRED",
                                  field_domain="")

        with arcpy.da.UpdateCursor(
                os.path.join(output_directory, "grid_file.shp"),
            ["FID", "GRID_ID"]) as cursor:
            for row in cursor:
                row[1] = row[0]
                cursor.updateRow(row)

        # Final grid in the input's coordinate system.
        arcpy.Project_management(
            os.path.join(output_directory, "grid_file.shp"),
            out_dataset=os.path.join(output_directory, "grid_file_wgs.shp"),
            out_coor_system=coord_system,
            transform_method="",
            in_coor_system=
            "PROJCS['World_Mercator',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],UNIT['Meter',1.0]]",
            preserve_shape="NO_PRESERVE_SHAPE",
            max_deviation="",
            vertical="NO_VERTICAL")

        arcpy.AddWarning(
            "The field name GRID_ID will likely not be sequential, and you will have to fix manually."
        )

        arcpy.AddMessage(
            "After fixing the non-sequentially spaced names, you will have to manually join "
            "the layer back to the original point observations.")

        # Optionally remove every intermediate dataset, leaving only the
        # grid outputs.
        if clean_up:
            delete_list = [
                os.path.join(output_directory, "buffer.shp"),
                os.path.join(output_directory, "lines.shp"),
                os.path.join(output_directory, "lines_explode.shp"),
                os.path.join(output_directory, "lines_explode_simplify.shp"),
                os.path.join(output_directory,
                             "lines_explode_simplify_Pnt.shp"),
                os.path.join(output_directory,
                             "intersect_lines_buffered_polygon_areas.shp"),
                os.path.join(output_directory,
                             "intersect_lines_buffered_polygon_proj.shp"),
                os.path.join(output_directory,
                             "lines_explode_simplify_proj.shp"),
                os.path.join(output_directory,
                             "p2pl_li_points_line_inner.shp"),
                os.path.join(output_directory,
                             "p2pl_li_points_line_outer.shp"),
                os.path.join(output_directory,
                             "p2pl_pol_points_line_inner.shp"),
                os.path.join(output_directory,
                             "p2pl_pol_points_line_outer.shp"),
                os.path.join(output_directory, "points_line.shp"),
                os.path.join(output_directory, "points_line_inner.shp"),
                os.path.join(output_directory, "points_line_outer.shp"),
                os.path.join(output_directory, "points_line_wgs.shp"),
                os.path.join(output_directory, "simplified_polygon.shp"),
                os.path.join(output_directory, "grid_file1.shp"),
                os.path.join(output_directory, "intersect_lines.shp"),
                os.path.join(output_directory, "intersect_lines_buf.shp"),
                os.path.join(output_directory,
                             "intersect_lines_buffered_polygon.shp"),
            ]
            for i in delete_list:
                arcpy.Delete_management(i)

        return
# Buffer each infested island by half the configured distance.
# NOTE(review): the halving presumably makes two facing buffers meet at
# the full distance - confirm.  In Python 2 this is integer division.
distanceField = "%s Meters" % (list_buffer_distance_m / 2)
sideType = ""
endType = ""
dissolveType = "NONE"
dissolveField = ""
arcpy.Buffer_analysis(islands_infested, eradication_zone_temp, distanceField,
                      sideType, endType, dissolveType, dissolveField)

# Dissolve

group_dissolve(eradication_zone_temp, eradication_zone, 80, path_process)

# Calculate area for the visual buffer and clean up
# Calculate Areas cannot write in place, so round-trip through the temp
# dataset and copy the result back over the original zone dataset.
arcpy.Delete_management(eradication_zone_temp)
arcpy.CalculateAreas_stats(eradication_zone, eradication_zone_temp)
arcpy.Delete_management(eradication_zone)
arcpy.Copy_management(eradication_zone_temp, eradication_zone)
arcpy.Delete_management(eradication_zone_temp)
arcpy.DefineProjection_management(eradication_zone, file_projection)

# Add some fields for the visual buffer:
# - index number
arcpy.AddField_management(eradication_zone, "z_nr", "LONG", "9", "", "",
                          "z_nr", "NULLABLE", "REQUIRED")

# - Zone area (area covered by this zone)
arcpy.AddField_management(eradication_zone, "z_area", "FLOAT", "12", "", "",
                          "z_area", "NULLABLE", "REQUIRED")

# - Total islands in zone
Exemple #19
0
# Insert the "fin" suitability raster layer into the map document before
# the reference layer.  NOTE(review): df, refLayer, input1 and input2 are
# defined earlier in the script - confirm.
fin = mapping.Layer(r"fin")
mapping.InsertLayer(df, refLayer, fin, "BEFORE")
#--------------------------------------To extract suitable regions' area in China----------------------------------------#
# Execute SelectLayerByAttribute (VALUE = 0 marks suitable cells -
# TODO confirm the coding)
arcpy.SelectLayerByAttribute_management("fin", "NEW_SELECTION", "\"VALUE\" =0")
# Execute RasterToPolygon
arcpy.RasterToPolygon_conversion("fin", "zones.shp", "NO_SIMPLIFY", "value")
# Execute IntersectAnalysis (clip the suitable zones to the China mask)
arcpy.Intersect_analysis(["zones.shp", input1], "Intersect", "ALL", "", "")
# Execute ConversationReference
# Tag the result with an equal-area-friendly Aitoff projection so the
# area figures computed below are in meters.
arcpy.DefineProjection_management(
    "Intersect.shp",
    "PROJCS['Sphere_Aitoff',GEOGCS['GCS_Sphere',DATUM['D_Sphere',SPHEROID['Sphere',6371000.0,0.0]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Aitoff'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],UNIT['Meter',1.0]]"
)
# Execute AddField (Calculate Areas adds F_AREA)
arcpy.CalculateAreas_stats("Intersect.shp", "IntersectAreaField.shp")
# Execute CountAreas (sum the suitable area per PYNAME group)
arcpy.Statistics_analysis("IntersectAreaField.shp", "CountAreas1",
                          [["F_AREA", "SUM"]], "PYNAME")
#-------------------------------------To extract suitable regions' area across the globe--------------------------------#
# Execute IntersectAnalysis
arcpy.Intersect_analysis(["zones.shp", input2], "Intersect1", "ALL", "", "")
# Execute ConversationReference
arcpy.DefineProjection_management(
    "Intersect1.shp",
    "PROJCS['Sphere_Aitoff',GEOGCS['GCS_Sphere',DATUM['D_Sphere',SPHEROID['Sphere',6371000.0,0.0]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Aitoff'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],UNIT['Meter',1.0]]"
)
# Execute AddField
arcpy.CalculateAreas_stats("Intersect1.shp", "IntersectAreaField1.shp")
# Execute CountAreas
arcpy.Statistics_analysis("IntersectAreaField1.shp", "CountAreas2",
Exemple #20
0
        print "    End time is", str(datetime.datetime.now())

        print "  Reprojecting {} loss area to World Eckert IV".format(year)
        print "    Start time is", str(datetime.datetime.now())
        out_coordinate_system = "PROJCS['World_Eckert_IV',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Eckert_IV'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],UNIT['Meter',1.0]]"
        arcpy.Project_management(
            legal_loss_year_PRODES_noFire_neighbor_shp_dissolve,
            legal_loss_year_PRODES_noFire_neighbor_shp_dissolve_reproj,
            out_coor_system=out_coordinate_system,
            transform_method="")
        print "    End time is", str(datetime.datetime.now())

        print "  Calculating feature areas"
        print "    Start time is", str(datetime.datetime.now())
        arcpy.CalculateAreas_stats(
            legal_loss_year_PRODES_noFire_neighbor_shp_dissolve_reproj,
            legal_loss_year_PRODES_noFire_neighbor_shp_dissolve_reproj_areas)
        print "    End time is", str(datetime.datetime.now())

        # Keeps only the features that are larger than 6.25 ha (Brazil exclusion criteria 4)
        print "  Selecting features larger than 6.25 ha"
        print "    Start time is", str(datetime.datetime.now())
        where = "F_AREA > 62500"
        arcpy.MakeFeatureLayer_management(
            legal_loss_year_PRODES_noFire_neighbor_shp_dissolve_reproj_areas,
            "layer_{}".format(year),
            where_clause=where)
        arcpy.CopyFeatures_management(
            "layer_{}".format(year),
            legal_loss_year_PRODES_noFire_neighbor_shp_dissolve_reproj_areas_large
        )
Exemple #21
0
#Create temporary directory and define temporary files
# NOTE(review): temp_dir is defined earlier in the script; os.mkdir will
# raise if the directory already exists - confirm that is acceptable.
os.mkdir(temp_dir)
buffer_shp = os.path.join(temp_dir, 'BUFFER.shp')
intersect_shp = os.path.join(temp_dir, 'INTERSECTED.shp')
area_shp = os.path.join(temp_dir, 'AREA.shp')

try:
    arcpy.AddMessage('Creating Temporary Shapefiles')
    if arcpy.ProductInfo() == 'ArcInfo':
        arcpy.Buffer_analysis(route_file, buffer_shp, assumed_lane_width, dissolve_option = 'ALL', line_side = 'RIGHT', line_end_type = 'FLAT')
    else:
        arcpy.AddMessage('WARNING: Results will be more accurate with ArcInfo license')
        arcpy.Buffer_analysis(route_file, buffer_shp, assumed_lane_width, dissolve_option = 'ALL')
    arcpy.Intersect_analysis([polygon_file, buffer_shp], intersect_shp)
    arcpy.CalculateAreas_stats(intersect_shp, area_shp)

    arcpy.AddMessage('Approximating Stops By Polygon')
    stops = {}
    polygons = arcpy.da.SearchCursor(area_shp, field_names = [id_field, 'F_AREA'])
    for polygon in polygons:
        if polygon[0] not in stops:
            stops[polygon[0]] = 0
        stops[polygon[0]] += polygon[1] / (lane_width * spacing)
    del polygons

    arcpy.AddMessage('Adding Approximate Number of Stops to Polygon File')
    if new_field in [field.name for field in arcpy.ListFields(polygon_file)]:
        arcpy.DeleteField_management(polygon_file, new_field)
    if return_integer:
        arcpy.AddField_management(polygon_file, new_field, 'LONG')
# Execute Reclassify
outReclassify2 = Reclassify(inRaster2, reclassField, remap, "NODATA")

# Save the output
# NOTE(review): outReclassify1 (created earlier in the script) is saved
# here while outReclassify2 computed just above is never persisted -
# confirm this is intentional and not a typo.
outReclassify1.save(inRaster4)

# Execute RasterToPolygon (before/after polygon versions of the rasters)
arcpy.RasterToPolygon_conversion(inRaster3, before, "NO_SIMPLIFY", field)

arcpy.RasterToPolygon_conversion(inRaster4, after, "NO_SIMPLIFY", field)

# Erase the "before" footprint from "after" to isolate the changed area.
xyTol = "1 Meters"
arcpy.Erase_analysis(after, before, eraseOutput, xyTol)

# Process: Calculate Areas...
arcpy.CalculateAreas_stats(eraseOutput, calculate_output)

# # Execute Select
# arcpy.Select_analysis(in_features, out_feature_class, where_clause)

# # Create a new fieldmappings and add the two input feature classes.
# fieldmappings = arcpy.FieldMappings()
# fieldmappings.addTable(targetFeatures)
# fieldmappings.addTable(joinFeatures)

# # First get the POP1990 fieldmap. POP1990 is a field in the cities feature class.
# # The output will have the states with the attributes of the cities. Setting the
# # field's merge rule to mean will aggregate the values for all of the cities for
# # each state into an average value. The field is also renamed to be more appropriate
# # for the output.
# pop1990FieldIndex = fieldmappings.findFieldMapIndex("F_AREA")
Exemple #23
0
def ExtractRange(outRaster, outFilePath, file):
    """Vectorize the positive-valued area of a raster and compute polygon areas.

    Extracts cells with VALUE > 0 from *outRaster*, converts them to points,
    aggregates the points into polygons (30 m), smooths the outlines (PAEK,
    30 m), and writes area statistics to ``calculate_output.shp`` inside
    *file*.  Intermediate datasets are deleted afterwards.

    Args:
        outRaster: path to the input raster.
        outFilePath: unused; kept for interface compatibility with callers.
        file: workspace directory for intermediate/output shapefiles.  (The
            name shadows a builtin but is kept so keyword callers still work.)

    Returns:
        None.  Returns early (after logging) if the initial extraction fails.
    """
    inSQLClause = "VALUE > 0"
    try:
        # Execute ExtractByAttributes
        attExtract = arcpy.sa.ExtractByAttributes(outRaster, inSQLClause)
        print('87')  # progress marker, presumably read by a caller watching stdout
        sys.stdout.flush()

        in_point_features = os.path.join(file, u"RasterToPoint_conversion.shp")
        out_feature_class = os.path.join(file,
                                         u"AggregatePoints_cartography.shp")
        out_SmoothPolygon_class = os.path.join(file,
                                               u"out_SmoothPolygon_class.shp")
        calculate_output = os.path.join(file, u"calculate_output.shp")

        # Each geoprocessing step is best-effort: a failure in one step is
        # tolerated so later steps still run.  Narrowed from bare ``except:``
        # so KeyboardInterrupt/SystemExit are no longer swallowed.
        try:
            arcpy.RasterToPoint_conversion(attExtract, in_point_features,
                                           "VALUE")
        except Exception:
            pass
        try:
            arcpy.AggregatePoints_cartography(in_point_features,
                                              out_feature_class, 30)
        except Exception:
            pass
        try:
            arcpy.SmoothPolygon_cartography(out_feature_class,
                                            out_SmoothPolygon_class, 'PAEK',
                                            30)
        except Exception:
            pass
        try:
            # Process: Calculate Areas...
            arcpy.CalculateAreas_stats(out_SmoothPolygon_class,
                                       calculate_output)
        except Exception:
            # If an error occurred when running the tool, print out the error message.
            traceback.print_exc()

        # Clean up intermediates; log (but never abort on) cleanup failures.
        # Order and operations mirror the original cleanup sequence.
        for cleanup, target in (
            (arcpy.Delete_management, in_point_features),
            (arcpy.DeleteFeatures_management, out_SmoothPolygon_class),
            (arcpy.DeleteFeatures_management, out_feature_class),
            (arcpy.Delete_management, out_feature_class),
            (arcpy.Delete_management, out_SmoothPolygon_class),
        ):
            try:
                cleanup(target)
            except Exception:
                traceback.print_exc()

    except Exception as err:
        arcpy.AddMessage("ExtractByAttributes Failed")
        arcpy.AddMessage(err)
        traceback.print_exc()
        return

#%%
# CLIP TO STATES 
# Inputs/outputs for the clip operation.
# NOTE(review): these paths begin with '//' and will resolve as UNC/absolute
# paths on Windows unless arcpy.env.workspace is set upstream — confirm.
in_features = "//Intermediates//Census_Station_P3_Nowater.shp"
clip_features = "//Boundary Files//States_equidistant.shp"
out_feature_class = "//Intermediates//Census_Station_Clip3.shp"
xy_tolerance = ""  # empty string -> the tool's default XY tolerance

# Execute Clip: trim the station polygons to the state boundaries.
arcpy.Clip_analysis(in_features, clip_features, out_feature_class, xy_tolerance)

#%%

# FIGURE OUT WEIGHTING SCHEME BASED ON AREA
# Appends an F_AREA field; 'record_length' and 'missing' are defined earlier
# in the script and parameterize the output file name.
arcpy.CalculateAreas_stats("//Intermediates//Census_Station_Clip3.shp", "//Output//Census_Station_"+record_length+"_"+missing+".shp")
#arcpy.Delete_management("in_memory","")




#%%
#switching over to R now to read the .dbf and find weighted mean





#%%
# UNION OF STATES AND THEISSEN POLYGONS 
# LINK STATION POLYGONS TO CENSUS
Exemple #25
0
# Process: Polygon to Raster (2) — rasterize the 2001 land-use polygons on a
# 10 m grid, sampling the LANDUSE code at each cell center.
arcpy.PolygonToRaster_conversion(lu_01, "LANDUSE", lu_2001, "CELL_CENTER",
                                 "NONE", "10")

# Process: Reclassify (2) — collapse the detailed LANDUSE codes into 11 broad
# classes; the remap string is a ";"-separated list of "old_code new_code".
arcpy.gp.Reclassify_sa(
    lu_2001, "LANDUSE",
    "1420 7;4300 3;1320 6;1222 5;1440 7;1240 10;1110 1;1330 6;1231 5;1232 5;1540 8;1520 8;1430 7;1310 6;1223 5;1130 2;1350 6;4110 3;1530 8;3100 10;1370 6;1360 6;1212 5;1560 8;4220 3;1250 5;3500 10;1511 8;5200 11;4210 3;1512 8;5100 11;3200 10;1340 6;3600 10;1120 3;3300 10;1550 8;1211 5;4120 3;1410 7;5300 11;1140 3;3400 10;2100 9",
    Reclass_lu_01, "DATA")

# Process: Raster to Polygon (2) — back to polygons with boundary simplification.
arcpy.RasterToPolygon_conversion(Reclass_lu_01, lu2001_recl_shp, "SIMPLIFY",
                                 "LANDUSE")

# Process: Calculate Areas (2) — append an F_AREA field per polygon.
arcpy.CalculateAreas_stats(lu2001_recl_shp, lu2001_recl_a_shp)

# Process: Polygon to Raster (3) — same pipeline applied to the 2005 land use.
arcpy.PolygonToRaster_conversion(lu_2005, "LANDUSE", lu_2005__3_,
                                 "CELL_CENTER", "NONE", "10")

# Process: Reclassify (3) — identical remap table as used for 2001.
arcpy.gp.Reclassify_sa(
    lu_2005__3_, "LANDUSE",
    "1420 7;4300 3;1320 6;1222 5;1440 7;1240 10;1110 1;1330 6;1231 5;1232 5;1540 8;1520 8;1430 7;1310 6;1223 5;1130 2;1350 6;4110 3;1530 8;3100 10;1370 6;1360 6;1212 5;1560 8;4220 3;1250 5;3500 10;1511 8;5200 11;4210 3;1512 8;5100 11;3200 10;1340 6;3600 10;1120 3;3300 10;1550 8;1211 5;4120 3;1410 7;5300 11;1140 3;3400 10;2100 9",
    Reclass_lu_05, "DATA")

# Process: Raster to Polygon (3)
arcpy.RasterToPolygon_conversion(Reclass_lu_05, lu2005_recl_shp, "SIMPLIFY",
                                 "LANDUSE")
Exemple #26
0
import arcpy

# Workspace containing the NHN hydrography shapefiles.
arcpy.env.workspace = r'C:\Users\Luis\Downloads\nhn_rhn_05ck000_shp_en'

# Append an F_AREA field to the waterbody layer, writing the result to a
# new shapefile in the same workspace.
waterbody_shp = 'NHN_05CK000_1_0_HD_WATERBODY_2.shp'
area_output_shp = 'areaWaterbody.shp'
arcpy.CalculateAreas_stats(waterbody_shp, area_output_shp)
def CalShpArea(inshp, outshp, isprint):
    """Copy *inshp* to *outshp* with an added area field (F_AREA).

    If *isprint* is truthy, announce completion on stdout.
    """
    arcpy.CalculateAreas_stats(inshp, outshp)
    if not isprint:
        return
    print('CalShpArea is finished....')
Exemple #28
0
def main(*args):
    """Build a from->to area-overlap matrix between two polygon shapefiles.

    Intersects the two inputs, measures the area of every intersection
    piece, and writes a CSV matrix whose (to, from) entry is the fraction of
    the "from" polygon's area that falls inside the "to" polygon (each
    column sums to 1).

    Positional args:
        0 from_shp_file: source polygon shapefile.
        1 from_field: ID field in the source shapefile.
        2 to_shp_file: target polygon shapefile.
        3 to_field: ID field in the target shapefile.
        4 outfile: output CSV path ('.csv' appended when missing).
        5 show_matrix: truthy -> open the CSV when done.
        6 remove_temp_if_successful: truthy -> delete the temp dir on success.
        7 remove_temp_if_error: truthy -> delete the temp dir on failure.
    """
    #Read in inputs
    from_shp_file = args[0]
    from_field = args[1]
    to_shp_file = args[2]
    to_field = args[3]
    outfile = args[4]
    show_matrix = args[5]
    remove_temp_if_successful = args[6]
    remove_temp_if_error = args[7]

    # Intersect output renames the second of two identically-named fields
    # with a '_1' suffix, so mirror that here.
    if from_field == to_field:
        to_field += '_1'

    # Ensure a .csv extension.  Case-insensitive: the previous slicing check
    # missed 'FILE.CSV' and would append a duplicate suffix.
    if not outfile.lower().endswith('.csv'):
        outfile += '.csv'

    # Create temporary directory.  Tolerate a pre-existing one, which
    # previously made os.mkdir raise OSError.
    temp_dir = r'C:\TEMP'
    if not os.path.isdir(temp_dir):
        os.mkdir(temp_dir)
    temp_shp = os.path.join(temp_dir, 'TEMP.shp')
    from_shp = os.path.join(temp_dir, 'FROM.shp')
    to_shp = os.path.join(temp_dir, 'TO.shp')

    #Copy input shapefiles into temporary directory so the originals are
    #never touched by geoprocessing.
    arcpy.CopyFeatures_management(from_shp_file, from_shp)
    arcpy.CopyFeatures_management(to_shp_file, to_shp)

    #Process the data. If an error occurs, the temporary directory will be deleted, and then the exception will be raised
    try:

        #Intersect the two shapefiles and calculate the area of the intersected shapefile
        arcpy.Intersect_analysis([from_shp, to_shp], temp_shp)
        temp2_shp = temp_shp.replace('.shp', '2.shp')
        arcpy.CalculateAreas_stats(temp_shp, temp2_shp)

        #Create a list of all of the origin and destination polygons
        from_list = []
        to_list = []
        polygons = arcpy.da.SearchCursor(temp_shp, [from_field, to_field])
        for polygon in polygons:
            from_list.append(polygon[0])
            to_list.append(polygon[1])
        del polygons

        from_codes = pd.Series(from_list).value_counts().index
        to_codes = pd.Series(to_list).value_counts().index

        #Create matrix with total area of each intersected polygon, arranged
        #by the from polygon and to polygon.  Use += because one (from, to)
        #pair can intersect in several disjoint pieces; plain assignment kept
        #only the last piece's area.
        areas = pd.DataFrame(np.zeros((len(to_codes), len(from_codes))),
                             index=to_codes,
                             columns=from_codes)
        polygons = arcpy.da.SearchCursor(temp2_shp,
                                         [from_field, to_field, 'F_AREA'])
        for polygon in polygons:
            areas.loc[polygon[1], polygon[0]] += polygon[2]
        del polygons

        #Divide each column of the matrix by its sum so each column holds
        #area fractions (equivalent to the former row-by-row loop).
        out_data = areas.div(areas.sum(0), axis=1)

        #Write to csv, and delete the temporary directory
        out_data.to_csv(outfile)
        if remove_temp_if_successful:
            clear_temp()

    except Exception as e:
        if remove_temp_if_error:
            clear_temp()
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(exc_tb.tb_lineno)
        raise e

    #Open the file if instructed to do so
    if show_matrix:
        Popen(outfile, shell=True)
Exemple #29
0
def IceCliffLocation(workspace,dem,tileDebarea,pixel,skinny,minSlope,n_iterations,L_e,alpha,beta_e,A_min,phi,gamma):
    import sys
    import os
    import arcpy
    from arcpy import env
    from arcpy.sa import Slope, ExtractByMask, Raster, SetNull, Int
    import matplotlib.pyplot as plt
    import numpy as np
    from numpy import array
    from scipy.optimize import curve_fit
    env.overwriteOutput = True

    try:
        import arcinfo
    except:
        sys.exit("ArcInfo license not available")
        arcpy.AddMessage("ArcInfo license not available")
    if arcpy.CheckExtension("spatial") == "Available":
        arcpy.CheckOutExtension("spatial")
    else:
        sys.exit("Spatial Analyst license not available")
        arcpy.AddMessage("Spatial Analyst license not available")
        
    #Parameters that should be stable:
    slopeLimit = 90 # slope detection capped at this value
    
    ## Loop for optimizing slope
    if str(workspace.split("\\")[-1]) == 'Final':
        n = []
        n.append(minSlope)        
    else:
        minSlope = 0
        n = np.arange(minSlope,slopeLimit,(slopeLimit-minSlope)/n_iterations)

    skipIteration = []
    for minSlope in n:
        
        # check for existing iterations if code has previously run but crashed. 
        if arcpy.ListFeatureClasses("*cliffMap*"):
            fcListPrior = arcpy.ListFeatureClasses("*cliffMap*")
            skipIteration = []
            for prior_i in fcListPrior:
                if int(prior_i[14:16]) == int("%02d" % (int(minSlope),)):
                    skipIteration = 1
        if skipIteration == 1:
            continue

        ## Ice Cliff code  
        if skinny == 'false':
            print 'IceCliffLocation script started...'
        if skinny == 'true':
            print 'skinny IceCliffLocation script started...'
            
        # Parameter that probably should be 0
        minProb = 0 # probability associated with minSlope.
        
        arcpy.CopyFeatures_management(tileDebarea, workspace+"\\del_debarea.shp")
        debarea_iteration = workspace+"\\del_debarea.shp"
        arcpy.env.snapRaster = dem
        outExtractSlope = ExtractByMask(dem, debarea_iteration)

        outExtractSlope.save("dem_extract.TIF")
        if int(round(float(str(arcpy.GetRasterProperties_management(dem, "CELLSIZEX"))))) == pixel:
            dem = "dem_extract.TIF"
        else:    
            arcpy.Resample_management("dem_extract.TIF", "dem_extractResample.TIF", pixel, "NEAREST")
            arcpy.env.snapRaster = dem
            print "DEM resampeld from "+str(int(round(float(str(arcpy.GetRasterProperties_management(dem, "CELLSIZEX"))))))+' to '+str(pixel)
            dem = "dem_extractResample.TIF"
        
        # Create slope raster
        outSlope = Slope(dem, "DEGREE", 1)
        outSlope.save("del_slope.TIF")
    
        # Isolate slope values above minSlope 
        outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(minSlope))
        outSetNull.save("del_minSlope.TIF")       
    
        # Exit process if no cliffs exist
        nocliff = arcpy.GetRasterProperties_management(Int("del_minSlope.TIF"), "ALLNODATA")
        if int(str(nocliff)) == 1:
            print "No area with a slope above "+str(minSlope)+"."
        elif float(str(arcpy.GetRasterProperties_management('del_minSlope.TIF',"MAXIMUM"))) - float(str(arcpy.GetRasterProperties_management('del_minSlope.TIF',"MINIMUM"))) == 0:
            print "Only one pixel with a slope above "+str(minSlope)+", iteration skipped."
        else:
            minMean = float(str(arcpy.GetRasterProperties_management("del_minSlope.TIF", "MEAN"))) 
            minSD = float(str(arcpy.GetRasterProperties_management("del_minSlope.TIF", "STD"))) 

            areaSlope = minMean
            
            print 'areaSlope = ' + str(areaSlope)
            
            # Isolate slope values above areaSlope 
            outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(areaSlope))
            outSetNull.save("del_areaSlope.TIF")
            arcpy.env.snapRaster = dem  
                        
            # Exit process if no cliffs exist
            nocliff = arcpy.GetRasterProperties_management(Int("del_areaSlope.TIF"), "ALLNODATA")
            if int(str(nocliff)) == 1:
                print "No area with a slope above "+str(areaSlope)+"."
            elif float(str(arcpy.GetRasterProperties_management("del_areaSlope.TIF","MAXIMUM"))) - float(str(arcpy.GetRasterProperties_management("del_areaSlope.TIF","MINIMUM"))) == 0:
                print "Only one pixel with a slope above "+str(areaSlope)+", iteration skipped."
            else: 
                seedSlope = minMean+minSD 
                print 'seedSlope = ' + str(seedSlope)
                
                # Isolate slope values above areaSlope 
                outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(seedSlope))
                outSetNull.save("del_seedSlope.TIF")

                # Exit process if no cliffs exist
                nocliff = arcpy.GetRasterProperties_management(Int("del_seedSlope.TIF"), "ALLNODATA")
                if int(str(nocliff)) == 1:
                    print "No seed area with a slope above "+str(seedSlope)+"."
                else:                    
                    # to int speeds up computation time
                    outInt = Int("del_areaSlope.TIF")
                    outInt.save("del_minSlopeInt.TIF")
                    outInt = Int("del_seedSlope.TIF")
                    outInt.save("del_seedSlopeInt.TIF")                  
                        
                    arcpy.RasterToPolygon_conversion("del_minSlopeInt.TIF", "del_minCliffSlope.shp", "NO_SIMPLIFY", "VALUE")
                    arcpy.AddField_management("del_minCliffSlope.shp", "value", "SHORT", 1, "", "", "", "", "")
                    arcpy.Dissolve_management("del_minCliffSlope.shp", "del_minCliff_dissolve.shp", "value")
                    arcpy.MultipartToSinglepart_management("del_minCliff_dissolve.shp", "del_minCliff_explode.shp")
                    arcpy.AddField_management("del_minCliff_explode.shp",'Area','FLOAT')
                    rows = arcpy.UpdateCursor("del_minCliff_explode.shp")
                    for row in rows:
                        areacliff = row.shape.area
                        row.Area = areacliff 
                        rows.updateRow(row)
                    del row, rows
                    arcpy.CopyFeatures_management("del_minCliff_explode.shp", "min"+str("%02d" % (minSlope,))+"_CliffArea.shp")
                    
                    # skinny/non-skinny fix for ending iteration. 0 = no skip, 1 = skip
                    skip_iter = 0 
                    
                    # skinny ice cliffs, does not include ice cliff end extension to speed up computations
                    if skinny == 'true':
                        if arcpy.management.GetCount("del_minCliff_explode.shp")[0] == "0":
                            skip_iter = 1
                            print "No area within del_minCliff_explode.shp, skinny iteration skipped."
                        else:
                            # "_FinalCliffShape.shp" and "_cliffArea.shp" are the same if skinny == true
                            arcpy.CopyFeatures_management("del_minCliff_explode.shp", "min"+str("%02d" % (minSlope,))+"area"+str(int(areaSlope))+"_FinalCliffShape.shp")
                            # copy working .shp, used below
                            arcpy.CopyFeatures_management('del_minCliff_explode.shp', 'del_lineAndArea_area.shp')
                            arcpy.CalculateAreas_stats('del_minCliff_explode.shp', 'del_lineAndArea_area.shp')
                            arcpy.MakeFeatureLayer_management('del_lineAndArea_area.shp', 'tempLayer')
                            expression = 'F_AREA <=' + str((pixel**2)*A_min)
                            arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression)
                            arcpy.DeleteFeatures_management('tempLayer')
                            arcpy.Delete_management('tempLayer')

                    if skinny == 'false':    
                        # buffer in/out area to break up attached features
                        arcpy.Buffer_analysis("del_minCliff_explode.shp", "del_extendLineBuffer.shp", (pixel/2)-0.1, "FULL", "ROUND", "NONE")
    
                        # Generate ice cliff centerlines from Voronoi cells
                        if arcpy.management.GetCount("del_extendLineBuffer.shp")[0] == "0":
                            arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_dissolve.shp")
                            skip_iter = 1
                            print "No area within the criteria defined by seed area value "+str(seedSlope)+", iteration stopped before centerlines."
                        else:
                            arcpy.FeatureToLine_management("del_extendLineBuffer.shp","del_line.shp","","ATTRIBUTES")
                            arcpy.Densify_edit("del_line.shp", "","5", "", "")
                            arcpy.FeatureVerticesToPoints_management ("del_line.shp", "del_verti.shp", "ALL")
                            arcpy.CreateThiessenPolygons_analysis("del_verti.shp","del_voronoiCells.shp" ,"ONLY_FID") 
                            arcpy.RepairGeometry_management("del_voronoiCells.shp")
                            
                            #use geodatabase here due to unexpected error: "Invalid Topology [Duplicate segment.]"
                            arcpy.CreateFileGDB_management(workspace, "fGDB.gdb")
                            fgdb = workspace+"\\fGDB.gdb"
                            #arcpy.env.workspace = fgdb
                            arcpy.Clip_analysis(workspace+"\\del_voronoiCells.shp", workspace+"\\del_extendLineBuffer.shp", fgdb+"\\shp","")
                            arcpy.FeatureToLine_management(fgdb+"\\shp", workspace+"\\del_toLine.shp", "", attributes="ATTRIBUTES")
                            arcpy.Delete_management(fgdb)
                            #arcpy.env.workspace = workspace
                            
                            #arcpy.FeatureToLine_management("del_voronoiCellsClip.shp","del_toLine.shp", "", attributes="ATTRIBUTES")
                            arcpy.MakeFeatureLayer_management("del_toLine.shp", "tempLayer", "", "", "")
                            arcpy.SelectLayerByLocation_management("tempLayer", "CROSSED_BY_THE_OUTLINE_OF","del_minCliff_explode.shp","","NEW_SELECTION")
                            arcpy.DeleteFeatures_management("tempLayer")
                            arcpy.Delete_management("tempLayer")
                            arcpy.Intersect_analysis(["del_toLine.shp",'del_minCliff_explode.shp'],"del_lineIntersect.shp")
                            arcpy.Dissolve_management("del_lineIntersect.shp", "del_toLineDis.shp", "", "", "SINGLE_PART", "DISSOLVE_LINES")
                            arcpy.UnsplitLine_management("del_toLineDis.shp","del_unsplit.shp","Id")
                            arcpy.MakeFeatureLayer_management("del_unsplit.shp", "tempLayer2", "", "", "")
                            arcpy.SelectLayerByLocation_management("tempLayer2", "BOUNDARY_TOUCHES","del_minCliff_explode.shp","","NEW_SELECTION")
                            arcpy.DeleteFeatures_management("tempLayer2")
                            arcpy.Delete_management("tempLayer2")
                            arcpy.cartography.SimplifyLine("del_unsplit.shp","del_clineSimpExp.shp","POINT_REMOVE",10)
                            arcpy.AddField_management("del_clineSimpExp.shp", "value", "SHORT", 1, "", "", "", "", "")
                            arcpy.Dissolve_management("del_clineSimpExp.shp", "del_clineSimp.shp", "value")
                            arcpy.TrimLine_edit("del_clineSimp.shp", "8 meters", "KEEP_SHORT")
                            arcpy.CopyFeatures_management("del_unsplit.shp", "min"+str("%02d" % (minSlope,))+"_Centerlines.shp")
                            
                            #refine centerline for final map
                            if arcpy.management.GetCount("del_clineSimp.shp")[0] == "0":
                                arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_dissolve.shp")
                                skip_iter = 1
                                print "No area big enough to generate a centerline, iteration skipped."
                            else:                        
                            
                                # extend lines to capture cliff ends
                                count = 0
                                print "Extend line started..."
                                
                                jlist = [(pixel/2)-0.1] * int(round(L_e/(pixel/2)))
                                for j in jlist:
                                    #create buffer out to set the limit a line will be extended to
                                    arcpy.Buffer_analysis("del_clineSimp.shp", "del_clineSimpBuff1.shp", j, "FULL", "ROUND", "ALL")
                                    arcpy.PolygonToLine_management("del_clineSimpBuff1.shp","del_clineSimpBuff1line.shp")
                                    #merge centerline and bufferline
                                    arcpy.Merge_management(["del_clineSimp.shp","del_clineSimpBuff1line.shp"], "del_clineSimpBuff1merge_dis.shp")
                                    arcpy.Delete_management("del_clineSimp.shp")
                                    print "Extend line "+str(count)+" started..."
                                    arcpy.MultipartToSinglepart_management("del_clineSimpBuff1merge_dis.shp", "del_clineSimpBuff1merge.shp")
                                    arcpy.MakeFeatureLayer_management("del_clineSimpBuff1merge.shp", "lineLayer", "", "", "")
                                    arcpy.SelectLayerByLocation_management("lineLayer", "SHARE_A_LINE_SEGMENT_WITH", "del_clineSimpBuff1.shp", "", "NEW_SELECTION", "INVERT")
                                    arcpy.ExtendLine_edit("del_clineSimpBuff1merge.shp", str(j+1)+" meters", "EXTENSION")
                                    
                                    #select share a line segment with buffer to remove buffer
                                     
                                    arcpy.SelectLayerByLocation_management("lineLayer", "SHARE_A_LINE_SEGMENT_WITH", "del_clineSimpBuff1.shp", "", "NEW_SELECTION") 
                                    arcpy.DeleteFeatures_management("lineLayer")
                                    arcpy.Delete_management("lineLayer")
                                    arcpy.CopyFeatures_management("del_clineSimpBuff1merge.shp", "del_clineSimp.shp")
                                    arcpy.Delete_management("del_clineSimpBuff1.shp")
                                    arcpy.Delete_management("del_clineSimpBuff1line.shp")
                                    arcpy.Delete_management("del_clineSimpBuff1merge.shp")
                                    count = count + j                                
                                del j, jlist
        
                                #remove last short ribs with a lenght threhold then reattach centerlines that may have been split
                                # calculate lenght of each centerline
                                if arcpy.management.GetCount("del_clineSimp.shp")[0] == "0":
                                    arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_explode.shp")
                                    skip_iter = 1
                                    print "Centerline shape empty, iteration skipped."
                                else:
                                    arcpy.AddField_management("del_clineSimp.shp",'L','FLOAT')
                                    rows = arcpy.UpdateCursor("del_clineSimp.shp")
                                    for row in rows:
                                        areacliff = row.shape.length
                                        row.L = areacliff 
                                        rows.updateRow(row)
                                    del row, rows
                                    arcpy.CopyFeatures_management("del_clineSimp.shp", "min"+str("%02d" % (minSlope,))+"_extendedCenterlines.shp")
                                    
                                    # buffer out centerlines to capture end area removed in earlier buffer
                                    arcpy.Buffer_analysis("del_clineSimp.shp", "del_CliffCenterlineOut.shp", ((alpha*pixel*(2**(1/2)))/2), "FULL", "ROUND", "NONE")
            
                                    # define area with a slope less than that which defined "del_minCliff_dissolve.shp"
                                    edgeAreaSlope = areaSlope-beta_e
                                    print "Edge area defined by slope "+str(edgeAreaSlope)
                                    outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(edgeAreaSlope))
                                    outSetNull.save("del_edgeSlope.TIF") 
                                   
                                    outInt = Int("del_edgeSlope.TIF")
                                    outInt.save("del_edgeSlopeInt.TIF")                    
                                    arcpy.RasterToPolygon_conversion("del_edgeSlopeInt.TIF", "del_edgeAreaSlope.shp", "NO_SIMPLIFY", "VALUE")
                                    arcpy.AddField_management("del_edgeAreaSlope.shp", "value", "SHORT", 1, "", "", "", "", "")
                                    arcpy.Dissolve_management("del_edgeAreaSlope.shp", "del_edgeAreaSlope_dissolve.shp", "value")
                                    arcpy.CopyFeatures_management("del_edgeAreaSlope_dissolve.shp", "min"+str("%02d" % (minSlope,))+"_edgeArea.shp")
                                    arcpy.Intersect_analysis (["del_edgeAreaSlope_dissolve.shp", "del_CliffCenterlineOut.shp"], "del_betaF_edgeArea.shp")
                        
                                    # merge buffered lines with buffered area                    
                                    arcpy.Merge_management(["del_betaF_edgeArea.shp", "del_minCliff_explode.shp"], "del_lineAndArea.shp")
                                    arcpy.AddField_management("del_lineAndArea.shp", "valueDis", "SHORT", 1, "", "", "", "", "")                    
                                    arcpy.Dissolve_management("del_lineAndArea.shp", "del_lineAndArea_dissolve1.shp", "valueDis")
                                    arcpy.RepairGeometry_management("del_lineAndArea_dissolve1.shp")
                                    # fill holes and remove shapes less than one pixel to avoid error from buffer tool
                                    arcpy.MultipartToSinglepart_management("del_lineAndArea_dissolve1.shp", "del_lineAndArea_explode1.shp")
                                    arcpy.CalculateAreas_stats("del_lineAndArea_explode1.shp", 'del_lineAndArea_area1.shp')
                                    arcpy.MakeFeatureLayer_management('del_lineAndArea_area1.shp', 'tempLayer')
                                    expression = 'F_AREA <' + str(pixel**2) # m2
                                    arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression)
                                    arcpy.DeleteFeatures_management('tempLayer')
                                    arcpy.Delete_management('tempLayer')
                                    arcpy.cartography.AggregatePolygons('del_lineAndArea_area1.shp', "del_lineAndArea_dissolve.shp", 1, 0, pixel**2, 'NON_ORTHOGONAL') 
                                                       
                                    arcpy.RepairGeometry_management("del_lineAndArea_dissolve.shp")
                                    # buffer in to reomve sliver geometries and out to make a diagonal set of single pixel shapes one feature
                                    arcpy.Buffer_analysis("del_lineAndArea_dissolve.shp", "del_lineAndArea_dissolveSmallBufferIn.shp", -0.5, "FULL", "ROUND", "ALL")
                                    arcpy.Buffer_analysis("del_lineAndArea_dissolveSmallBufferIn.shp", "del_lineAndArea_dissolveSmallBuffer.shp", 1, "FULL", "ROUND", "ALL")
                                    arcpy.MultipartToSinglepart_management("del_lineAndArea_dissolveSmallBuffer.shp", "del_lineAndArea_explode.shp")
                                    arcpy.CalculateAreas_stats('del_lineAndArea_explode.shp', 'del_lineAndArea_area.shp')
                                    arcpy.MakeFeatureLayer_management('del_lineAndArea_area.shp', 'tempLayer')
                                    expression = 'F_AREA <=' + str((pixel**2)*A_min)
                                    arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression)
                                    arcpy.DeleteFeatures_management('tempLayer')
                                    arcpy.Delete_management('tempLayer')
                                    
                                    if arcpy.management.GetCount("del_lineAndArea_area.shp")[0] == "0":
                                        print "del_lineAndArea_area.shp empty, iteration stopped."
                                        skip_iter = 1
                                    else:
                                        arcpy.AddField_management("del_lineAndArea_area.shp", "value", "SHORT", 1, "", "", "", "", "")
                                        arcpy.CopyFeatures_management('del_lineAndArea_area.shp', "min"+str("%02d" % (minSlope,))+"area"+str(int(areaSlope))+"_FinalCliffShape.shp")                         
                    if skip_iter == 0:
                        # Build a piecewise cliff-probability surface from the slope raster:
                        #   slope <  minSlope              -> probability 0
                        #   minSlope <= slope <= seedSlope -> linear ramp minProb..1
                        #   slope >  seedSlope             -> probability 1
                        # Each piece is masked out of del_slope.TIF with SetNull and
                        # mosaicked back together at the end.
                        # CDF for values between minSlope and maxSlope
                        # Piece 1: cells below minSlope; multiplied by 0.0 -> probability 0.
                        outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE >= "+ str(minSlope))
                        outSetNull.save("del_min.TIF")
                        arcpy.RasterToFloat_conversion("del_min.TIF", "del_min.flt")
                        minsl = Raster('del_min.flt')
                        slopemin = minsl*0.0
                        slopemin.save('del_minSl.TIF')

                        # Piece 2: cells in [minSlope, seedSlope]; two SetNull passes keep
                        # only that band, then a linear map y = s*x + b is applied with
                        # s = (1-minProb)/(seedSlope-minSlope) so that the value is
                        # minProb at minSlope and exactly 1 at seedSlope.
                        outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE > "+ str(seedSlope))
                        outSetNull = SetNull(outSetNull, outSetNull, "VALUE < "+ str(minSlope))
                        outSetNull.save("del_mid.TIF")
                        arcpy.RasterToFloat_conversion("del_mid.TIF", "del_mid.flt")
                        midsl = Raster('del_mid.flt')
                        b = (1-(((1-minProb)/(seedSlope-minSlope))*seedSlope))
                        slopemid = (((1-minProb)/(seedSlope-minSlope))*midsl)+b
                        arcpy.env.snapRaster = dem
                        slopemid.save('del_midSl.TIF')
                        arcpy.env.snapRaster = dem

                        # Piece 3: cells above seedSlope; *0.0+1.0 -> probability 1.
                        outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(seedSlope))
                        outSetNull.save("del_max.TIF")
                        arcpy.RasterToFloat_conversion("del_max.TIF", "del_max.flt")
                        maxsl = Raster('del_max.flt')
                        slopemax = maxsl*0.0+1.0
                        arcpy.env.snapRaster = dem
                        slopemax.save('del_maxSl.TIF')
                        arcpy.env.snapRaster = dem

                        # Recombine the three mutually exclusive pieces into one raster.
                        arcpy.MosaicToNewRaster_management("del_minSl.TIF;del_midSl.TIF;del_maxSl.TIF", workspace, "del_cliffProbabilitySlope.TIF", "", "32_BIT_FLOAT", "", "1", "LAST","FIRST")
                        arcpy.env.snapRaster = dem

                        # extract cliff probability and apply reduction factor to area outside of buffer.shp
                        if arcpy.management.GetCount("del_lineAndArea_area.shp")[0] == "0":
                            print "del_lineAndArea_area.shp is empty, did not create: CliffProbability_betai" + str("%02d" % (int(minSlope),)) + "betaA"  + str(int(areaSlope))+".TIF"
                        else:  
                            # Probability inside the cliff outlines is kept as-is...
                            outExtractSlope = ExtractByMask("del_cliffProbabilitySlope.TIF", "del_lineAndArea_area.shp")
                            outExtractSlope.save("del_final_cliffs_found.TIF")
                            
                            # ...while everywhere else it is scaled down by the factor phi,
                            # then both are mosaicked (FIRST wins, so the unreduced values
                            # inside the outlines take precedence).
                            arcpy.RasterToFloat_conversion("del_cliffProbabilitySlope.TIF", "del_CliffProbabilitySlope.flt")
                            CliffProbabilitySlope = Raster('del_CliffProbabilitySlope.flt')
                            CliffProbabilitySlopeREDUCED = CliffProbabilitySlope*phi
                            arcpy.env.snapRaster = dem
                            CliffProbabilitySlopeREDUCED.save('del_CliffProbabilitySlopeREDUCED.TIF')
            
                            arcpy.MosaicToNewRaster_management("del_final_cliffs_found.TIF;del_CliffProbabilitySlopeREDUCED.TIF", workspace, "CliffProbability_betai" + str("%02d" % (int(minSlope),)) + "betaA"  + str(int(areaSlope))+".TIF", "", "32_BIT_FLOAT", "", "1", "FIRST","FIRST")
                            arcpy.env.snapRaster = dem
                            
                            # Release raster locks so the del_* files can be cleaned up later.
                            del CliffProbabilitySlope
                            del CliffProbabilitySlopeREDUCED
                                                       
                        del minsl
                        del midsl
                        del maxsl


                ## ----------------------------------
                ## Compute percent cliff in total spatial domain

                cliff_area_sum = 0
                debris_area_sum = 0
                Perc_Cliff = 0
                # Total debris-covered area: CalculateAreas_stats writes an F_AREA
                # field (map units squared) that is summed over all polygons.
                arcpy.CalculateAreas_stats(debarea_iteration, 'del_debris_area.shp')
                with arcpy.da.SearchCursor('del_debris_area.shp', ['F_AREA']) as cursor:
                    for row in cursor:
                        debris_area_sum += row[0]                
                                
                if os.path.isfile(workspace+'\\del_lineAndArea_area.shp') == False:
                    print "'del_lineAndArea_area.shp'does not exist."
                elif arcpy.management.GetCount('del_lineAndArea_area.shp')[0] == "0":
                    print "No area within 'del_lineAndArea_area.shp'."
                else:
                    # Sum the cliff polygon areas and express them as a percentage
                    # of the debris-covered area.
                    # NOTE(review): raises ZeroDivisionError if debris_area_sum is 0
                    # (empty debarea_iteration) — confirm this cannot happen upstream.
                    with arcpy.da.SearchCursor('del_lineAndArea_area.shp', ['F_AREA']) as cursor:
                        for row in cursor:
                            cliff_area_sum += row[0]
                    Perc_Cliff = (cliff_area_sum/debris_area_sum)*100
                    # Dissolve the cliff polygons into one shapefile whose name encodes
                    # the current (minSlope, areaSlope) parameter combination, then stamp
                    # the summary statistics onto every dissolved feature.
                    arcpy.Dissolve_management("del_lineAndArea_area.shp", 'cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp', "value")
                    arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','minSlope','FLOAT')
                    arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Area_Cliff','FLOAT')
                    arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Area_Deb','FLOAT')
                    
                    arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Perc_Cliff','FLOAT')
                    rows = arcpy.UpdateCursor('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp')
                    for row in rows:
                        row.setValue('Area_Cliff', cliff_area_sum)
                        row.setValue('Area_Deb', debris_area_sum)
                        row.setValue('minSlope', minSlope)
                        row.setValue('Perc_Cliff', Perc_Cliff)
                        rows.updateRow(row)
                    # Release the update cursor so the shapefile lock is dropped.
                    del row, rows
                                     
                    print 'IceCliffLocation script [minSlope: ' + str("%02d" % (int(minSlope),)) + ' areaSlope: ' + str(int(areaSlope))+ '] done...'
                                         
        rasterList = arcpy.ListRasters("*del*")
        for raster in rasterList:
            arcpy.Delete_management(raster)
        del raster
        del rasterList

        fcList = arcpy.ListFeatureClasses("*del*")
        for fc in fcList:
            arcpy.Delete_management(fc)
        del fc
        del fcList

        print "intermediate files deleted"
            
    # Drop the loop variables from the parameter sweep above.
    del minSlope
    del n
    
    # When the workspace is the 'Final' folder the optimized solution has
    # already been chosen; otherwise merge all per-parameter cliff maps and
    # fit a curve to (minSlope, Perc_Cliff) to find the optimum.
    if str(workspace.split("\\")[-1]) == 'Final':
        print "Script complete"        
    else:
        initialSlope_doubles = []
        percentCliffs_doubles = []
        initialSlope = []
        percentCliffs = []
        xfit = []
        yfit = []
        fcList = []
        arr = []
        fcList = arcpy.ListFeatureClasses("*cliffMap*")
        arcpy.Merge_management(fcList, "mergedSolutions.shp")
        arr = arcpy.da.TableToNumPyArray("mergedSolutions.shp", ('Perc_Cliff','minSlope'))
        # NOTE(review): this deletes "del_mergedSolutions.shp" although the
        # file just created is "mergedSolutions.shp" — verify which name is
        # intended; as written the merged file is left on disk.
        arcpy.Delete_management("del_mergedSolutions.shp")
        initialSlope_doubles = [row[1] for row in arr]
        percentCliffs_doubles = [row[0] for row in arr]
        
        #remove rows that are repeated due to (possible) earlier tiled dissolve from insufficient memory 
        # Keep a value only when it differs from its predecessor.
        # NOTE(review): the (i-1) % len wrap means element 0 is compared with
        # the LAST element, so a sequence whose first and last values match
        # drops its first point — confirm this is acceptable.
        for i,j in enumerate(initialSlope_doubles):
            if j != initialSlope_doubles[(i-1) % len(initialSlope_doubles)]:
                initialSlope.append(j)
        del i,j
        for i,j in enumerate(percentCliffs_doubles):
            if j != percentCliffs_doubles[(i-1) % len(percentCliffs_doubles)]:
                percentCliffs.append(j)
        del i,j
                
        def func(x, a, b, c):
            # Gaussian model fitted to ice cliff fraction vs. slope threshold:
            # a * exp(-((x - b) / c)^2). Accepts scalars or numpy arrays.
            z = (x - b) / c
            return a * np.exp(-np.square(z))
        # Fit the Gaussian model to the (initialSlope, percentCliffs) samples.
        try:
            popt, pcov = curve_fit(func,initialSlope,percentCliffs, maxfev=1000)
        except RuntimeError:
            # Fit did not converge within maxfev evaluations: show the raw
            # points so the user can inspect them.
            fig = plt.figure()
            ax1 = fig.add_subplot(111)
            ax1.plot(initialSlope, percentCliffs, 'ko');plt.draw()
            fig.show()            
            print("Error - curve_fit failed")
            # NOTE(review): execution falls through with 'popt' unbound, so the
            # yfit line below raises NameError — consider re-raising here.
        # Sample the fitted curve on a dense grid for plotting/optimization.
        xfit = np.linspace(min(initialSlope), max(initialSlope), 100)
        yfit = popt[0]*np.exp(-((xfit-popt[1])/popt[2])**2)
        
        def secondDer(x):
            # Analytic second derivative of the fitted Gaussian func(x, *popt):
            # a*exp(-u^2/c^2) * (4u^2/c^4 - 2/c^2) with u = x - b.
            # Relies on 'popt' from the enclosing scope, so it is only valid
            # after curve_fit above has succeeded.
            return popt[0]*(((4*(x-popt[1])**2*np.exp(-(x-popt[1])**2/popt[2]**2))/popt[2]**4)-((2*np.exp(-(x-popt[1])**2/popt[2]**2))/popt[2]**2))
        # Evaluate the curvature at integer slope values 0..90 degrees.
        # (Previous code had junk expressions `a3.append(i) == i`, which
        # compared append's None return value to i and discarded the result;
        # the comparison is dropped here. max(a2) is also hoisted out of its
        # loop instead of being recomputed on every iteration.)
        a1 = [i for i in xrange(91)]
        a2 = secondDer(a1)
        #the next 3 for loops and a[x] variables define 1 of the 2 points to derive the optimization line.
        a3 = []
        a4 = []
        # values of second derivative where slope is below 'gamma'
        for i, j in enumerate(a2):
            if j <= gamma:
                a3.append(i)
        # find the steepest point (in the middle of the side of the bell)
        peak = max(a2)
        for i, j in enumerate(a2):
            if j == peak:
                m = i
        # take only values to the right of 'm' in case the curve is flat at 0 slope
        for i in a3:
            if i > m:
                a4.append(i)
        del i, j

        ax = min(a4)
        # y-value of the fitted curve at the flat-region anchor point ax.
        ay = popt[0]*np.exp(-((ax-popt[1])/popt[2])**2)
        
        #find max of bell for first point in optmization line
        yfit_array = array(yfit)        
        ftup = (np.where(yfit_array == max(yfit_array)))
        f = int(ftup[0][0]) # index of max yfit (first occurrence; indexing the
                            # where() result avoids int() on a whole array)
                
        # d = distance from fit Equation 2 (Herreid and Pellicciotti, 2018) to line definded by ((xfit[0],yfit[0]),(ax,yx))
        # BUGFIX: the exponent was (1/2), which is integer division in Python 2
        # and evaluates to 0, so the denominator was 1 and d was not a true
        # perpendicular distance. Using 0.5 restores |Ax+By+C|/sqrt(A^2+B^2).
        # (argmax below is unaffected by the constant factor, so 'crit' is
        # unchanged; only the magnitude of d is corrected.)
        d = abs((yfit[f]-ay)*xfit-(xfit[f]-ax)*yfit+xfit[f]*ay-yfit[f]*ax)/((yfit[f]-ay)**2+(xfit[f]-ax)**2)**0.5
        # crit is the index of the longest d
        crit = np.where(d == max(d))
        # Project the critical point onto the anchor line y = m*x + b to get
        # the foot of the perpendicular (x_crit, y_crit) for plotting.
        m = (yfit[f]-ay)/(xfit[f]-ax)
        b = yfit[f]-m*xfit[f]
        x_crit = (xfit[crit]+m*yfit[crit]-m*b)/(m**2+1)
        y_crit = m*((xfit[crit]+m*yfit[crit]-m*b)/(m**2+1))+b
        
        # Plot the sample points, the fitted curve, the anchor line from the
        # curve maximum to the flat-region point, and the perpendicular to the
        # critical point.
        fig = plt.figure()
        ax1 = fig.add_subplot(111)
        ax1.plot(initialSlope, percentCliffs, 'ko'); plt.plot([xfit[f],ax],[yfit[f],ay]); plt.plot([xfit[crit],x_crit],[yfit[crit],y_crit]); plt.plot(xfit,yfit);plt.xlim(0, 100);plt.ylim(0, 100);plt.gca().set_aspect('equal', adjustable='box');plt.draw()
        ax1.set_xlabel(r'$\mathrm{\beta_i (^\circ)}$')
        ax1.set_ylabel('Ice cliff fraction (%)')
        fig.show()
        #fig.canvas.flush_events()
        # Brief pause so the non-blocking figure window has time to render.
        import time
        time.sleep(1)
        #plt.pause(0.01)
        #plt.waitforbuttonpress()
        
        #save data used to make figure
        np.save(workspace+'\\figureData', (initialSlope, percentCliffs,[xfit[f],ax],[yfit[f],ay],[xfit[crit],x_crit],[yfit[crit],y_crit],xfit,yfit))

        # Publish the optimized slope threshold for later runs.
        # NOTE(review): xfit[crit] is an array; float() only succeeds if the
        # maximum of d is unique — confirm ties cannot occur.
        IceCliffLocation.minSlope = float(xfit[crit])
        # Build one row of the FSA/ADA overlap matrix: for each ADA, the
        # fraction of the current FSA's area covered by its intersection.
        arcpy.MakeFeatureLayer_management(FSAselection, tempFSAlyr)

        # FSA area; the cursor overwrites 'area' each iteration, so the value
        # kept is that of the last row in FSAselection.
        area = 0
        for a in arcpy.da.SearchCursor(FSAselection, "Shape_Area"):
            area = float(a[0])

        # Iterates through the ADAs that intersect with the currently selected FSA:    
        for row in arcpy.SearchCursor(ADAselection):
            adaentry_fid = row.fid
            arcpy.MakeFeatureLayer_management(ADAselection, tempADAlyr)
            # The selection is applied to tempADAlyr itself, so the Intersect
            # below operates only on this single ADA feature.
            tempselect = arcpy.SelectLayerByAttribute_management(tempADAlyr, "NEW_SELECTION", "FID = "+str(adaentry_fid))
            inputFeatures = [tempFSAlyr, tempADAlyr]
            try:
                arcpy.Intersect_analysis(inputFeatures, tempIntersection, "ALL", "", "INPUT")
                # Only record a ratio when exactly one intersection polygon exists.
                if arcpy.management.GetCount(tempIntersection)[0] == "1":
                    arcpy.CalculateAreas_stats(tempIntersection, tempIntersectionWithArea)
                    for a in arcpy.da.SearchCursor(tempIntersectionWithArea, "F_AREA"):                        
                        matrix[int(adaentry_fid)] = a[0]/area
            except Exception as e:
                # Best-effort: a failed intersect leaves this matrix cell untouched.
                # NOTE(review): under Python 2 this print emits a tuple repr,
                # e.g. ('Failed operation: ', ...), not a formatted message.
                print("Failed operation: ", e)            
            # Drop per-ADA temporaries before the next iteration.
            arcpy.Delete_management(tempADAlyr)
            arcpy.Delete_management(tempIntersection)
            arcpy.Delete_management(tempIntersectionWithArea)
        arcpy.Delete_management(tempFSAlyr)
        # Append the completed row to the output CSV.
        writer.writerow(matrix)

# Report wall-clock start/end times for the relationship-matrix run.
# BUGFIX: this file uses Python 2 print statements elsewhere, so the previous
# two-argument print(...) calls emitted tuple reprs like ('Start: ', '...').
# Single-expression prints below behave identically under Python 2 and 3.
end = time.strftime('%X %x %Z')
print("Relationship matrix processing times:")
print("Start: " + str(start))
print("End: " + str(end))