Example 1
def clip_to_shape(rasterlist, shapefile, outdir = False):
    """
     Simple batch clipping script to clip rasters to shapefiles.

     Inputs:
       rasterlist      single file, list of files, or directory for which to clip rasters
       shapefile       shapefile to which rasters will be clipped
       outdir          desired output directory. If no output directory is specified, the
                       new files will simply have '_c' added as a suffix.
    """

    rasterlist = enf_rastlist(rasterlist)

    # ensure the output directory exists
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    for raster in rasterlist:

        # create an output filename with a '_c' suffix
        outname = core.create_outname(outdir, raster, 'c')

        # perform a double clip: first with Clip_management (which preserves NoData values),
        # then with ExtractByMask from arcpy.sa, which honors the clipping geometry unlike the management tool.
        arcpy.Clip_management(raster, "#", outname, shapefile, "ClippingGeometry")
        out = ExtractByMask(outname, shapefile)
        out.save(outname)
        print("Clipped and saved: {0}".format(outname))

    return
Example 2
def filter_evi(land_cover_ras):
    evi_dir = r"G:\evi_extract"
    evi_files = glob.glob(os.path.join(evi_dir, "*.tif"))
    for evi_file in evi_files:
        print("Extracting {} by mask".format(evi_file))
        filtered_evi = ExtractByMask(evi_file, land_cover_ras)
        filtered_evi.save(os.path.join(env.workspace, "evis_for_41", os.path.basename(evi_file)))
def rasterPercentAreas(feature, featureID, inRaster, uniqueRasterIDfield,
                       rasterValueField, fieldPrefix):
    try:
        #create results obj
        results = {}

        #define land use key value dictionary with all possible values
        for row in arcpy.da.SearchCursor(inRaster, uniqueRasterIDfield):
            results[fieldPrefix + str(row[0])] = 0

        #mask raster
        outExtractByMask = ExtractByMask(inRaster, feature)
        outExtractByMask.save('in_memory/mask.img')

        #get total cell count for percent area computation
        field = arcpy.da.TableToNumPyArray('in_memory/mask.img',
                                           rasterValueField,
                                           skip_nulls=True)
        total_count = field[rasterValueField].sum()

        #loop over masked raster rows
        for row in arcpy.da.SearchCursor(
                'in_memory/mask.img', [uniqueRasterIDfield, rasterValueField]):

            #get values
            value, count = row
            percentArea = round((float(count) / total_count) * 100, 5)
            results[fieldPrefix + str(value)] = percentArea

        data = ResultObj(featureID, results)
        return data

    except:
        tb = format_exc().split('\n')
        raise Exception(tb)
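
A hypothetical invocation sketch for rasterPercentAreas above: the basin shapefile, land-cover raster, and field names are placeholders, and the Spatial Analyst extension is assumed to be checked out.

import arcpy

# Hypothetical usage sketch (placeholder paths and field names)
with arcpy.da.SearchCursor(r"C:\data\basins.shp", ['SHAPE@', 'BasinID']) as cursor:
    for shape, basin_id in cursor:
        # percent area of each land-cover class inside the basin polygon
        result = rasterPercentAreas(shape, basin_id, r"C:\data\landcover.img",
                                    'VALUE', 'COUNT', 'lc_')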
Example 4
def clip_to_shape(rasterlist, shapefile, outdir = False):
    """
    Simple batch clipping script to clip rasters to shapefiles.

    :param rasterlist:      single file, list of files, or directory for which to clip rasters
    :param shapefile:       shapefile to which rasters will be clipped
    :param outdir:          desired output directory. If no output directory is specified, the
                            new files will simply have '_c' added as a suffix.

    :return output_filelist:    list of files created by this function.
    """

    rasterlist = enf_rastlist(rasterlist)
    output_filelist = []

    # ensure the output directory exists
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    for raster in rasterlist:

        # create an output filename with a '_c' suffix
        outname = core.create_outname(outdir, raster, 'c')

        # perform a double clip: first with Clip_management (which preserves NoData values),
        # then with ExtractByMask from arcpy.sa, which honors the clipping geometry unlike the management tool.
        arcpy.Clip_management(raster, "#", outname, shapefile, "ClippingGeometry")
        out = ExtractByMask(outname, shapefile)
        out.save(outname)
        output_filelist.append(outname)
        print("Clipped and saved: {0}".format(outname))

    return output_filelist
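
A minimal usage sketch for clip_to_shape above; the raster directory, clipping shapefile, and output folder are placeholder paths, and the module's own imports (arcpy, core, enf_rastlist) are assumed to be in place.

# Hypothetical usage sketch (placeholder paths)
rasters = r"C:\data\landsat_scenes"      # directory of rasters to clip
boundary = r"C:\data\study_area.shp"     # clipping shapefile
clipped_files = clip_to_shape(rasters, boundary, outdir=r"C:\data\clipped")
for path in clipped_files:
    print(path)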
Example 5
def extract_by_mask(in_raster, in_mask, output_path):
    path_check(output_path)
    if os.path.exists(output_path):
        print("{} has already been created before".format(output_path))
        return
    print "Extracting {} by mask {}".format(output_path, in_mask)
    temp_output = ExtractByMask(in_raster, in_mask)
    temp_output.save(output_path)
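
A minimal call sketch for the helper above, assuming path_check creates the parent folder and the Spatial Analyst extension is licensed; all paths are placeholders.

import arcpy

# Hypothetical usage sketch (placeholder paths)
arcpy.CheckOutExtension("Spatial")
extract_by_mask(r"C:\data\dem.tif",
                r"C:\data\watershed.shp",
                r"C:\data\dem_watershed.tif")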
Example 7
def delresident(inpath, outpath):
    env.workspace = inpath
    for name in os.listdir(inpath):
        if name.endswith('.tif'):
            outfile = os.path.join(outpath, name)
            outExtractByMask = ExtractByMask(os.path.join(inpath, name),
                                             r"F:\EULUC\city_noresident.tif")
            outExtractByMask.save(outfile)
Example 8
def evi_raster_shrink(evi_raster_path, feature_points_path, radius):
    if not os.path.exists(
            os.path.join(env.workspace, "feature_points_buffer.shp")):
        arcpy.Buffer_analysis(feature_points_path,
                              "feature_points_buffer",
                              radius,
                              dissolve_option="ALL")
    out_raster = ExtractByMask(evi_raster_path, "feature_points_buffer.shp")
    out_raster.save("evi_raster_shrink.tif")
    return out_raster
Example 9
def recortar(entidadRecorte, lista_capas_gdb, save_path):
    """ Clips layers against the clip feature (entidadRecorte).
    lista_capas_gdb: list of layers to clip
    save_path: where to save the output
    """
    for capa in lista_capas_gdb:
        inRaster = capa
        inMaskData = entidadRecorte
        outExtractByMask = ExtractByMask(inRaster, inMaskData)
        outExtractByMask.save(os.path.join(ruta_geodb_trabajo, save_path))
        if debug:
            print("clipping {0}".format(capa))
            print("saving to {0}".format(save_path))
Example 10
def batch_clip_raster(inRaster, inMaskData, savepath):
    # Check out the ArcGIS Spatial Analyst extension license
    arcpy.CheckOutExtension("Spatial")
    cursor = arcpy.SearchCursor(inMaskData)
    for row in cursor:
        mask = row.getValue("Shape")
        name = row.getValue(
            "name")  # name the output by the value of the 'name' field
        print(name.encode('gbk'))
        outExtractByMask = ExtractByMask(inRaster, mask)
        outExtractByMask.save(
            os.path.join(savepath,
                         name.encode('gbk') + '.tif'))
        print(os.path.join(savepath, name.encode('gbk') + '.tif'))
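
A possible call to batch_clip_raster above, clipping one global raster by every polygon in a mask layer; the paths are placeholders and the mask layer is assumed to carry the 'name' field the cursor reads.

# Hypothetical usage sketch (placeholder paths)
batch_clip_raster(r"C:\data\worldclim_tmean.tif",
                  r"C:\data\countries.shp",
                  r"C:\data\clipped_by_country")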
def CreateTiff(inShapefile, fieldName, maskShapefile, outRaster):
    """"

    """
    outIDW = Idw(inShapefile, fieldName, 500, 2, arcpy.sa.RadiusVariable(8))
    outExtractByMask = ExtractByMask(outIDW, maskShapefile)
    arcpy.RasterToOtherFormat_conversion(outExtractByMask, outRaster, "TIFF")
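
A usage sketch for CreateTiff above; because RasterToOtherFormat_conversion treats its second argument as an output workspace, the last parameter here is a folder. The point shapefile, field name, and paths are placeholders, and the module is assumed to import arcpy and the arcpy.sa functions it uses.

import arcpy

# Hypothetical usage sketch (placeholder paths)
arcpy.CheckOutExtension("Spatial")
CreateTiff(r"C:\data\rain_gauges.shp", "RAIN_MM",
           r"C:\data\state_boundary.shp",
           r"C:\data\idw_output")   # folder used as the TIFF output workspace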
Example 12
    def restrictDomain(self, object, operation):
        """
        Restricts current instance's domain based on object's domain
        @param object: extent to which the field is restricted
        @param operation: valid options: "inside", "outside"
        """

        if operation == 'inside':

            name = "restDom_in_" + str(self.sObj)
            outputLocation = "in_memory\\" + name + ".tif"

            # extract by mask
            outRaster = ExtractByMask(self.filepath, object.filepath)
            CopyRaster_management(outRaster, outputLocation)
            restDom = utils.makeField(outputLocation)

        elif operation == 'outside':
            raise NotImplementedError("restrictDomain 'outside'")

        else:
            raise NotImplementedError(operation)

        # update cc instance's attributes
        desc = Describe(outputLocation)
        restDom.filepath = outputLocation
        restDom.domain = desc.extent
        restDom.filename = os.path.basename(outputLocation)

        return restDom
Example 13
def CreateMapStratum(mnh, emprise, TextRemap, idfield, outputFC, geodata):

    # Set the working environment
    arcpy.env.workspace = geodata
    arcpy.env.overwriteOutput = True
    disp = arcpy.AddMessage

    # Extract by mask
    disp("Extracting MNH ...")
    pathOutExtract = os.path.join(geodata,"OutExtract")
    OutExtract = ExtractByMask(mnh, emprise)
    OutExtract.save(pathOutExtract)

    # Reclassify
    disp("Reclassing ...")
    remap = TextToRemap(TextRemap)
    pathOutReclass = os.path.join(geodata,"Reclass")
    OutReclass = Reclassify(pathOutExtract,"Value", remap)
    OutReclass.save(pathOutReclass)

    # Convert to polygons
    disp("Vectorizing ...")
    arcpy.RasterToPolygon_conversion(pathOutReclass, "PolyRaster")

    # clip
    arcpy.Clip_analysis("PolyRaster", emprise, "PolyRaster_Clip")

    # identity
    arcpy.Identity_analysis("PolyRaster_Clip", emprise, "PolyRaster_Ident")

    # Dissolve
    disp("Dissolving ...")
    dissolveFields = [idfield, "grid_code"]
    arcpy.Dissolve_management("PolyRaster_Ident", outputFC, dissolveFields)

    # remove the grid_code field
    arcpy.AddField_management(outputFC,"Strate", "SHORT")
    arcpy.CalculateField_management(outputFC, "Strate", "!grid_code!","PYTHON")
    lfields = arcpy.ListFields(outputFC)
    arcpy.DeleteField_management(outputFC,"grid_code")


    # return the result:
    return outputFC
Example 14
def extract_raster(**kwargs):
    """
    Description: extracts a raster to a mask
    Inputs: 'work_geodatabase' -- path to a file geodatabase that will serve as the workspace
            'input_array' -- an array containing the target raster to extract (must be first) and the mask raster (must be second)
            'output_array' -- an array containing the output raster
    Returned Value: Returns a raster dataset
    Preconditions: the initial raster must be created from other scripts and the study area raster must be created manually
    """

    # Import packages
    import arcpy
    from arcpy.sa import ExtractByMask
    from arcpy.sa import Raster
    import datetime
    import time

    # Parse key word argument inputs
    work_geodatabase = kwargs['work_geodatabase']
    input_raster = kwargs['input_array'][0]
    mask_raster = kwargs['input_array'][1]
    output_raster = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Set workspace
    arcpy.env.workspace = work_geodatabase

    # Set snap raster and extent
    arcpy.env.snapRaster = mask_raster
    arcpy.env.extent = Raster(mask_raster).extent

    # Extract raster to study area
    print('\t\tPerforming extraction to study area...')
    iteration_start = time.time()
    extract_raster = ExtractByMask(input_raster, mask_raster)
    arcpy.management.CopyRaster(extract_raster, output_raster, '', '',
                                '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                'NONE', 'NONE', 'TIFF', 'NONE',
                                'CURRENT_SLICE', 'NO_TRANSPOSE')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\t\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t\t----------')
    out_process = f'\tSuccessfully extracted raster data to mask.'
    return out_process
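
A sketch of how the keyword-argument interface above might be called; the geodatabase and raster paths are placeholders.

# Hypothetical usage sketch (placeholder paths)
extract_kwargs = {
    'work_geodatabase': r"C:\data\work.gdb",
    'input_array': [r"C:\data\elevation.tif",      # target raster (first)
                    r"C:\data\study_area.tif"],    # mask raster (second)
    'output_array': [r"C:\data\elevation_studyarea.tif"]
}
print(extract_raster(**extract_kwargs))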
def testStatistics(feature, featureID, calculateStat):

    str900_all = "D:\\StreamStats\\ny_strgrid\\str900_all"
    ned10sl = "D:\\ned10\\output\\ned10sl_utm.img"

    #create results obj
    results = {}

    #arcpy.CopyFeatures_management(feature, "C:\\NYBackup\\NYFF2016\\LGSS_sites\\output\\sh_" + featureID + ".shp")

    # Process: Extract by Mask
    strExtractByMask = ExtractByMask(str900_all, feature)
    strExtractByMask.save("C:\\NYBackup\\NYFF2016\\LGSS_sites\\output\\str_" +
                          featureID + ".img")
    slExtractByMask = ExtractByMask(ned10sl, strExtractByMask)
    slExtractByMask.save("C:\\NYBackup\\NYFF2016\\LGSS_sites\\output\\sl_" +
                         featureID + ".img")
    value = arcpy.GetRasterProperties_management(slExtractByMask,
                                                 calculateStat).getOutput(0)
    value = round(float(value), 5)
    results[calculateStat] = value
    data = ResultObj(featureID, results)
    return data
def rasterStatistics(feature, featureID, inRaster, calculateStat):
    try:
        #create results obj
        results = {}

        #get values
        outExtractByMask = ExtractByMask(inRaster, feature)
        value = arcpy.GetRasterProperties_management(
            outExtractByMask, calculateStat).getOutput(0)

        value = round(float(value), 5)
        results[calculateStat] = value
        data = ResultObj(featureID, results)
        return data

    except:
        try:
            #check raster cell size against input
            cellsize = float(
                arcpy.GetRasterProperties_management(inRaster,
                                                     'CELLSIZEX').getOutput(0))
            print('in Except block, area check:', feature.area, cellsize**2)

            #get centroid value if first method failed
            value = arcpy.GetCellValue_management(
                inRaster,
                str(feature.centroid.X) + ' ' +
                str(feature.centroid.Y)).getOutput(0)
            value = round(float(value), 5)
            results[calculateStat] = value
            data = ResultObj(featureID, results)
            return data

        except:
            tb = format_exc()
            raise Exception(tb)
    iteration = iteration + 1
    arcpy.AddMessage(
        "--------------------------------------------------------------")
    arcpy.AddMessage("Iteration {0} is running!".format(iteration))

    # DEM difference [m]
    dh = Raster(DEM_master) - Raster(DEM_slave_after)

    # slope of the slave DEM [degree]
    slp = Slope(DEM_slave_after, "DEGREE", "1")

    # aspect of the slave DEM [degree]
    asp = Aspect(DEM_slave_after)

    # Mask 'dh' using the stable terrain polygon
    dh_mask = ExtractByMask(dh, OffGlacier)

    # Mask 'slp' and 'asp' using 'dh_mask' in order to keep same georeference as 'dh_mask'
    slp_mask = ExtractByMask(slp, dh_mask)
    asp_mask = ExtractByMask(asp, dh_mask)

    del dh, slp, asp

    # Raster to Array
    dh_mask_arr = arcpy.RasterToNumPyArray(dh_mask, nodata_to_value=-32768)
    slp_mask_arr = arcpy.RasterToNumPyArray(slp_mask, nodata_to_value=-32768)
    asp_mask_arr = arcpy.RasterToNumPyArray(asp_mask, nodata_to_value=-32768)

    del dh_mask, slp_mask, asp_mask

    # save "dh_mask" as csv file
Example 18
def create_buffered_tiles(**kwargs):
    """
    Description: creates buffered grid rasters
    Inputs: 'tile_name' -- a field name in the grid index that stores the tile name
            'distance' -- a string representing a number and units for buffer distance
            'work_geodatabase' -- a geodatabase to store temporary results
            'input_array' -- an array containing the input grid index and a clip area
            'output_folder' -- an empty folder to store the output tiles
    Returned Value: Returns a raster dataset for each grid in grid index
    Preconditions: grid index must have been generated using create_grid_indices
    """

    # Import packages
    import arcpy
    from arcpy.sa import ExtractByMask
    from arcpy.sa import Reclassify
    from arcpy.sa import RemapRange
    import datetime
    import os
    import time

    # Parse key word argument inputs
    tile_name = kwargs['tile_name']
    distance = kwargs['distance']
    work_geodatabase = kwargs['work_geodatabase']
    grid_index = kwargs['input_array'][0]
    snap_raster = kwargs['input_array'][1]
    output_folder = kwargs['output_folder']

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Use two thirds of the possible cores on the machine
    arcpy.env.parallelProcessingFactor = '66%'

    # Set workspace
    arcpy.env.workspace = work_geodatabase

    # Set the snap raster
    arcpy.env.snapRaster = snap_raster

    # Print initial status
    print(f'Extracting grid tiles from {os.path.split(grid_index)[1]}...')

    # Define fields for search cursor
    fields = ['SHAPE@', tile_name]
    # Initiate search cursor on grid index with defined fields
    with arcpy.da.SearchCursor(grid_index, fields) as cursor:
        # Iterate through each feature in the feature class
        for row in cursor:
            # Define an output and temporary raster
            buffer_feature = os.path.join(arcpy.env.workspace,
                                          'Grid_' + row[1] + '_Buffer')
            output_grid = os.path.join(output_folder,
                                       'Grid_' + row[1] + '.tif')

            # If tile does not exist, then create tile
            if arcpy.Exists(output_grid) == 0:
                print(
                    f'\tProcessing grid tile {os.path.split(output_grid)[1]}...'
                )
                iteration_start = time.time()
                # Define feature
                feature = row[0]
                # Buffer feature by user specified distance
                arcpy.analysis.Buffer(feature, buffer_feature, distance)
                # Extract snap raster to buffered tile feature
                extract_raster = ExtractByMask(snap_raster, buffer_feature)
                # Reclassify values to 1
                reclassify_raster = Reclassify(extract_raster, 'Value',
                                               RemapRange([[1, 100000, 1]]))
                # Copy raster to output
                arcpy.management.CopyRaster(reclassify_raster, output_grid, '',
                                            '', '0', 'NONE', 'NONE', '1_BIT',
                                            'NONE', 'NONE', 'TIFF', 'None')
                # If temporary feature class exists, then delete it
                if arcpy.Exists(buffer_feature) == 1:
                    arcpy.management.Delete(buffer_feature)
                # End timing
                iteration_end = time.time()
                iteration_elapsed = int(iteration_end - iteration_start)
                iteration_success_time = datetime.datetime.now()
                # Report success
                print(
                    f'\tOutput grid {os.path.split(output_grid)[1]} completed at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
                )
                print('\t----------')
            else:
                print(
                    f'\tOutput grid {os.path.split(output_grid)[1]} already exists...'
                )
                print('\t----------')

    # Return final status
    out_process = 'Completed creation of grid tiles.'
    return out_process
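
A sketch of a call to create_buffered_tiles under the documented keyword-argument convention; the tile-name field, buffer distance, and paths are placeholders.

# Hypothetical usage sketch (placeholder paths and field name)
tile_kwargs = {
    'tile_name': 'grid_name',
    'distance': '5000 Meters',
    'work_geodatabase': r"C:\data\work.gdb",
    'input_array': [r"C:\data\work.gdb\grid_index",   # grid index feature class
                    r"C:\data\snap_raster.tif"],      # clip area / snap raster
    'output_folder': r"C:\data\grid_tiles"
}
print(create_buffered_tiles(**tile_kwargs))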
Example 19
def format_climate_grids(**kwargs):
    """
    Description: extracts climate data to the grid
    Inputs: 'input_array' -- an array containing the study area raster (must be first), grid raster (must be second) and the climate raster (last)
            'output_array' -- an array containing the output climate raster for the grid
    Returned Value: Returns a raster dataset on disk containing the combined climate property for a grid
    Preconditions: requires an input grid raster and climate property raster
    """

    # Import packages
    import arcpy
    from arcpy.sa import ExtractByMask
    from arcpy.sa import Raster
    import datetime
    import time

    # Parse key word argument inputs
    study_area = kwargs['input_array'][0]
    grid_raster = kwargs['input_array'][1]
    climate_raster = kwargs['input_array'][2]
    output_raster = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Set snap raster and extent
    arcpy.env.snapRaster = study_area
    arcpy.env.cellSize = 'MINOF'
    arcpy.env.extent = Raster(grid_raster).extent

    # Extract the climate data to the grid raster and study area
    print(f'\tExtracting climate data to grid...')
    iteration_start = time.time()
    extract_grid = ExtractByMask(climate_raster, grid_raster)
    extract_area = ExtractByMask(extract_grid, study_area)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t----------')

    # Export extracted raster to output raster
    print(f'\tExporting extracted raster to output raster...')
    iteration_start = time.time()
    arcpy.CopyRaster_management(extract_area, output_raster, '', '', '-32768',
                                'NONE', 'NONE', '16_BIT_SIGNED', 'NONE',
                                'NONE', 'TIFF', 'NONE')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t----------')
    out_process = f'Finished formatting climate data to grid.'
    return out_process
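
A sketch of how format_climate_grids might be invoked for one grid; all raster paths are placeholders.

# Hypothetical usage sketch (placeholder paths)
climate_kwargs = {
    'input_array': [r"C:\data\study_area.tif",       # study area raster (first)
                    r"C:\data\grid_A1.tif",          # grid raster (second)
                    r"C:\data\precip_mean.tif"],     # climate raster (last)
    'output_array': [r"C:\data\gridded\precip_mean_A1.tif"]
}
print(format_climate_grids(**climate_kwargs))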
Example 20
    def glacier_debris(band_4, band_5, glacier_outline, out_dir):
        print('Running glacier_debris')
        if Want_CloudRemoval == 'True':
            outExtractByMask = ExtractByMask(
                band_4,
                mask_dir + '\\' + band_4.split('\\')[-1].split('_b')[0][0:16] +
                band_4.split('\\')[-1].split('_b')[0][17:21] + 'mask.shp')
            outExtractByMask.save('del_nodatagone4.TIF')
            outExtractByMask = ExtractByMask(
                band_5,
                mask_dir + '\\' + band_4.split('\\')[-1].split('_b')[0][0:16] +
                band_4.split('\\')[-1].split('_b')[0][17:21] + 'mask.shp')
            outExtractByMask.save('del_nodatagone5.TIF')
            outExtractByMask = ExtractByMask('del_nodatagone4.TIF',
                                             glacier_outline)
            outExtractByMask.save('del_mask4.TIF')
            outExtractByMask = ExtractByMask('del_nodatagone5.TIF',
                                             glacier_outline)
            outExtractByMask.save('del_mask5.TIF')
            print('extract')
        else:
            outExtractByMask = ExtractByMask(band_4, glacier_outline)
            outExtractByMask.save('del_mask4.TIF')
            outExtractByMask = ExtractByMask(band_5, glacier_outline)
            outExtractByMask.save('del_mask5.TIF')
            print('extract')
        #Convert Raster to float for decimal threshold values
        arcpy.RasterToFloat_conversion('del_mask4.TIF', 'del_band_4a.flt')
        arcpy.RasterToFloat_conversion('del_mask5.TIF', 'del_band_5a.flt')
        arcpy.Divide_3d('del_band_4a.flt', 'del_band_5a.flt',
                        'del_division.TIF')
        print('division')
        outSetNull = SetNull('del_division.TIF', 'del_division.TIF',
                             'VALUE > ' + str(threshold))

        #path to results folder, for loops add a counter if images are from the same year and day
        result_name = glacier_outline.split('.shp')[0].split(
            '\\'
        )[-1] + '_' + band_4.split('\\')[-1][9:13] + 'y' + band_4.split(
            '\\')[-1][13:16] + 'd' + '_L' + band_4.split(
                '\\')[-1][2:3] + '_' + Lband.split('_')[-1][1:2] + Hband.split(
                    '_')[-1][1:2] + 'b' + str(int(
                        threshold *
                        100)) + 't' + str(A_remove) + 'r' + str(A_fill) + 'f'
        result_path = out_dir + glacier_outline.split('.shp')[0].split(
            '\\'
        )[-1] + '_' + band_4.split('\\')[-1][9:13] + 'y' + band_4.split(
            '\\')[-1][13:16] + 'd' + '_L' + band_4.split(
                '\\')[-1][2:3] + '_' + Lband.split('_')[-1][1:2] + Hband.split(
                    '_')[-1][1:2] + 'b' + str(int(
                        threshold *
                        100)) + 't' + str(A_remove) + 'r' + str(A_fill) + 'f'

        if result_name + '1.shp' in os.listdir(out_dir):
            result_path = result_path + '2'
        elif result_name + '2.shp' in os.listdir(out_dir):
            result_path = result_path + '3'
        elif result_name + '3.shp' in os.listdir(out_dir):
            result_path = result_path + '4'
        elif result_name + '4.shp' in os.listdir(out_dir):
            result_path = result_path + '5'
        elif result_name + '5.shp' in os.listdir(out_dir):
            result_path = result_path + '6'
        else:
            result_path = result_path + '1'

        result_file = result_path + '.TIF'
        print('result file: ' + result_file)

        outSetNull.save(result_file)
        print('Level 1 product produced')

        #Float raster to integer
        outInt = Int(result_file)
        outInt.save('del_result_file_int.TIF')
        # Set local variables
        inRaster = 'del_result_file_int.TIF'
        outPolygons = 'del_debris.shp'
        field = 'VALUE'
        arcpy.RasterToPolygon_conversion(inRaster, outPolygons, 'NO_SIMPLIFY',
                                         field)
        print('to polygon')

        #Process: Dissolve. need to create "value" row where all elements=0
        arcpy.AddField_management('del_debris.shp', 'value', 'SHORT', 1, '',
                                  '', '', '', '')
        arcpy.Dissolve_management('del_debris.shp', 'del_debris_dissolve.shp',
                                  'value')
        print('dissolve')
        # Run the tool to create a new fc with only singlepart features
        arcpy.MultipartToSinglepart_management('del_debris_dissolve.shp',
                                               'del_explode.shp')
        print('explode')
        # Process: Calculate polygon area (km2)
        arcpy.CalculateAreas_stats('del_explode.shp', 'del_area.shp')
        arcpy.MakeFeatureLayer_management('del_area.shp', 'tempLayer')
        # Execute SelectLayerByAttribute to determine which features to delete
        expression = 'F_AREA <=' + str(A_remove)  # m2
        arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION',
                                                expression)
        arcpy.DeleteFeatures_management('tempLayer')
        print('Shapes with an area <= ' + str(
            A_remove) + ' m2 removed; ' + str(
                A_remove / 900) + ' pixels, if 30m pixels')
        #Delete polygons < xx m2
        arcpy.Delete_management('tempLayer')
        print('tempLayer deleted')
        result_file2 = result_path + '.shp'
        print('Level 2 result file: ' + result_file2)
        #Process: aggregate (distance=1 m, minimum area=0, minimum hole size=xx m)
        CA.AggregatePolygons('del_area.shp', result_file2, 1, 0, A_fill,
                             'NON_ORTHOGONAL')
        print('holes with an area <= ' + str(
            A_fill) + ' m2 filled/merged with debris polygon; ' + str(
                A_fill / 900) + ' pixels, if 30m pixels')

        rasterList = arcpy.ListRasters('*del*')
        for raster in rasterList:
            arcpy.Delete_management(raster)

        fcList = arcpy.ListFeatureClasses('*del*')
        for fc in fcList:
            arcpy.Delete_management(fc)

        print('intermediate files deleted')
        print('level 2 product produced')
Example 21
campo = "OBJECTID"

entidadRecorte = arcpy.Select_analysis(catastro_capa,
                                       r"C:\script\workspace\datosInput.gdb",
                                       campo + "=" + str(input_catastro))
#entidadRecorte = arcpy.Select_analysis(catastro_capa,r"C:\script\workspace\datosInput.gdb",campo + "= '" + str(numero) + "'")

### CLIP THE CADASTRE LAYER WITH THE CLIMATE DATA:
arcpy.env.workspace = r"C:\script\workspace\datosInput.gdb"
lista_capas_gdb = arcpy.ListDatasets("*", "Raster")
#print(lista_capas_gdb)

for capa in lista_capas_gdb:
    inRaster = capa
    inMaskData = entidadRecorte
    outExtractByMask = ExtractByMask(inRaster, inMaskData)
    outExtractByMask.save(
        os.path.join(r"C:\script\workspace\datosInput.gdb", capa + "_recorte"))
# print(f"clipping {capa}")

# CLIP CONSTANT DATA:

# CLIP THE GEOLOGY LAYER
enRaster = geologia_capa
enMaskData = entidadRecorte
outExtractByMask = ExtractByMask(enRaster, enMaskData)
outExtractByMask.save(r"C:\script\workspace\datosInput.gdb\geologia_recorte")
#print("He recortado geologia")

#RECORTAMOS CAPA DE PENDIENTE:
raster = pendiente_capa
def create_composite_dem(**kwargs):
    """
    Description: mosaics extracted source rasters with first data priority and extracts to mask
    Inputs: 'cell_size' -- a cell size for the output DEM
            'output_projection' -- the machine number for the output projection
            'work_geodatabase' -- a geodatabase to store temporary results
            'input_array' -- an array containing the grid raster (must be first) and the list of sources DEMs in prioritized order
            'output_array' -- an array containing the output raster
    Returned Value: Returns a raster dataset on disk containing the merged source DEM
    Preconditions: requires source DEMs and predefined grid
    """

    # Import packages
    import arcpy
    from arcpy.sa import ExtractByMask
    from arcpy.sa import Raster
    import datetime
    import os
    import time

    # Parse key word argument inputs
    cell_size = kwargs['cell_size']
    output_projection = kwargs['output_projection']
    elevation_inputs = kwargs['input_array']
    grid_raster = elevation_inputs.pop(0)
    composite_raster = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Use three quarters of the cores on processes that can be split.
    arcpy.env.parallelProcessingFactor = "75%"

    # Set snap raster and extent
    arcpy.env.snapRaster = grid_raster
    arcpy.env.extent = Raster(grid_raster).extent

    # Determine input raster value type
    value_number = arcpy.management.GetRasterProperties(
        elevation_inputs[0], "VALUETYPE")[0]
    no_data_value = arcpy.Describe(elevation_inputs[0]).noDataValue
    value_dictionary = {
        0: '1_BIT',
        1: '2_BIT',
        2: '4_BIT',
        3: '8_BIT_UNSIGNED',
        4: '8_BIT_SIGNED',
        5: '16_BIT_UNSIGNED',
        6: '16_BIT_SIGNED',
        7: '32_BIT_UNSIGNED',
        8: '32_BIT_SIGNED',
        9: '32_BIT_FLOAT',
        10: '64_BIT'
    }
    value_type = value_dictionary.get(int(value_number))
    print(f'Output data type will be {value_type}.')
    print(f'Output no data value will be {no_data_value}.')

    # Define the target projection
    composite_projection = arcpy.SpatialReference(output_projection)

    # Define folder structure
    grid_title = os.path.splitext(os.path.split(grid_raster)[1])[0]
    mosaic_location, mosaic_name = os.path.split(composite_raster)
    # Create mosaic location if it does not already exist
    if os.path.exists(mosaic_location) == 0:
        os.mkdir(mosaic_location)

    # Create source folder within mosaic location if it does not already exist
    source_folder = os.path.join(mosaic_location, 'sources')
    if os.path.exists(source_folder) == 0:
        os.mkdir(source_folder)

    # Create an empty list to store existing extracted source rasters for the area of interest
    input_length = len(elevation_inputs)
    input_rasters = []
    count = 1
    # Iterate through all input rasters to extract to grid and append to input list
    for raster in elevation_inputs:
        # Define output raster file path
        output_raster = os.path.join(source_folder, os.path.split(raster)[1])
        # Extract input raster if extracted raster does not already exist
        if os.path.exists(output_raster) == 0:
            try:
                print(
                    f'\tExtracting elevation source {count} of {input_length}...'
                )
                iteration_start = time.time()
                # Extract raster to mask
                extract_raster = ExtractByMask(raster, grid_raster)
                # Copy extracted raster to output
                print(
                    f'\tSaving elevation source {count} of {input_length}...')
                arcpy.management.CopyRaster(extract_raster, output_raster, '',
                                            '', no_data_value, 'NONE', 'NONE',
                                            value_type, 'NONE', 'NONE', 'TIFF',
                                            'NONE')
                # End timing
                iteration_end = time.time()
                iteration_elapsed = int(iteration_end - iteration_start)
                iteration_success_time = datetime.datetime.now()
                # Report success
                print(
                    f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
                )
                print('\t----------')
            except:
                print('\tElevation source does not overlap grid...')
                print('\t----------')
        else:
            print(
                f'\tExtracted elevation source {count} of {input_length} already exists...'
            )
            print('\t----------')
        # Append extracted input raster to inputs list
        if os.path.exists(output_raster) == 1:
            input_rasters.append(output_raster)
        # Increase counter
        count += 1

    # Append the grid raster to the list of input rasters
    input_rasters.append(grid_raster)

    # Report the raster priority order
    raster_order = []
    for raster in input_rasters:
        name = os.path.split(raster)[1]
        raster_order.append(name)
    print(f'\tPriority of input sources for {grid_title}...')
    count = 1
    for raster in raster_order:
        print(f'\t\t{count}. {raster}')
        # Increase the counter
        count += 1

    # Mosaic raster tiles to new raster
    print(f'\tMosaicking the input rasters for {grid_title}...')
    iteration_start = time.time()
    arcpy.management.MosaicToNewRaster(input_rasters, mosaic_location,
                                       mosaic_name, composite_projection,
                                       value_type, cell_size, '1', 'FIRST',
                                       'FIRST')
    # Enforce correct projection
    arcpy.management.DefineProjection(composite_raster, composite_projection)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t----------')
    out_process = f'Finished elevation composite for {grid_title}.'
    return out_process
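
A sketch of a call to create_composite_dem above; the cell size, WKID, and DEM paths are placeholders, and the source DEMs follow the grid raster in priority order as the docstring requires.

# Hypothetical usage sketch (placeholder values and paths)
composite_kwargs = {
    'cell_size': 10,
    'output_projection': 3338,                       # example WKID, assumption
    'work_geodatabase': r"C:\data\work.gdb",
    'input_array': [r"C:\data\grid_A1.tif",          # grid raster (first)
                    r"C:\data\ifsar_dem.tif",        # highest-priority source DEM
                    r"C:\data\aster_dem.tif"],       # lower-priority source DEM
    'output_array': [r"C:\data\composite\Elevation_A1.tif"]
}
print(create_composite_dem(**composite_kwargs))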
def convert_fire_history(**kwargs):
    """
    Description: converts fire history polygons to rasters and extracts to major grid and study area
    Inputs: 'work_geodatabase' -- path to a file geodatabase that will serve as the workspace
            'input_array' -- an array containing the target feature class to convert (must be first), the study area raster (must be second), and the grid raster (must be third)
            'output_array' -- an array containing the output raster
    Returned Value: Returns a raster dataset
    Preconditions: the target feature class must be created using the recent fire history function
    """

    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import ExtractByMask
    from arcpy.sa import IsNull
    from arcpy.sa import Raster
    import datetime
    import time
    import os

    # Parse key word argument inputs
    work_geodatabase = kwargs['work_geodatabase']
    input_feature = kwargs['input_array'][0]
    study_area = kwargs['input_array'][1]
    grid_raster = kwargs['input_array'][2]
    output_raster = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Set workspace
    arcpy.env.workspace = work_geodatabase

    # Set snap raster and extent
    arcpy.env.snapRaster = study_area
    arcpy.env.extent = Raster(grid_raster).extent
    arcpy.env.cellSize = 'MINOF'

    # Define intermediate rasters
    convert_raster = os.path.splitext(output_raster)[0] + '.tif'

    # Convert fire history feature class to raster
    print('\tConverting feature class to raster within grid...')
    iteration_start = time.time()
    arcpy.conversion.PolygonToRaster(input_feature, 'FireYear', convert_raster, 'CELL_CENTER', 'FireYear', 10)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Convert no data values to zero
    print('\tConverting no data to zero...')
    iteration_start = time.time()
    zero_raster = Con(IsNull(Raster(convert_raster)), 0, Raster(convert_raster))
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')

    # Extract raster to study area
    print(f'\tExtracting raster to grid...')
    iteration_start = time.time()
    extract1_raster = ExtractByMask(zero_raster, grid_raster)
    print(f'\tExtracting raster to study area...')
    extract2_raster = ExtractByMask(extract1_raster, study_area)
    print(f'\tCopying extracted raster to new raster...')
    arcpy.management.CopyRaster(extract2_raster,
                                output_raster,
                                '',
                                '',
                                '-32768',
                                'NONE',
                                'NONE',
                                '16_BIT_SIGNED',
                                'NONE',
                                'NONE',
                                'TIFF',
                                'NONE',
                                'CURRENT_SLICE',
                                'NO_TRANSPOSE'
                                )
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})')
    print('\t----------')
    out_process = f'Successfully extracted recent fire history to study area.'
    return out_process
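
A sketch of a call to convert_fire_history above, following the documented input order (fire polygons, study area raster, grid raster); the paths are placeholders and the feature class is assumed to carry the 'FireYear' field used by the conversion.

# Hypothetical usage sketch (placeholder paths)
fire_kwargs = {
    'work_geodatabase': r"C:\data\work.gdb",
    'input_array': [r"C:\data\work.gdb\recent_fire_history",   # polygon feature class with 'FireYear'
                    r"C:\data\study_area.tif",                 # study area raster
                    r"C:\data\grid_A1.tif"],                   # grid raster
    'output_array': [r"C:\data\fire\fire_year_A1.tif"]
}
print(convert_fire_history(**fire_kwargs))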
Example 24
def evi_raster_shrink(evi_raster_path, feature_points_path, radius):
    arcpy.Buffer_analysis(feature_points_path, "feature_points_buffers", radius, dissolve_option="ALL")
    out_raster = ExtractByMask(evi_raster_path, "feature_points_buffers")
    out_raster.save("evi_raster_shrink")
    return out_raster
Example 25
def VegetationHeightProfil(emprise, mnh, bornes, OutputFc, idfield, geodata):
    from arcpy import env
    from arcpy.sa import ExtractByMask, Slope
    arcpy.CheckOutExtension("spatial")
    env.workspace= geodata
    env.overwriteOutput = True

    # Extract the MNH
    pathExtract = os.path.join(geodata, "ExtractMNH")
    Extract_MNH = ExtractByMask(mnh, emprise)
    Extract_MNH.save(pathExtract)

    # Compute the slope
    pathSlope = os.path.join(geodata,"SlopeMNH")
    slope_mnh = Slope(pathExtract,"DEGREE")
    slope_mnh.save(pathSlope)

    # Convert the raster to points
    arcpy.RasterToPoint_conversion(slope_mnh, "Slope", "Value")

    # Spatial join: compute the mean and standard deviation
    fmap = arcpy.FieldMappings()
    fmap.addTable(emprise)
    fmap.addTable("Slope")

    # Create fieldmap for Mean
    fldMean = arcpy.FieldMap()
    fldMean.addInputField("Slope", "grid_code")
    fMean = fldMean.outputField
    fMean.name = "Mean"
    fMean.aliasName = "Mean"
    fldMean.outputField = fMean
    fldMean.mergeRule= "Mean"
    fmap.addFieldMap(fldMean)

    # Create fieldmap for StdDev
    fldEcartype = arcpy.FieldMap()
    fldEcartype.addInputField("Slope","grid_code")
    fEcartype = fldEcartype.outputField
    fEcartype.name = "Stdv"
    fEcartype.aliasName = "Stdv"
    fldEcartype.outputField = fEcartype
    fldEcartype.mergeRule = "StdDev"
    fmap.addFieldMap(fldEcartype)

    # Perform the spatial join
    arcpy.SpatialJoin_analysis(emprise, "Slope", OutputFc, "", "", fmap)

    # Create a field
    arcpy.AddField_management(OutputFc, "Prof_Typ", "TEXT")

    # Delete Field:
    for fld in arcpy.ListFields(OutputFc):
        if fld.name not in [idfield,"Stdv","Mean","Prof_Typ"]:
            try:
                arcpy.DeleteField_management(OutputFc,fld.name)
            except:
                pass

    # Evaluate the slope against the thresholds (bornes)
    b1 = bornes[0]
    b2 = bornes[1]
    Code_bloc = """def Eval(Moyenne, EcarType):
        if Moyenne > """ + str(b2) + """ and EcarType < """ + str(b1) + """: ProfilType = "Asc/Desc_Continue"
        elif Moyenne < """ + str(b2) + """ and EcarType < """ + str(b1) + """: ProfilType = "Plat"
        else: ProfilType = "Hétérogène"
        return ProfilType
    """
    expression = "Eval(!Mean!,!Stdv!)"

    # Calculate the Prof_Typ field
    arcpy.CalculateField_management(OutputFc, "Prof_Typ", expression, "PYTHON_9.3", Code_bloc)

    # Return the result
    return OutputFc
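
A usage sketch for VegetationHeightProfil above, assuming a file geodatabase workspace; the feature class names, threshold values, and geodatabase path are placeholders.

import os

# Hypothetical usage sketch (placeholder names and thresholds)
VegetationHeightProfil(emprise=os.path.join(r"C:\data\work.gdb", "plots"),
                       mnh=os.path.join(r"C:\data\work.gdb", "mnh"),
                       bornes=[5, 10],          # [std-dev threshold, mean-slope threshold]
                       OutputFc="PlotsProfile",
                       idfield="PLOT_ID",
                       geodata=r"C:\data\work.gdb")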
Example 26
def extract(night_image):
    return ExtractByMask(night_image, UNITED_STATE_MASK)
def calculate_topographic_properties(**kwargs):
    """
    Description: calculates topographic properties from an elevation raster
    Inputs: 'z_unit' -- a string value of either 'Meter' or 'Foot' representing the vertical unit of the elevation raster
            'input_array' -- an array containing the grid raster (must be first) and the elevation raster
            'output_array' -- an array containing the output rasters for aspect, compound topographic index, heat load index, integrated moisture index, roughness, site exposure, slope, surface area ratio, and surface relief ratio (in that order)
    Returned Value: Returns a raster dataset on disk for each topographic property
    Preconditions: requires an input DEM that can be created through other scripts in this repository
    """

    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import IsNull
    from arcpy.sa import ExtractByMask
    from arcpy.sa import Raster
    from arcpy.sa import Int
    from arcpy.sa import FlowDirection
    from arcpy.sa import FlowAccumulation
    from arcpy.sa import Slope
    from arcpy.sa import Aspect
    from package_Geomorphometry import compound_topographic
    from package_Geomorphometry import getZFactor
    from package_Geomorphometry import linear_aspect
    from package_Geomorphometry import mean_slope
    from package_Geomorphometry import roughness
    from package_Geomorphometry import site_exposure
    from package_Geomorphometry import surface_area
    from package_Geomorphometry import surface_relief
    from package_Geomorphometry import topographic_position
    from package_Geomorphometry import topographic_radiation
    import datetime
    import os
    import time

    # Parse key word argument inputs
    z_unit = kwargs['z_unit']
    grid_raster = kwargs['input_array'][0]
    elevation_input = kwargs['input_array'][1]
    elevation_output = kwargs['output_array'][0]
    aspect_output = kwargs['output_array'][1]
    cti_output = kwargs['output_array'][2]
    roughness_output = kwargs['output_array'][3]
    exposure_output = kwargs['output_array'][4]
    slope_output = kwargs['output_array'][5]
    area_output = kwargs['output_array'][6]
    relief_output = kwargs['output_array'][7]
    position_output = kwargs['output_array'][8]
    radiation_output = kwargs['output_array'][9]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Use three quarters of the cores on processes that can be split.
    arcpy.env.parallelProcessingFactor = "75%"

    # Set snap raster and extent
    arcpy.env.snapRaster = grid_raster
    arcpy.env.extent = Raster(grid_raster).extent

    # Define folder structure
    grid_title = os.path.splitext(os.path.split(grid_raster)[1])[0]
    raster_folder = os.path.split(elevation_output)[0]
    intermediate_folder = os.path.join(raster_folder, 'intermediate')
    # Create raster folder if it does not already exist
    if os.path.exists(raster_folder) == 0:
        os.mkdir(raster_folder)
    # Create intermediate folder if it does not already exist
    if os.path.exists(intermediate_folder) == 0:
        os.mkdir(intermediate_folder)

    # Define intermediate datasets
    flow_direction_raster = os.path.join(intermediate_folder,
                                         'flow_direction.tif')
    flow_accumulation_raster = os.path.join(intermediate_folder,
                                            'flow_accumulation.tif')
    raw_slope_raster = os.path.join(intermediate_folder, 'raw_slope.tif')
    raw_aspect_raster = os.path.join(intermediate_folder, 'raw_aspect.tif')

    # Get the z factor appropriate to the xy and z units
    zFactor = getZFactor(elevation_input, z_unit)

    #### CALCULATE INTERMEDIATE DATASETS

    # Calculate flow direction if it does not already exist
    if os.path.exists(flow_direction_raster) == 0:
        # Calculate flow direction
        print(f'\tCalculating flow direction for {grid_title}...')
        iteration_start = time.time()
        flow_direction = FlowDirection(elevation_input, 'NORMAL', '', 'D8')
        flow_direction.save(flow_direction_raster)
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tFlow direction already exists for {grid_title}.')
        print('\t----------')

    # Calculate flow accumulation if it does not already exist
    if os.path.exists(flow_accumulation_raster) == 0:
        # Calculate flow accumulation
        print(f'\tCalculating flow accumulation for {grid_title}...')
        iteration_start = time.time()
        flow_accumulation = FlowAccumulation(flow_direction_raster, '',
                                             'FLOAT', 'D8')
        flow_accumulation.save(flow_accumulation_raster)
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tFlow accumulation already exists for {grid_title}.')
        print('\t----------')

    # Calculate raw slope in degrees if it does not already exist
    if os.path.exists(raw_slope_raster) == 0:
        # Calculate slope
        print(f'\tCalculating raw slope for {grid_title}...')
        iteration_start = time.time()
        raw_slope = Slope(elevation_input, "DEGREE", zFactor)
        raw_slope.save(raw_slope_raster)
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tRaw slope already exists for {grid_title}.')
        print('\t----------')

    # Calculate raw aspect if it does not already exist
    if os.path.exists(raw_aspect_raster) == 0:
        # Calculate aspect
        print(f'\tCalculating raw aspect for {grid_title}...')
        iteration_start = time.time()
        raw_aspect = Aspect(elevation_input, 'PLANAR', z_unit)
        raw_aspect.save(raw_aspect_raster)
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tRaw aspect already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE INTEGER ELEVATION

    # Calculate integer elevation if it does not already exist
    if arcpy.Exists(elevation_output) == 0:
        print(f'\tCalculating integer elevation for {grid_title}...')
        iteration_start = time.time()
        # Round to integer
        print(f'\t\tConverting values to integers...')
        integer_elevation = Int(Raster(elevation_input) + 0.5)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_elevation, elevation_output, '',
                                    '', '-32768', 'NONE', 'NONE',
                                    '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF',
                                    'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tInteger elevation already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE LINEAR ASPECT

    # Calculate linear aspect if it does not already exist
    if arcpy.Exists(aspect_output) == 0:
        print(f'\tCalculating linear aspect for {grid_title}...')
        iteration_start = time.time()
        # Create an initial linear aspect calculation using the linear aspect function
        aspect_intermediate = os.path.splitext(
            aspect_output)[0] + '_intermediate.tif'
        linear_aspect(raw_aspect_raster, aspect_intermediate)
        # Round to integer
        print(f'\t\tConverting values to integers...')
        integer_aspect = Int(Raster(aspect_intermediate) + 0.5)
        # Fill missing data (no aspect) with values of -1
        print(f'\t\tFilling values of no aspect...')
        conditional_aspect = Con(IsNull(integer_aspect), -1, integer_aspect)
        # Extract filled raster to grid mask
        print(f'\t\tExtracting filled raster to grid...')
        extract_aspect = ExtractByMask(conditional_aspect, grid_raster)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(extract_aspect, aspect_output, '', '',
                                    '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                    'NONE', 'NONE', 'TIFF', 'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(aspect_intermediate)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tLinear aspect already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE COMPOUND TOPOGRAPHIC INDEX

    # Calculate compound topographic index if it does not already exist
    if arcpy.Exists(cti_output) == 0:
        print(f'\tCalculating compound topographic index for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate compound topographic index calculation
        cti_intermediate = os.path.splitext(
            cti_output)[0] + '_intermediate.tif'
        compound_topographic(elevation_input, flow_accumulation_raster,
                             raw_slope_raster, cti_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_compound = Int((Raster(cti_intermediate) * 100) + 0.5)
        # Copy integer raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_compound, cti_output, '', '',
                                    '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                    'NONE', 'NONE', 'TIFF', 'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(cti_intermediate)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tCompound topographic index already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE ROUGHNESS

    # Calculate roughness if it does not already exist
    if arcpy.Exists(roughness_output) == 0:
        print(f'\tCalculating roughness for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate roughness calculation
        roughness_intermediate = os.path.splitext(
            roughness_output)[0] + '_intermediate.tif'
        roughness(elevation_input, roughness_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_roughness = Int(Raster(roughness_intermediate) + 0.5)
        # Fill missing data (no roughness) with values of 0
        print(f'\t\tFilling values of roughness...')
        conditional_roughness = Con(IsNull(integer_roughness), 0,
                                    integer_roughness)
        # Extract filled raster to grid mask
        print(f'\t\tExtracting filled raster to grid...')
        extract_roughness = ExtractByMask(conditional_roughness, grid_raster)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(extract_roughness, roughness_output, '',
                                    '', '-32768', 'NONE', 'NONE',
                                    '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF',
                                    'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(roughness_intermediate)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tRoughness already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE SITE EXPOSURE

    # Calculate site exposure if it does not already exist
    if arcpy.Exists(exposure_output) == 0:
        print(f'\tCalculating site exposure for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate site exposure calculation
        exposure_intermediate = os.path.splitext(
            exposure_output)[0] + '_intermediate.tif'
        site_exposure(raw_aspect_raster, raw_slope_raster,
                      exposure_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_exposure = Int((Raster(exposure_intermediate) * 100) + 0.5)
        # Copy integer raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_exposure, exposure_output, '', '',
                                    '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                    'NONE', 'NONE', 'TIFF', 'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(exposure_intermediate)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tSite exposure already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE MEAN SLOPE

    # Calculate mean slope if it does not already exist
    if arcpy.Exists(slope_output) == 0:
        print(f'\tCalculating mean slope for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate mean slope calculation
        slope_intermediate = os.path.splitext(
            slope_output)[0] + '_intermediate.tif'
        mean_slope(raw_slope_raster, slope_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_slope = Int(Raster(slope_intermediate) + 0.5)
        # Copy integer raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_slope, slope_output, '', '',
                                    '-128', 'NONE', 'NONE', '8_BIT_SIGNED',
                                    'NONE', 'NONE', 'TIFF', 'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(slope_intermediate)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tMean slope already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE SURFACE AREA RATIO

    # Calculate surface area ratio if it does not already exist
    if os.path.exists(area_output) == 0:
        print(f'\tCalculating surface area ratio for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate surface area ratio calculation
        area_intermediate = os.path.splitext(
            area_output)[0] + '_intermediate.tif'
        surface_area(raw_slope_raster, area_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_area = Int((Raster(area_intermediate) * 10) + 0.5)
        # Copy integer raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_area, area_output, '', '',
                                    '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                    'NONE', 'NONE', 'TIFF', 'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(area_intermediate)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tSurface area ratio already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE SURFACE RELIEF RATIO

    # Calculate surface relief ratio if it does not already exist
    if arcpy.Exists(relief_output) == 0:
        print(f'\tCalculating surface relief ratio for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate surface relief ratio calculation
        relief_intermediate = os.path.splitext(
            relief_output)[0] + '_intermediate.tif'
        surface_relief(elevation_input, relief_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_relief = Int((Raster(relief_intermediate) * 1000) + 0.5)
        # Copy integer raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_relief, relief_output, '', '',
                                    '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                    'NONE', 'NONE', 'TIFF', 'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(relief_intermediate)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tSurface relief ratio already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE TOPOGRAPHIC POSITION

    # Calculate topographic position if it does not already exist
    if arcpy.Exists(position_output) == 0:
        print(f'\tCalculating topographic position for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate topographic position calculation
        position_intermediate = os.path.splitext(
            position_output)[0] + '_intermediate.tif'
        topographic_position(elevation_input, position_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_position = Int((Raster(position_intermediate) * 100) + 0.5)
        # Copy integer raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_position, position_output, '', '',
                                    '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                    'NONE', 'NONE', 'TIFF', 'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(position_intermediate)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tTopographic position already exists for {grid_title}.')
        print('\t----------')

    #### CALCULATE TOPOGRAPHIC RADIATION

    # Calculate topographic radiation if it does not already exist
    if arcpy.Exists(radiation_output) == 0:
        print(f'\tCalculating topographic radiation for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate topographic radiation calculation
        radiation_intermediate = os.path.splitext(
            radiation_output)[0] + '_intermediate.tif'
        radiation_integer = os.path.splitext(
            radiation_output)[0] + '_integer.tif'
        topographic_radiation(elevation_input, radiation_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_radiation = Int((Raster(radiation_intermediate) * 1000) + 0.5)
        arcpy.management.CopyRaster(integer_radiation, radiation_integer, '',
                                    '', '-32768', 'NONE', 'NONE',
                                    '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF',
                                    'NONE')
        # Extract integer raster to grid mask
        print(f'\t\tExtracting integer raster to grid...')
        extract_radiation = ExtractByMask(radiation_integer, grid_raster)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(extract_radiation, radiation_output, '',
                                    '', '-32768', 'NONE', 'NONE',
                                    '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF',
                                    'NONE')
        # End timing
        iteration_end = time.time()
        iteration_elapsed = int(iteration_end - iteration_start)
        iteration_success_time = datetime.datetime.now()
        # Delete intermediate dataset if possible
        try:
            arcpy.management.Delete(radiation_intermediate)
            arcpy.management.Delete(radiation_integer)
        except:
            print('\t\tCould not delete intermediate dataset...')
        # Report success
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')
    else:
        print(f'\tTopographic radiation already exists for {grid_title}.')
        print('\t----------')

    outprocess = f'Finished topographic properties for {grid_title}.'
    return outprocess
Exemplo n.º 28
0
def evi_raster_shrink(evi_raster_path, feature_points_path, radius):
    if not os.path.exists(os.path.join(env.workspace, "feature_points_buffer.shp")):
        arcpy.Buffer_analysis(feature_points_path, "feature_points_buffer", radius, dissolve_option="ALL")
    out_raster = ExtractByMask(evi_raster_path, "feature_points_buffer.shp")
    out_raster.save("evi_raster_shrink.tif")
    return out_raster
Exemplo n.º 29
0
def extract_by_mask(in_raster, in_mask):
    out_raster = ExtractByMask(in_raster, in_mask)
    return out_raster
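
A minimal usage sketch for the extract_by_mask wrapper above; the paths are hypothetical, and it assumes ExtractByMask has been imported from arcpy.sa (as in the surrounding examples) with a Spatial Analyst license available.

import arcpy
from arcpy.sa import ExtractByMask

# Hypothetical inputs: any raster plus a polygon or raster mask
arcpy.CheckOutExtension('Spatial')
arcpy.env.overwriteOutput = True
clipped = extract_by_mask('C:/data/elevation.tif', 'C:/data/study_area.shp')
clipped.save('C:/data/elevation_clip.tif')
arcpy.CheckInExtension('Spatial')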
Exemplo n.º 30
0
def IceCliffLocation(workspace,dem,tileDebarea,pixel,skinny,minSlope,n_iterations,L_e,alpha,beta_e,A_min,phi,gamma):
    import sys
    import os
    import arcpy
    from arcpy import env
    from arcpy.sa import Slope, ExtractByMask, Raster, SetNull, Int
    import matplotlib.pyplot as plt
    import numpy as np
    from numpy import array
    from scipy.optimize import curve_fit
    env.overwriteOutput = True

    try:
        import arcinfo
    except:
        arcpy.AddMessage("ArcInfo license not available")
        sys.exit("ArcInfo license not available")
    if arcpy.CheckExtension("spatial") == "Available":
        arcpy.CheckOutExtension("spatial")
    else:
        arcpy.AddMessage("Spatial Analyst license not available")
        sys.exit("Spatial Analyst license not available")
        
    #Parameters that should be stable:
    slopeLimit = 90 # slope detection capped at this value
    
    ## Loop for optimizing slope
    if str(workspace.split("\\")[-1]) == 'Final':
        n = []
        n.append(minSlope)        
    else:
        minSlope = 0
        n = np.arange(minSlope,slopeLimit,(slopeLimit-minSlope)/n_iterations)

    skipIteration = []
    for minSlope in n:
        
        # check for existing iterations if code has previously run but crashed. 
        if arcpy.ListFeatureClasses("*cliffMap*"):
            fcListPrior = arcpy.ListFeatureClasses("*cliffMap*")
            skipIteration = []
            for prior_i in fcListPrior:
                if int(prior_i[14:16]) == int("%02d" % (int(minSlope),)):
                    skipIteration = 1
        if skipIteration == 1:
            continue

        ## Ice Cliff code  
        if skinny == 'false':
            print 'IceCliffLocation script started...'
        if skinny == 'true':
            print 'skinny IceCliffLocation script started...'
            
        # Parameter that probably should be 0
        minProb = 0 # probability associated with minSlope.
        
        arcpy.CopyFeatures_management(tileDebarea, workspace+"\\del_debarea.shp")
        debarea_iteration = workspace+"\\del_debarea.shp"
        arcpy.env.snapRaster = dem
        outExtractSlope = ExtractByMask(dem, debarea_iteration)

        outExtractSlope.save("dem_extract.TIF")
        if int(round(float(str(arcpy.GetRasterProperties_management(dem, "CELLSIZEX"))))) == pixel:
            dem = "dem_extract.TIF"
        else:    
            arcpy.Resample_management("dem_extract.TIF", "dem_extractResample.TIF", pixel, "NEAREST")
            arcpy.env.snapRaster = dem
            print "DEM resampeld from "+str(int(round(float(str(arcpy.GetRasterProperties_management(dem, "CELLSIZEX"))))))+' to '+str(pixel)
            dem = "dem_extractResample.TIF"
        
        # Create slope raster
        outSlope = Slope(dem, "DEGREE", 1)
        outSlope.save("del_slope.TIF")
    
        # Isolate slope values above minSlope 
        outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(minSlope))
        outSetNull.save("del_minSlope.TIF")       
    
        # Exit process if no cliffs exist
        nocliff = arcpy.GetRasterProperties_management(Int("del_minSlope.TIF"), "ALLNODATA")
        if int(str(nocliff)) == 1:
            print "No area with a slope above "+str(minSlope)+"."
        elif float(str(arcpy.GetRasterProperties_management('del_minSlope.TIF',"MAXIMUM"))) - float(str(arcpy.GetRasterProperties_management('del_minSlope.TIF',"MINIMUM"))) == 0:
            print "Only one pixel with a slope above "+str(minSlope)+", iteration skipped."
        else:
            minMean = float(str(arcpy.GetRasterProperties_management("del_minSlope.TIF", "MEAN"))) 
            minSD = float(str(arcpy.GetRasterProperties_management("del_minSlope.TIF", "STD"))) 

            areaSlope = minMean
            
            print 'areaSlope = ' + str(areaSlope)
            
            # Isolate slope values above areaSlope 
            outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(areaSlope))
            outSetNull.save("del_areaSlope.TIF")
            arcpy.env.snapRaster = dem  
                        
            # Exit process if no cliffs exist
            nocliff = arcpy.GetRasterProperties_management(Int("del_areaSlope.TIF"), "ALLNODATA")
            if int(str(nocliff)) == 1:
                print "No area with a slope above "+str(areaSlope)+"."
            elif float(str(arcpy.GetRasterProperties_management("del_areaSlope.TIF","MAXIMUM"))) - float(str(arcpy.GetRasterProperties_management("del_areaSlope.TIF","MINIMUM"))) == 0:
                print "Only one pixel with a slope above "+str(areaSlope)+", iteration skipped."
            else: 
                seedSlope = minMean+minSD 
                print 'seedSlope = ' + str(seedSlope)
                
                # Isolate slope values above areaSlope 
                outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(seedSlope))
                outSetNull.save("del_seedSlope.TIF")

                # Exit process if no cliffs exist
                nocliff = arcpy.GetRasterProperties_management(Int("del_seedSlope.TIF"), "ALLNODATA")
                if int(str(nocliff)) == 1:
                    print "No seed area with a slope above "+str(seedSlope)+"."
                else:                    
                    # to int speeds up computation time
                    outInt = Int("del_areaSlope.TIF")
                    outInt.save("del_minSlopeInt.TIF")
                    outInt = Int("del_seedSlope.TIF")
                    outInt.save("del_seedSlopeInt.TIF")                  
                        
                    arcpy.RasterToPolygon_conversion("del_minSlopeInt.TIF", "del_minCliffSlope.shp", "NO_SIMPLIFY", "VALUE")
                    arcpy.AddField_management("del_minCliffSlope.shp", "value", "SHORT", 1, "", "", "", "", "")
                    arcpy.Dissolve_management("del_minCliffSlope.shp", "del_minCliff_dissolve.shp", "value")
                    arcpy.MultipartToSinglepart_management("del_minCliff_dissolve.shp", "del_minCliff_explode.shp")
                    arcpy.AddField_management("del_minCliff_explode.shp",'Area','FLOAT')
                    rows = arcpy.UpdateCursor("del_minCliff_explode.shp")
                    for row in rows:
                        areacliff = row.shape.area
                        row.Area = areacliff 
                        rows.updateRow(row)
                    del row, rows
                    arcpy.CopyFeatures_management("del_minCliff_explode.shp", "min"+str("%02d" % (minSlope,))+"_CliffArea.shp")
                    
                    # skinny/non-skinny fix for ending iteration. 0 = no skip, 1 = skip
                    skip_iter = 0 
                    
                    # skinny ice cliffs, does not include ice cliff end extension to speed up computations
                    if skinny == 'true':
                        if arcpy.management.GetCount("del_minCliff_explode.shp")[0] == "0":
                            skip_iter = 1
                            print "No area within del_minCliff_explode.shp, skinny iteration skipped."
                        else:
                            # "_FinalCliffShape.shp" and "_cliffArea.shp" are the same if skinny == true
                            arcpy.CopyFeatures_management("del_minCliff_explode.shp", "min"+str("%02d" % (minSlope,))+"area"+str(int(areaSlope))+"_FinalCliffShape.shp")
                            # copy working .shp, used below
                            arcpy.CopyFeatures_management('del_minCliff_explode.shp', 'del_lineAndArea_area.shp')
                            arcpy.CalculateAreas_stats('del_minCliff_explode.shp', 'del_lineAndArea_area.shp')
                            arcpy.MakeFeatureLayer_management('del_lineAndArea_area.shp', 'tempLayer')
                            expression = 'F_AREA <=' + str((pixel**2)*A_min)
                            arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression)
                            arcpy.DeleteFeatures_management('tempLayer')
                            arcpy.Delete_management('tempLayer')

                    if skinny == 'false':    
                        # buffer in/out area to break up attached features
                        arcpy.Buffer_analysis("del_minCliff_explode.shp", "del_extendLineBuffer.shp", (pixel/2)-0.1, "FULL", "ROUND", "NONE")
    
                        # Generate ice cliff centerlines from Voronoi cells
                        if arcpy.management.GetCount("del_extendLineBuffer.shp")[0] == "0":
                            arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_dissolve.shp")
                            skip_iter = 1
                            print "No area within the criteria defined by seed area value "+str(seedSlope)+", iteration stopped before centerlines."
                        else:
                            arcpy.FeatureToLine_management("del_extendLineBuffer.shp","del_line.shp","","ATTRIBUTES")
                            arcpy.Densify_edit("del_line.shp", "","5", "", "")
                            arcpy.FeatureVerticesToPoints_management ("del_line.shp", "del_verti.shp", "ALL")
                            arcpy.CreateThiessenPolygons_analysis("del_verti.shp","del_voronoiCells.shp" ,"ONLY_FID") 
                            arcpy.RepairGeometry_management("del_voronoiCells.shp")
                            
                            #use geodatabase here due to unexpected error: "Invalid Topology [Duplicate segment.]"
                            arcpy.CreateFileGDB_management(workspace, "fGDB.gdb")
                            fgdb = workspace+"\\fGDB.gdb"
                            #arcpy.env.workspace = fgdb
                            arcpy.Clip_analysis(workspace+"\\del_voronoiCells.shp", workspace+"\\del_extendLineBuffer.shp", fgdb+"\\shp","")
                            arcpy.FeatureToLine_management(fgdb+"\\shp", workspace+"\\del_toLine.shp", "", attributes="ATTRIBUTES")
                            arcpy.Delete_management(fgdb)
                            #arcpy.env.workspace = workspace
                            
                            #arcpy.FeatureToLine_management("del_voronoiCellsClip.shp","del_toLine.shp", "", attributes="ATTRIBUTES")
                            arcpy.MakeFeatureLayer_management("del_toLine.shp", "tempLayer", "", "", "")
                            arcpy.SelectLayerByLocation_management("tempLayer", "CROSSED_BY_THE_OUTLINE_OF","del_minCliff_explode.shp","","NEW_SELECTION")
                            arcpy.DeleteFeatures_management("tempLayer")
                            arcpy.Delete_management("tempLayer")
                            arcpy.Intersect_analysis(["del_toLine.shp",'del_minCliff_explode.shp'],"del_lineIntersect.shp")
                            arcpy.Dissolve_management("del_lineIntersect.shp", "del_toLineDis.shp", "", "", "SINGLE_PART", "DISSOLVE_LINES")
                            arcpy.UnsplitLine_management("del_toLineDis.shp","del_unsplit.shp","Id")
                            arcpy.MakeFeatureLayer_management("del_unsplit.shp", "tempLayer2", "", "", "")
                            arcpy.SelectLayerByLocation_management("tempLayer2", "BOUNDARY_TOUCHES","del_minCliff_explode.shp","","NEW_SELECTION")
                            arcpy.DeleteFeatures_management("tempLayer2")
                            arcpy.Delete_management("tempLayer2")
                            arcpy.cartography.SimplifyLine("del_unsplit.shp","del_clineSimpExp.shp","POINT_REMOVE",10)
                            arcpy.AddField_management("del_clineSimpExp.shp", "value", "SHORT", 1, "", "", "", "", "")
                            arcpy.Dissolve_management("del_clineSimpExp.shp", "del_clineSimp.shp", "value")
                            arcpy.TrimLine_edit("del_clineSimp.shp", "8 meters", "KEEP_SHORT")
                            arcpy.CopyFeatures_management("del_unsplit.shp", "min"+str("%02d" % (minSlope,))+"_Centerlines.shp")
                            
                            #refine centerline for final map
                            if arcpy.management.GetCount("del_clineSimp.shp")[0] == "0":
                                arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_dissolve.shp")
                                skip_iter = 1
                                print "No area big enough to generate a centerline, iteration skipped."
                            else:                        
                            
                                # extend lines to capture cliff ends
                                count = 0
                                print "Extend line started..."
                                
                                jlist = [(pixel/2)-0.1] * int(round(L_e/(pixel/2)))
                                for j in jlist:
                                    #create buffer out to set the limit a line will be extended to
                                    arcpy.Buffer_analysis("del_clineSimp.shp", "del_clineSimpBuff1.shp", j, "FULL", "ROUND", "ALL")
                                    arcpy.PolygonToLine_management("del_clineSimpBuff1.shp","del_clineSimpBuff1line.shp")
                                    #merge centerline and bufferline
                                    arcpy.Merge_management(["del_clineSimp.shp","del_clineSimpBuff1line.shp"], "del_clineSimpBuff1merge_dis.shp")
                                    arcpy.Delete_management("del_clineSimp.shp")
                                    print "Extend line "+str(count)+" started..."
                                    arcpy.MultipartToSinglepart_management("del_clineSimpBuff1merge_dis.shp", "del_clineSimpBuff1merge.shp")
                                    arcpy.MakeFeatureLayer_management("del_clineSimpBuff1merge.shp", "lineLayer", "", "", "")
                                    arcpy.SelectLayerByLocation_management("lineLayer", "SHARE_A_LINE_SEGMENT_WITH", "del_clineSimpBuff1.shp", "", "NEW_SELECTION", "INVERT")
                                    arcpy.ExtendLine_edit("del_clineSimpBuff1merge.shp", str(j+1)+" meters", "EXTENSION")
                                    
                                    #select share a line segment with buffer to remove buffer
                                     
                                    arcpy.SelectLayerByLocation_management("lineLayer", "SHARE_A_LINE_SEGMENT_WITH", "del_clineSimpBuff1.shp", "", "NEW_SELECTION") 
                                    arcpy.DeleteFeatures_management("lineLayer")
                                    arcpy.Delete_management("lineLayer")
                                    arcpy.CopyFeatures_management("del_clineSimpBuff1merge.shp", "del_clineSimp.shp")
                                    arcpy.Delete_management("del_clineSimpBuff1.shp")
                                    arcpy.Delete_management("del_clineSimpBuff1line.shp")
                                    arcpy.Delete_management("del_clineSimpBuff1merge.shp")
                                    count = count + j                                
                                del j, jlist
        
                                #remove last short ribs with a length threshold, then reattach centerlines that may have been split
                                # calculate length of each centerline
                                if arcpy.management.GetCount("del_clineSimp.shp")[0] == "0":
                                    arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_explode.shp")
                                    skip_iter = 1
                                    print "Centerline shape empty, iteration skipped."
                                else:
                                    arcpy.AddField_management("del_clineSimp.shp",'L','FLOAT')
                                    rows = arcpy.UpdateCursor("del_clineSimp.shp")
                                    for row in rows:
                                        areacliff = row.shape.length
                                        row.L = areacliff 
                                        rows.updateRow(row)
                                    del row, rows
                                    arcpy.CopyFeatures_management("del_clineSimp.shp", "min"+str("%02d" % (minSlope,))+"_extendedCenterlines.shp")
                                    
                                    # buffer out centerlines to capture end area removed in earlier buffer
                                    arcpy.Buffer_analysis("del_clineSimp.shp", "del_CliffCenterlineOut.shp", ((alpha*pixel*(2**(1/2)))/2), "FULL", "ROUND", "NONE")
            
                                    # define area with a slope less than that which defined "del_minCliff_dissolve.shp"
                                    edgeAreaSlope = areaSlope-beta_e
                                    print "Edge area defined by slope "+str(edgeAreaSlope)
                                    outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(edgeAreaSlope))
                                    outSetNull.save("del_edgeSlope.TIF") 
                                   
                                    outInt = Int("del_edgeSlope.TIF")
                                    outInt.save("del_edgeSlopeInt.TIF")                    
                                    arcpy.RasterToPolygon_conversion("del_edgeSlopeInt.TIF", "del_edgeAreaSlope.shp", "NO_SIMPLIFY", "VALUE")
                                    arcpy.AddField_management("del_edgeAreaSlope.shp", "value", "SHORT", 1, "", "", "", "", "")
                                    arcpy.Dissolve_management("del_edgeAreaSlope.shp", "del_edgeAreaSlope_dissolve.shp", "value")
                                    arcpy.CopyFeatures_management("del_edgeAreaSlope_dissolve.shp", "min"+str("%02d" % (minSlope,))+"_edgeArea.shp")
                                    arcpy.Intersect_analysis (["del_edgeAreaSlope_dissolve.shp", "del_CliffCenterlineOut.shp"], "del_betaF_edgeArea.shp")
                        
                                    # merge buffered lines with buffered area                    
                                    arcpy.Merge_management(["del_betaF_edgeArea.shp", "del_minCliff_explode.shp"], "del_lineAndArea.shp")
                                    arcpy.AddField_management("del_lineAndArea.shp", "valueDis", "SHORT", 1, "", "", "", "", "")                    
                                    arcpy.Dissolve_management("del_lineAndArea.shp", "del_lineAndArea_dissolve1.shp", "valueDis")
                                    arcpy.RepairGeometry_management("del_lineAndArea_dissolve1.shp")
                                    # fill holes and remove shapes less than one pixel to avoid error from buffer tool
                                    arcpy.MultipartToSinglepart_management("del_lineAndArea_dissolve1.shp", "del_lineAndArea_explode1.shp")
                                    arcpy.CalculateAreas_stats("del_lineAndArea_explode1.shp", 'del_lineAndArea_area1.shp')
                                    arcpy.MakeFeatureLayer_management('del_lineAndArea_area1.shp', 'tempLayer')
                                    expression = 'F_AREA <' + str(pixel**2) # m2
                                    arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression)
                                    arcpy.DeleteFeatures_management('tempLayer')
                                    arcpy.Delete_management('tempLayer')
                                    arcpy.cartography.AggregatePolygons('del_lineAndArea_area1.shp', "del_lineAndArea_dissolve.shp", 1, 0, pixel**2, 'NON_ORTHOGONAL') 
                                                       
                                    arcpy.RepairGeometry_management("del_lineAndArea_dissolve.shp")
                                    # buffer in to remove sliver geometries and out to make a diagonal set of single pixel shapes one feature
                                    arcpy.Buffer_analysis("del_lineAndArea_dissolve.shp", "del_lineAndArea_dissolveSmallBufferIn.shp", -0.5, "FULL", "ROUND", "ALL")
                                    arcpy.Buffer_analysis("del_lineAndArea_dissolveSmallBufferIn.shp", "del_lineAndArea_dissolveSmallBuffer.shp", 1, "FULL", "ROUND", "ALL")
                                    arcpy.MultipartToSinglepart_management("del_lineAndArea_dissolveSmallBuffer.shp", "del_lineAndArea_explode.shp")
                                    arcpy.CalculateAreas_stats('del_lineAndArea_explode.shp', 'del_lineAndArea_area.shp')
                                    arcpy.MakeFeatureLayer_management('del_lineAndArea_area.shp', 'tempLayer')
                                    expression = 'F_AREA <=' + str((pixel**2)*A_min)
                                    arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression)
                                    arcpy.DeleteFeatures_management('tempLayer')
                                    arcpy.Delete_management('tempLayer')
                                    
                                    if arcpy.management.GetCount("del_lineAndArea_area.shp")[0] == "0":
                                        print "del_lineAndArea_area.shp empty, iteration stopped."
                                        skip_iter = 1
                                    else:
                                        arcpy.AddField_management("del_lineAndArea_area.shp", "value", "SHORT", 1, "", "", "", "", "")
                                        arcpy.CopyFeatures_management('del_lineAndArea_area.shp', "min"+str("%02d" % (minSlope,))+"area"+str(int(areaSlope))+"_FinalCliffShape.shp")                         
                    if skip_iter == 0:
                        # CDF for values between minSlope and maxSlope
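                        # Slopes below minSlope map to probability 0, slopes between minSlope and seedSlope ramp linearly from minProb to 1, and slopes above seedSlope map to 1; the three rasters are mosaicked back together below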
                        outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE >= "+ str(minSlope))
                        outSetNull.save("del_min.TIF")
                        arcpy.RasterToFloat_conversion("del_min.TIF", "del_min.flt")
                        minsl = Raster('del_min.flt')
                        slopemin = minsl*0.0
                        slopemin.save('del_minSl.TIF')            
                            
                        outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE > "+ str(seedSlope))
                        outSetNull = SetNull(outSetNull, outSetNull, "VALUE < "+ str(minSlope))
                        outSetNull.save("del_mid.TIF")
                        arcpy.RasterToFloat_conversion("del_mid.TIF", "del_mid.flt")
                        midsl = Raster('del_mid.flt')
                        b = (1-(((1-minProb)/(seedSlope-minSlope))*seedSlope))
                        slopemid = (((1-minProb)/(seedSlope-minSlope))*midsl)+b
                        arcpy.env.snapRaster = dem
                        slopemid.save('del_midSl.TIF')
                        arcpy.env.snapRaster = dem
        
                        outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(seedSlope))
                        outSetNull.save("del_max.TIF")
                        arcpy.RasterToFloat_conversion("del_max.TIF", "del_max.flt")
                        maxsl = Raster('del_max.flt')
                        slopemax = maxsl*0.0+1.0
                        arcpy.env.snapRaster = dem
                        slopemax.save('del_maxSl.TIF')
                        arcpy.env.snapRaster = dem
                                
                        arcpy.MosaicToNewRaster_management("del_minSl.TIF;del_midSl.TIF;del_maxSl.TIF", workspace, "del_cliffProbabilitySlope.TIF", "", "32_BIT_FLOAT", "", "1", "LAST","FIRST")
                        arcpy.env.snapRaster = dem
        
                        # extract cliff probability and apply reduction factor to area outside of buffer.shp
                        if arcpy.management.GetCount("del_lineAndArea_area.shp")[0] == "0":
                            print "del_lineAndArea_area.shp is empty, did not create: CliffProbability_betai" + str("%02d" % (int(minSlope),)) + "betaA"  + str(int(areaSlope))+".TIF"
                        else:  
                            outExtractSlope = ExtractByMask("del_cliffProbabilitySlope.TIF", "del_lineAndArea_area.shp")
                            outExtractSlope.save("del_final_cliffs_found.TIF")
                            
                            arcpy.RasterToFloat_conversion("del_cliffProbabilitySlope.TIF", "del_CliffProbabilitySlope.flt")
                            CliffProbabilitySlope = Raster('del_CliffProbabilitySlope.flt')
                            CliffProbabilitySlopeREDUCED = CliffProbabilitySlope*phi
                            arcpy.env.snapRaster = dem
                            CliffProbabilitySlopeREDUCED.save('del_CliffProbabilitySlopeREDUCED.TIF')
            
                            arcpy.MosaicToNewRaster_management("del_final_cliffs_found.TIF;del_CliffProbabilitySlopeREDUCED.TIF", workspace, "CliffProbability_betai" + str("%02d" % (int(minSlope),)) + "betaA"  + str(int(areaSlope))+".TIF", "", "32_BIT_FLOAT", "", "1", "FIRST","FIRST")
                            arcpy.env.snapRaster = dem
                            
                            del CliffProbabilitySlope
                            del CliffProbabilitySlopeREDUCED
                                                       
                        del minsl
                        del midsl
                        del maxsl


                ## ----------------------------------
                ## Compute percent cliff in total spatial domain

                cliff_area_sum = 0
                debris_area_sum = 0
                Perc_Cliff = 0
                arcpy.CalculateAreas_stats(debarea_iteration, 'del_debris_area.shp')
                with arcpy.da.SearchCursor('del_debris_area.shp', ['F_AREA']) as cursor:
                    for row in cursor:
                        debris_area_sum += row[0]                
                                
                if os.path.isfile(workspace+'\\del_lineAndArea_area.shp') == False:
                    print "'del_lineAndArea_area.shp'does not exist."
                elif arcpy.management.GetCount('del_lineAndArea_area.shp')[0] == "0":
                    print "No area within 'del_lineAndArea_area.shp'."
                else:
                    with arcpy.da.SearchCursor('del_lineAndArea_area.shp', ['F_AREA']) as cursor:
                        for row in cursor:
                            cliff_area_sum += row[0]
                    Perc_Cliff = (cliff_area_sum/debris_area_sum)*100
                    arcpy.Dissolve_management("del_lineAndArea_area.shp", 'cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp', "value")
                    arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','minSlope','FLOAT')
                    arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Area_Cliff','FLOAT')
                    arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Area_Deb','FLOAT')
                    
                    arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Perc_Cliff','FLOAT')
                    rows = arcpy.UpdateCursor('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp')
                    for row in rows:
                        row.setValue('Area_Cliff', cliff_area_sum)
                        row.setValue('Area_Deb', debris_area_sum)
                        row.setValue('minSlope', minSlope)
                        row.setValue('Perc_Cliff', Perc_Cliff)
                        rows.updateRow(row)
                    del row, rows
                                     
                    print 'IceCliffLocation script [minSlope: ' + str("%02d" % (int(minSlope),)) + ' areaSlope: ' + str(int(areaSlope))+ '] done...'
                                         
        rasterList = arcpy.ListRasters("*del*")
        for raster in rasterList:
            arcpy.Delete_management(raster)
        del raster
        del rasterList

        fcList = arcpy.ListFeatureClasses("*del*")
        for fc in fcList:
            arcpy.Delete_management(fc)
        del fc
        del fcList

        print "intermediate files deleted"
            
    del minSlope
    del n
    
    if str(workspace.split("\\")[-1]) == 'Final':
        print "Script complete"        
    else:
        initialSlope_doubles = []
        percentCliffs_doubles = []
        initialSlope = []
        percentCliffs = []
        xfit = []
        yfit = []
        fcList = []
        arr = []
        fcList = arcpy.ListFeatureClasses("*cliffMap*")
        arcpy.Merge_management(fcList, "mergedSolutions.shp")
        arr = arcpy.da.TableToNumPyArray("mergedSolutions.shp", ('Perc_Cliff','minSlope'))
        arcpy.Delete_management("del_mergedSolutions.shp")
        initialSlope_doubles = [row[1] for row in arr]
        percentCliffs_doubles = [row[0] for row in arr]
        
        #remove rows that are repeated due to (possible) earlier tiled dissolve from insufficient memory 
        for i,j in enumerate(initialSlope_doubles):
            if j != initialSlope_doubles[(i-1) % len(initialSlope_doubles)]:
                initialSlope.append(j)
        del i,j
        for i,j in enumerate(percentCliffs_doubles):
            if j != percentCliffs_doubles[(i-1) % len(percentCliffs_doubles)]:
                percentCliffs.append(j)
        del i,j
                
        def func(x,a,b,c):
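            # Gaussian model fitted to ice cliff fraction versus the slope threshold (the fit referenced below as Equation 2 of Herreid and Pellicciotti, 2018)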
            return a*np.exp(-((x-b)/c)**2)
        try:
            popt, pcov = curve_fit(func,initialSlope,percentCliffs, maxfev=1000)
        except RuntimeError:
            fig = plt.figure()
            ax1 = fig.add_subplot(111)
            ax1.plot(initialSlope, percentCliffs, 'ko');plt.draw()
            fig.show()            
            print("Error - curve_fit failed")
        xfit = np.linspace(min(initialSlope), max(initialSlope), 100)
        yfit = popt[0]*np.exp(-((xfit-popt[1])/popt[2])**2)
        
        def secondDer(x):
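            # Analytic second derivative of the fitted Gaussian; compared against 'gamma' below to locate where the curve flattens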
            return popt[0]*(((4*(x-popt[1])**2*np.exp(-(x-popt[1])**2/popt[2]**2))/popt[2]**4)-((2*np.exp(-(x-popt[1])**2/popt[2]**2))/popt[2]**2))
        a1 = []
        a1 = [i for i in xrange(91)]
        a2 = secondDer(a1)
        #the next 3 for loops and a[x] variables define 1 of the 2 points to derive the optimization line.
        a3 = []
        a4 = []
        # indices where the second derivative is at or below 'gamma'
        for i, j in enumerate(a2):
            if j <= gamma:
                a3.append(i)
        # find the steepest point (in the middle of the side of the bell)
        for i, j in enumerate(a2):
            if j == max(a2):
                m=i
        # take only values to the right of 'm' in case the curve is flat at 0 slope
        for i in a3:
            if i > m:
                a4.append(i)
        del i,j
                
        ax = min(a4) 
        ay = popt[0]*np.exp(-((ax-popt[1])/popt[2])**2)
        
        #find max of bell for first point in optimization line
        yfit_array = array(yfit)        
        ftup = (np.where(yfit_array == max(yfit_array)))
        f = int(ftup[0]) # x,y index of max yfit 
                
        # d = distance from the fitted curve (Equation 2 in Herreid and Pellicciotti, 2018) to the line defined by ((xfit[f],yfit[f]),(ax,ay))
        d = abs((yfit[f]-ay)*xfit-(xfit[f]-ax)*yfit+xfit[f]*ay-yfit[f]*ax)/((yfit[f]-ay)**2+(xfit[f]-ax)**2)**0.5
        # crit is the index of the longest d
        crit = np.where(d == max(d))
        m = (yfit[f]-ay)/(xfit[f]-ax)
        b = yfit[f]-m*xfit[f]
        x_crit = (xfit[crit]+m*yfit[crit]-m*b)/(m**2+1)
        y_crit = m*((xfit[crit]+m*yfit[crit]-m*b)/(m**2+1))+b
        
        fig = plt.figure()
        ax1 = fig.add_subplot(111)
        ax1.plot(initialSlope, percentCliffs, 'ko'); plt.plot([xfit[f],ax],[yfit[f],ay]); plt.plot([xfit[crit],x_crit],[yfit[crit],y_crit]); plt.plot(xfit,yfit);plt.xlim(0, 100);plt.ylim(0, 100);plt.gca().set_aspect('equal', adjustable='box');plt.draw()
        ax1.set_xlabel(r'$\mathrm{\beta_i (^\circ)}$')
        ax1.set_ylabel('Ice cliff fraction (%)')
        fig.show()
        #fig.canvas.flush_events()
        import time
        time.sleep(1)
        #plt.pause(0.01)
        #plt.waitforbuttonpress()
        
        #save data used to make figure
        np.save(workspace+'\\figureData', (initialSlope, percentCliffs,[xfit[f],ax],[yfit[f],ay],[xfit[crit],x_crit],[yfit[crit],y_crit],xfit,yfit))

        IceCliffLocation.minSlope = float(xfit[crit])
Exemplo n.º 31
0
def merge_spectral_tiles(**kwargs):
    """
    Description: extracts spectral tiles to an area and mosaics extracted tiles with first data priority
    Inputs: 'cell_size' -- a cell size for the output spectral raster
            'output_projection' -- the well-known ID (WKID) of the output projection
            'work_geodatabase' -- a geodatabase to store temporary results
            'input_array' -- an array containing the grid raster (must be first), the study area raster (must be second), and the list of spectral tiles
            'output_array' -- an array containing the output spectral grid raster
    Returned Value: Returns a raster dataset on disk containing the merged spectral grid raster
    Preconditions: requires processed source spectral tiles and predefined grid
    """

    # Import packages
    import arcpy
    from arcpy.sa import ExtractByMask
    from arcpy.sa import IsNull
    from arcpy.sa import Nibble
    from arcpy.sa import Raster
    from arcpy.sa import SetNull
    import datetime
    import os
    import time

    # Parse key word argument inputs
    cell_size = kwargs['cell_size']
    output_projection = kwargs['output_projection']
    work_geodatabase = kwargs['work_geodatabase']
    tile_inputs = kwargs['input_array']
    grid_raster = tile_inputs.pop(0)
    study_area = tile_inputs.pop(0)
    spectral_grid = kwargs['output_array'][0]

    # Set overwrite option
    arcpy.env.overwriteOutput = True

    # Use three quarters of the cores on processes that can be split.
    arcpy.env.parallelProcessingFactor = "75%"

    # Set snap raster and extent
    arcpy.env.snapRaster = study_area
    arcpy.env.extent = Raster(grid_raster).extent

    # Define the output coordinate system
    output_system = arcpy.SpatialReference(output_projection)

    # Define intermediate rasters
    mosaic_raster = os.path.splitext(spectral_grid)[0] + '_mosaic.tif'
    nibble_raster = os.path.splitext(spectral_grid)[0] + '_nibble.tif'
    spectral_area = os.path.splitext(spectral_grid)[0] + '_area.tif'

    # Define folder structure
    grid_title = os.path.splitext(os.path.split(grid_raster)[1])[0]
    mosaic_location, mosaic_name = os.path.split(mosaic_raster)

    # Create source folder within mosaic location if it does not already exist
    source_folder = os.path.join(mosaic_location, 'sources')
    if os.path.exists(source_folder) == 0:
        os.mkdir(source_folder)

    # Create an empty list to store existing extracted source rasters for the grid
    input_length = len(tile_inputs)
    input_rasters = []

    # Identify raster extent of grid
    print(f'\tExtracting {input_length} spectral tiles...')
    grid_extent = Raster(grid_raster).extent
    grid_array = arcpy.Array()
    grid_array.add(arcpy.Point(grid_extent.XMin, grid_extent.YMin))
    grid_array.add(arcpy.Point(grid_extent.XMin, grid_extent.YMax))
    grid_array.add(arcpy.Point(grid_extent.XMax, grid_extent.YMax))
    grid_array.add(arcpy.Point(grid_extent.XMax, grid_extent.YMin))
    grid_array.add(arcpy.Point(grid_extent.XMin, grid_extent.YMin))
    grid_polygon = arcpy.Polygon(grid_array)

    # Save grid polygon
    grid_feature = os.path.join(work_geodatabase, 'grid_polygon')
    arcpy.management.CopyFeatures(grid_polygon, grid_feature)
    arcpy.management.DefineProjection(grid_feature, output_system)

    # Iterate through all input tiles and extract to grid if they overlap
    count = 1
    for raster in tile_inputs:
        output_raster = os.path.join(source_folder, os.path.split(raster)[1])
        if os.path.exists(output_raster) == 0:
            # Identify raster extent of tile
            tile_extent = Raster(raster).extent
            tile_array = arcpy.Array()
            tile_array.add(arcpy.Point(tile_extent.XMin, tile_extent.YMin))
            tile_array.add(arcpy.Point(tile_extent.XMin, tile_extent.YMax))
            tile_array.add(arcpy.Point(tile_extent.XMax, tile_extent.YMax))
            tile_array.add(arcpy.Point(tile_extent.XMax, tile_extent.YMin))
            tile_array.add(arcpy.Point(tile_extent.XMin, tile_extent.YMin))
            tile_polygon = arcpy.Polygon(tile_array)

            # Save tile polygon
            tile_feature = os.path.join(work_geodatabase, 'tile_polygon')
            arcpy.CopyFeatures_management(tile_polygon, tile_feature)
            arcpy.DefineProjection_management(tile_feature, output_system)

            # Select tile extent with grid extent
            selection = int(
                arcpy.GetCount_management(
                    arcpy.management.SelectLayerByLocation(
                        tile_feature, 'INTERSECT', grid_feature, '',
                        'NEW_SELECTION', 'NOT_INVERT')).getOutput(0))

            # If tile overlaps grid then perform extraction
            if selection == 1:
                # Extract raster to mask
                print(
                    f'\t\tExtracting spectral tile {count} of {input_length}...'
                )
                iteration_start = time.time()
                extract_raster = ExtractByMask(raster, grid_raster)
                # Copy extracted raster to output
                print(f'\t\tSaving spectral tile {count} of {input_length}...')
                arcpy.management.CopyRaster(extract_raster, output_raster, '',
                                            '0', '-32768', 'NONE', 'NONE',
                                            '16_BIT_SIGNED', 'NONE', 'NONE',
                                            'TIFF', 'NONE', 'CURRENT_SLICE',
                                            'NO_TRANSPOSE')
                # End timing
                iteration_end = time.time()
                iteration_elapsed = int(iteration_end - iteration_start)
                iteration_success_time = datetime.datetime.now()
                # Report success
                print(
                    f'\t\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
                )
                print('\t\t----------')
            # If tile does not overlap grid then report message
            else:
                print(
                    f'\t\tSpectral tile {count} of {input_length} does not overlap grid...'
                )
                print('\t\t----------')

            # Remove tile feature class
            if arcpy.Exists(tile_feature) == 1:
                arcpy.management.Delete(tile_feature)

        # If extracted tile already exists then report message
        else:
            print(
                f'\t\tExtracted spectral tile {count} of {input_length} already exists...'
            )
            print('\t\t----------')

        # If the output raster exists then append it to the raster list
        if os.path.exists(output_raster) == 1:
            input_rasters.append(output_raster)
        count += 1

    # Remove grid feature
    if arcpy.Exists(grid_feature) == 1:
        arcpy.management.Delete(grid_feature)
    print(f'\tFinished extracting {input_length} spectral tiles.')
    print('\t----------')

    # Mosaic raster tiles to new raster
    print(f'\tMosaicking the input rasters for {grid_title}...')
    iteration_start = time.time()
    arcpy.management.MosaicToNewRaster(input_rasters, mosaic_location,
                                       mosaic_name, output_system,
                                       '16_BIT_SIGNED', cell_size, '1',
                                       'MAXIMUM', 'FIRST')
    # Enforce correct projection
    arcpy.management.DefineProjection(mosaic_raster, output_system)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t----------')

    # Calculate the missing area
    print('\tCalculating null space...')
    iteration_start = time.time()
    raster_null = SetNull(IsNull(Raster(mosaic_raster)), 1, 'VALUE = 1')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t----------')

    # Impute missing data by nibbling the NoData from the focal mean
    print('\tImputing missing values by geographic nearest neighbor...')
    iteration_start = time.time()
    raster_filled = Nibble(Raster(mosaic_raster), raster_null, 'DATA_ONLY',
                           'PROCESS_NODATA', '')
    # Copy nibble raster to output
    print(f'\tSaving filled raster...')
    arcpy.management.CopyRaster(raster_filled, nibble_raster, '', '0',
                                '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                'NONE', 'NONE', 'TIFF', 'NONE',
                                'CURRENT_SLICE', 'NO_TRANSPOSE')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t----------')

    # Remove overflow fill from the study area
    print('\tRemoving overflow fill from study area...')
    iteration_start = time.time()
    raster_preliminary = ExtractByMask(nibble_raster, study_area)
    # Copy preliminary extracted raster to output
    arcpy.management.CopyRaster(raster_preliminary, spectral_area, '', '0',
                                '-32768', 'NONE', 'NONE', '16_BIT_SIGNED',
                                'NONE', 'NONE', 'TIFF', 'NONE',
                                'CURRENT_SLICE', 'NO_TRANSPOSE')
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t----------')

    # Remove overflow fill from the grid
    print('\tRemoving overflow fill from grid...')
    iteration_start = time.time()
    raster_final = ExtractByMask(spectral_area, grid_raster)
    arcpy.management.CopyRaster(raster_final, spectral_grid, '', '0', '-32768',
                                'NONE', 'NONE', '16_BIT_SIGNED', 'NONE',
                                'NONE', 'TIFF', 'NONE', 'CURRENT_SLICE',
                                'NO_TRANSPOSE')
    # Delete intermediate rasters
    if arcpy.Exists(mosaic_raster) == 1:
        arcpy.management.Delete(mosaic_raster)
    if arcpy.Exists(nibble_raster) == 1:
        arcpy.management.Delete(nibble_raster)
    if arcpy.Exists(spectral_area) == 1:
        arcpy.management.Delete(spectral_area)
    # End timing
    iteration_end = time.time()
    iteration_elapsed = int(iteration_end - iteration_start)
    iteration_success_time = datetime.datetime.now()
    # Report success
    print(
        f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: {datetime.timedelta(seconds=iteration_elapsed)})'
    )
    print('\t----------')
    out_process = f'Successfully created {os.path.split(spectral_grid)[1]}'
    return out_process
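
A hypothetical call sketch for merge_spectral_tiles based on its docstring; every path, the cell size, and the projection number below are placeholders, and input_array must list the grid raster first and the study area raster second, followed by the spectral tiles.

import glob

# Placeholder inputs for a single grid
grid_raster = 'C:/data/grids/A1.tif'
study_area = 'C:/data/study_area.tif'
spectral_tiles = sorted(glob.glob('C:/data/spectral_tiles/*.tif'))

kwargs_merge = {'cell_size': 10,
                'output_projection': 3338,
                'work_geodatabase': 'C:/data/work.gdb',
                'input_array': [grid_raster, study_area] + spectral_tiles,
                'output_array': ['C:/data/output/A1_spectral.tif']}
print(merge_spectral_tiles(**kwargs_merge))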
Exemplo n.º 32
0
def CalculateVegetationHeight(MNH, emprise, idField, geodata, OutputFC):

    # Local variables:
    env.workspace = geodata

    # Process: Extract by mask
    pathExtract = os.path.join(geodata, "EctractMNH")
    Extract_MNH = ExtractByMask(MNH, emprise)
    Extract_MNH.save(pathExtract)

    # Process: Raster to points
    arcpy.RasterToPoint_conversion(Extract_MNH, "MNHPoint", "Value")

    # Process: Spatial join
    # Create the FieldMappings object
    fmap = arcpy.FieldMappings()

    # Add the tables
    fmap.addTable("MNHPoint")
    fmap.addTable(emprise)

    # Find the index of the grid_code field map and create a field map for each statistic (mean, min, max, median, standard deviation)
    idexGridCode = fmap.findFieldMapIndex("grid_code")

    # Create a field map for each column
    fieldMapGCmean = fmap.getFieldMap(idexGridCode)

    fieldMapGCmin = arcpy.FieldMap()
    fieldMapGCmin.addInputField("MNHPoint","grid_code")

    fieldMapGCmax = arcpy.FieldMap()
    fieldMapGCmax.addInputField("MNHPoint","grid_code")

    fieldMapGCmedian = arcpy.FieldMap()
    fieldMapGCmedian.addInputField("MNHPoint","grid_code")

    fieldMapGCstdv = arcpy.FieldMap()
    fieldMapGCstdv.addInputField("MNHPoint","grid_code")

    # Compute the Min_height field
    Minfld = fieldMapGCmin.outputField
    Minfld.name = "Min_height"
    Minfld.aliasName = "Min_height"
    fieldMapGCmin.outputField = Minfld
    fieldMapGCmin.mergeRule = "Min"
    fmap.addFieldMap(fieldMapGCmin)

    # Compute the Max_height field
    Maxfld =fieldMapGCmax.outputField
    Maxfld.name = "Max_height"
    Maxfld.aliasName = "Max_height"
    fieldMapGCmax.outputField = Maxfld
    fieldMapGCmax.mergeRule = "Max"
    fmap.addFieldMap(fieldMapGCmax)

    # Compute the Med_height field
    MedianFld = fieldMapGCmedian.outputField
    MedianFld.name = "Med_height"
    MedianFld.aliasName = "Med_height"
    fieldMapGCmedian.outputField = MedianFld
    fieldMapGCmedian.mergeRule = "Median"
    fmap.addFieldMap(fieldMapGCmedian)

    # Compute the Stdv_height field
    StdvFld = fieldMapGCstdv.outputField
    StdvFld.name= "Stdv_height"
    StdvFld.aliasName = "Stdv_height"
    fieldMapGCstdv.outputField=StdvFld
    fieldMapGCstdv.mergeRule="StdDev"
    fmap.addFieldMap(fieldMapGCstdv)

    # Compute the Mean_height field
    Meanfld = fieldMapGCmean.outputField
    Meanfld.name ="Mean_height"
    Meanfld.aliasName = "Mean_height"
    fieldMapGCmean.outputField = Meanfld
    fieldMapGCmean.mergeRule ="Mean"
    fmap.replaceFieldMap(idexGridCode, fieldMapGCmean)

    arcpy.SpatialJoin_analysis(emprise, "MNHPoint", OutputFC, "", "", fmap)

    # Process: Calculate field
    arcpy.AddField_management(OutputFC, "Variance_height", "DOUBLE")
    arcpy.CalculateField_management(OutputFC, "Variance_height", "[Stdv_height]*[Stdv_height]")

    # Delete Field:
    for fld in arcpy.ListFields(OutputFC):
        if fld.name not in [idField,"Min_height","Max_height","Med_height","Stdv_height","Mean_height","Join_Count","Variance_height"]:
            try:
                arcpy.DeleteField_management(OutputFC,fld.name)
            except:
                pass

    # Return outputFC
    return OutputFC
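
A hypothetical call sketch for CalculateVegetationHeight; the geodatabase path, raster path, and field name are placeholders. MNH is the canopy height raster, emprise the polygon footprints to summarize over, idField the identifier field kept in the output, geodata the working geodatabase, and OutputFC the feature class that receives the height statistics.

# Placeholder inputs
gdb = r'C:\data\vegetation.gdb'
height_fc = CalculateVegetationHeight(MNH=r'C:\data\mnh.tif',
                                      emprise=gdb + r'\parcels',
                                      idField='PARCEL_ID',
                                      geodata=gdb,
                                      OutputFC=gdb + r'\parcels_height')
print(height_fc)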