Code Example #1
File: canopy.py  Project: ocsmit/canopy
def update_gtpoints(config, old_points, phyreg_ids):
    '''
    This function copies a previous year's GT points and updates the
    copies with the new year's GT values. It additionally corrects the
    values if they are within an inverted region.

    Parameters
    ----------
    config :
        CanoPy configuration object
    old_points : str
        Layer name for the previous year's points
    phyreg_ids : list
        list of physiographic region IDs to process
    '''
    phyregs_layer = config.phyregs_layer
    naipqq_layer = config.naipqq_layer
    spatref_wkid = config.spatref_wkid
    analysis_year = config.analysis_year
    results_path = config.results_path

    arcpy.env.overwriteOutput = True
    arcpy.env.addOutputsToMap = False
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(spatref_wkid)

    # use configparser converter to read list
    conf = ConfigParser(
        converters={'list': lambda x: [int(i.strip()) for i in x.split(',')]})
    conf.read(config.config)
    inverted_reg = conf.getlist('config', 'inverted_phyreg_ids')

    # make sure to clear selection because most geoprocessing tools use
    # selected features, if any
    arcpy.SelectLayerByAttribute_management(naipqq_layer, 'CLEAR_SELECTION')

    # select phyregs features to process
    arcpy.SelectLayerByAttribute_management(phyregs_layer,
                                            where_clause='PHYSIO_ID in (%s)' %
                                            ','.join(map(str, phyreg_ids)))
    with arcpy.da.SearchCursor(phyregs_layer, ['NAME', 'PHYSIO_ID']) as cur:
        for row in cur:
            name = row[0]
            print(name)
            # CreateRandomPoints cannot create a shapefile with - in its
            # filename
            name = name.replace(' ', '_').replace('-', '_')
            phyreg_id = row[1]
            # Check if region is inverted
            inverted = phyreg_id in inverted_reg

            outdir_path = '%s/%s/Outputs' % (results_path, name)
            shp_filename = 'gtpoints_%d_%s.shp' % (analysis_year, name)

            tmp_shp_filename = 'tmp_%s' % shp_filename
            tmp_shp_path = '%s/%s' % (outdir_path, tmp_shp_filename)

            # select the features for this region
            arcpy.SelectLayerByAttribute_management(
                phyregs_layer, where_clause='PHYSIO_ID=%d' % phyreg_id)

            # create a new field to store data for ground truthing
            gt_field = 'GT_%s' % analysis_year
            arcpy.CopyFeatures_management(old_points, tmp_shp_path)
            arcpy.AddField_management(tmp_shp_path, gt_field, 'SHORT')

            # spatially join the naip qq layer to random points to find
            # output tile filenames
            shp_path = '%s/%s' % (outdir_path, shp_filename)
            arcpy.SpatialJoin_analysis(tmp_shp_path, naipqq_layer, shp_path)

            # delete temporary point shapefile
            arcpy.Delete_management(tmp_shp_path)

            # get required fields from spatially joined point layer
            with arcpy.da.UpdateCursor(
                    shp_path, ['SHAPE@XY', gt_field, 'FileName']) as cur2:
                for row2 in cur2:
                    # read filename
                    filename = row2[2][:-13]
                    # construct the final output tile path
                    cfrtiffile_path = '%s/cfr%s.tif' % (outdir_path, filename)
                    # read the output tile as raster
                    ras = arcpy.sa.Raster(cfrtiffile_path)
                    # resolution
                    res = (ras.meanCellWidth, ras.meanCellHeight)
                    # convert raster to numpy array to read cell values
                    ras_a = arcpy.RasterToNumPyArray(ras)
                    # get xy values of point
                    xy = row2[0]
                    # perform calculate_row_column to get the row and column
                    # of the point
                    rc = __calculate_row_column(xy, ras.extent, res)
                    # update the point, correct inverted region points
                    if inverted:
                        row2[1] = 1 - ras_a[rc]
                    else:
                        row2[1] = ras_a[rc]
                    cur2.updateRow(row2)

            # delete all fields except only those required
            shp_desc = arcpy.Describe(shp_path)
            oid_field = shp_desc.OIDFieldName
            shape_field = shp_desc.shapeFieldName

            all_fields = arcpy.ListFields(shp_path)
            required_fields = [oid_field, shape_field, gt_field]
            extra_fields = [
                x.name for x in all_fields if x.name not in required_fields
            ]
            arcpy.DeleteField_management(shp_path, extra_fields)

    # clear selection again
    arcpy.SelectLayerByAttribute_management(phyregs_layer, 'CLEAR_SELECTION')

    print('Completed')
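
The helper __calculate_row_column() called above is not shown in this listing, so the following is a plausible reconstruction rather than the author's code: given the usage ras_a[rc] with res = (meanCellWidth, meanCellHeight), it presumably maps a point's (x, y) coordinates to the (row, column) index of the raster cell containing it.

def __calculate_row_column(xy, extent, res):
    # Hypothetical sketch: rows count down from the top edge (YMax),
    # columns count right from the left edge (XMin).
    x, y = xy
    col = int((x - extent.XMin) / res[0])
    row = int((extent.YMax - y) / res[1])
    return (row, col)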
Code Example #2
File: canopy.py  Project: ocsmit/canopy
    def __init__(self, arc_raster, nodata=3):
        self.region_array = arcpy.RasterToNumPyArray(arc_raster,
                                                     nodata_to_value=nodata)
        self.nodata = nodata
        self.check(self.region_array)
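
A note on nodata_to_value: arcpy.RasterToNumPyArray() substitutes the raster's NoData cells with the given value, so downstream NumPy code sees a plain array. A minimal sketch of the pattern, assuming a hypothetical raster path:

import arcpy
import numpy as np

# NoData cells come back as 3 in the returned ndarray
arr = arcpy.RasterToNumPyArray(arcpy.Raster('C:/data/region.tif'),
                               nodata_to_value=3)
print(np.count_nonzero(arr == 3))  # cells that were NoData (or genuinely 3)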
Code Example #3
File: canopy.py  Project: ocsmit/canopy
def generate_gtpoints(config, phyreg_ids, min_area_sqkm, max_area_sqkm,
                      min_points, max_points):
    '''
    This function generates randomized points for ground truthing. It creates
    the GT field in the output shapefile.

    Parameters
    ----------
    config :
        CanoPy configuration object
    phyreg_ids : list
        list of physiographic region IDs to process
    min_area_sqkm : float
        minimum area in square kilometers
    max_area_sqkm : float
        maximum area in square kilometers
    min_points : int
        minimum number of points allowed
    max_points : int
        maximum number of points allowed
    '''
    # fix user errors, if any
    if min_area_sqkm > max_area_sqkm:
        min_area_sqkm, max_area_sqkm = max_area_sqkm, min_area_sqkm

    if min_points > max_points:
        min_points, max_points = max_points, min_points

    phyregs_layer = config.phyregs_layer
    phyregs_area_sqkm_field = config.phyregs_area_sqkm_field
    naipqq_layer = config.naipqq_layer
    spatref_wkid = config.spatref_wkid
    analysis_year = config.analysis_year
    results_path = config.results_path

    arcpy.env.overwriteOutput = True
    arcpy.env.addOutputsToMap = False
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(spatref_wkid)

    # use configparser converter to read list
    conf = ConfigParser(
        converters={'list': lambda x: [int(i.strip()) for i in x.split(',')]})
    conf.read(config.config)
    inverted_reg = conf.getlist('config', 'inverted_phyreg_ids')

    # make sure to clear selection because most geoprocessing tools use
    # selected features, if any
    arcpy.SelectLayerByAttribute_management(naipqq_layer, 'CLEAR_SELECTION')

    # select phyregs features to process
    arcpy.SelectLayerByAttribute_management(phyregs_layer,
                                            where_clause='PHYSIO_ID in (%s)' %
                                            ','.join(map(str, phyreg_ids)))
    with arcpy.da.SearchCursor(
            phyregs_layer,
        ['NAME', 'PHYSIO_ID', phyregs_area_sqkm_field]) as cur:
        for row in cur:
            name = row[0]
            print(name)
            # CreateRandomPoints cannot create a shapefile with - in its
            # filename
            name = name.replace(' ', '_').replace('-', '_')
            phyreg_id = row[1]
            area_sqkm = row[2]

            # Check if region is inverted
            inverted = phyreg_id in inverted_reg

            # +1 to count partial points; e.g., 0.1 requires one point
            point_count = int(min_points + (max_points - min_points) /
                              (max_area_sqkm - min_area_sqkm) *
                              (area_sqkm - min_area_sqkm) + 1)
            print('Raw point count: %d' % point_count)
            if point_count < min_points:
                point_count = min_points
            elif point_count > max_points:
                point_count = max_points
            print('Final point count: %d' % point_count)

            outdir_path = '%s/%s/Outputs' % (results_path, name)
            shp_filename = 'gtpoints_%d_%s.shp' % (analysis_year, name)

            tmp_shp_filename = 'tmp_%s' % shp_filename
            tmp_shp_path = '%s/%s' % (outdir_path, tmp_shp_filename)

            # create random points
            arcpy.SelectLayerByAttribute_management(
                phyregs_layer, where_clause='PHYSIO_ID=%d' % phyreg_id)
            arcpy.CreateRandomPoints_management(outdir_path, tmp_shp_filename,
                                                phyregs_layer, '', point_count)

            # create a new field to store data for ground truthing
            gt_field = 'GT'
            arcpy.AddField_management(tmp_shp_path, gt_field, 'SHORT')

            # spatially join the naip qq layer to random points to find
            # output tile filenames
            shp_path = '%s/%s' % (outdir_path, shp_filename)
            arcpy.SpatialJoin_analysis(tmp_shp_path, naipqq_layer, shp_path)

            # delete temporary point shapefile
            arcpy.Delete_management(tmp_shp_path)

            # get required fields from spatially joined point layer
            with arcpy.da.UpdateCursor(
                    shp_path, ['SHAPE@XY', gt_field, 'FileName']) as cur2:
                for row2 in cur2:
                    # read filename
                    filename = row2[2][:-13]
                    # construct the final output tile path
                    cfrtiffile_path = '%s/cfr%s.tif' % (outdir_path, filename)
                    # read the output tile as raster
                    ras = arcpy.sa.Raster(cfrtiffile_path)
                    # resolution
                    res = (ras.meanCellWidth, ras.meanCellHeight)
                    # convert raster to numpy array to read cell values
                    ras_a = arcpy.RasterToNumPyArray(ras)
                    # get xy values of point
                    xy = row2[0]
                    # perform calculate_row_column to get the row and column
                    # of the point
                    rc = config.__calculate_row_column(xy, ras.extent, res)
                    # update the point, correct inverted region points
                    if inverted:
                        row2[1] = 1 - ras_a[rc]
                    else:
                        row2[1] = ras_a[rc]
                    cur2.updateRow(row2)

            # delete all fields except only those required
            shp_desc = arcpy.Describe(shp_path)
            oid_field = shp_desc.OIDFieldName
            shape_field = shp_desc.shapeFieldName

            all_fields = arcpy.ListFields(shp_path)
            required_fields = [oid_field, shape_field, gt_field]
            extra_fields = [
                x.name for x in all_fields if x.name not in required_fields
            ]
            arcpy.DeleteField_management(shp_path, extra_fields)

    # clear selection again
    arcpy.SelectLayerByAttribute_management(phyregs_layer, 'CLEAR_SELECTION')

    print('Completed')
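
A worked instance of the point-count interpolation above, with made-up bounds: the count scales linearly with region area between min_points and max_points, plus one to count partial points.

min_points, max_points = 100, 500
min_area_sqkm, max_area_sqkm = 10.0, 1000.0
area_sqkm = 250.0

point_count = int(min_points + (max_points - min_points) /
                  (max_area_sqkm - min_area_sqkm) *
                  (area_sqkm - min_area_sqkm) + 1)
print(point_count)  # int(100 + 400/990*240 + 1) = 197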
Code Example #4
File: mask_fp_nlcd_ytc.py  Project: mbougie/gibbs
def execute_task(args):
    in_extentDict, data, traj_list, cls, rws = args

    fc_count = in_extentDict[0]

    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]

    #set environments
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellSize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)

    print('rws==================================', rws)
    print('cls==================================', cls)

    # outData = numpy.zeros((rows,cols), numpy.int16)
    outData = np.zeros((rws, cls), dtype=np.uint16)

    ### create numpy arrays for input datasets nlcds and traj
    nlcds = {
        1992:
        arcpy.RasterToNumPyArray(
            in_raster=
            'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_1992',
            lower_left_corner=arcpy.Point(XMin, YMin),
            nrows=rws,
            ncols=cls),
        2001:
        arcpy.RasterToNumPyArray(
            in_raster=
            'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2001',
            lower_left_corner=arcpy.Point(XMin, YMin),
            nrows=rws,
            ncols=cls),
        2006:
        arcpy.RasterToNumPyArray(
            in_raster=
            'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2006',
            lower_left_corner=arcpy.Point(XMin, YMin),
            nrows=rws,
            ncols=cls),
        2011:
        arcpy.RasterToNumPyArray(
            in_raster=
            'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2011',
            lower_left_corner=arcpy.Point(XMin, YMin),
            nrows=rws,
            ncols=cls),
    }

    arr_traj = arcpy.RasterToNumPyArray(
        in_raster=data['pre']['traj_yfc']['path'],
        lower_left_corner=arcpy.Point(XMin, YMin),
        nrows=rws,
        ncols=cls)

    #### find the location of each pixel labeled with a specific arbitrary value in the rows list
    #### note the traj_list is derived from the sql query above
    for row in traj_list:

        traj = row[0]
        ytc = row[1]

        #Return the indices of the pixels that match this trajectory's arbitrary value.
        indices = (arr_traj == traj).nonzero()

        #stack the indices so they are easier to work with
        stacked_indices = np.column_stack((indices[0], indices[1]))

        #get the row and column of each pixel selected above
        for pixel_location in stacked_indices:
            ##create an nlcd_list to store the nlcd values associated with EACH ytc pixel
            nlcd_list = []

            # use r/c so the outer loop variable 'row' is not shadowed
            r = pixel_location[0]
            c = pixel_location[1]

            if ytc < 2012:
                nlcd_list.append(nlcds[2001][r][c])
                nlcd_list.append(nlcds[2006][r][c])
            else:
                nlcd_list.append(nlcds[2006][r][c])
                nlcd_list.append(nlcds[2011][r][c])

            # print('nlcd_list', nlcd_list)
            ##count the nlcd values equal to 82 (82 = cultivated crop)
            count_82 = nlcd_list.count(82)

            ##label the pixel
            if count_82 > 0:
                outData[r, c] = data['refine']['arbitrary_crop']

    arcpy.ClearEnvironment("extent")

    outname = "tile_" + str(fc_count) + '.tif'

    # create the output path for this tile
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles",
                           outname)

    # NumPyArrayToRaster (in_array, {lower_left_corner}, {x_cell_size}, {y_cell_size}, {value_to_nodata})
    myRaster = arcpy.NumPyArrayToRaster(outData,
                                        lower_left_corner=arcpy.Point(
                                            XMin, YMin),
                                        x_cell_size=30,
                                        y_cell_size=30,
                                        value_to_nodata=0)

    ##free memory from outdata array!!
    outData = None

    myRaster.save(outpath)

    myRaster = None
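
The lower_left_corner/nrows/ncols arguments above are what make this function tile-friendly: each worker reads only its own window of the source raster instead of the whole thing. A minimal sketch of the same round trip, with hypothetical paths and an assumed 30 m cell size:

import arcpy
import numpy as np

xmin, ymin = 500000.0, 4200000.0  # assumed tile origin
rws, cls = 1024, 1024

# read one window of a (hypothetical) land-cover raster
arr = arcpy.RasterToNumPyArray('C:/data/example.gdb/landcover',
                               lower_left_corner=arcpy.Point(xmin, ymin),
                               nrows=rws, ncols=cls)
# flag cultivated crops (NLCD class 82) and write the tile back out
out = np.where(arr == 82, 1, 0).astype(np.uint8)
ras = arcpy.NumPyArrayToRaster(out, arcpy.Point(xmin, ymin), 30, 30, 0)
ras.save('C:/data/tiles/tile_demo.tif')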
Code Example #5
File: canopy.py  Project: ocsmit/canopy
def __weighted_ob(name_list, naip, nlcd_region, region_lc):
    '''
    Weighted function as described in the docstring of objective_function.
    It has a longer computational time, as it iterates over each tile once
    per candidate weight (weights 0-20).
    '''
    #######################################################################
    # TODO:
    # Is there a better way to iterate over the tiles in the weighted
    # function? Maybe it would be best to take only the tiles which fall
    # within a certain tolerance of the lowest tile after the first
    # iteration? Doing that would reduce the amount of iterations from
    # n * 20 to n + (n_{F \in [F_0, F_0 + t]} * 20)) where n is the number
    # of tiles, F is the objective function value, and t is the tolerance
    # value.
    #######################################################################

    # Initialize dictionary for all weighted tiles.
    weighted_tiles = {}
    # One iteration per weight value 0-20 (the NLCD legend has 20 classes).
    for weight in range(21):
        # Index dictionary for iteration weight_i
        out_index = {}
        for i in sorted(name_list):
            # Select tile i
            arcpy.SelectLayerByAttribute_management(naip, "NEW_SELECTION",
                                                    f"OBJECTID = {i}")
            # Get local NLCD
            nlcd_tile = arcpy.sa.ExtractByMask(nlcd_region, naip)
            # Convert to numpy array and get counts of values.
            tile_arr = arcpy.RasterToNumPyArray(nlcd_tile)
            tile_unique, tile_counts = np.unique(tile_arr, return_counts=True)
            tile_lc = dict(zip(tile_unique, tile_counts))
            # Remove nodata, if present
            ras = arcpy.Raster(nlcd_tile)
            nan = ras.noDataValue
            tile_lc.pop(nan, None)
            # Compute weighted minimization value
            d = []
            for j in region_lc.keys():
                G = region_lc.get(j) / sum(region_lc.values())
                # Classes absent from the tile contribute L = 0.
                L = tile_lc.get(j, 0) / sum(tile_lc.values())
                c = (G - L)**2 + weight * (len(region_lc) / 20 -
                                           len(tile_lc) / 20)**2
                d.append(c)

            # Each class value is added to list 'd' and then summed at the
            # end of each iteration.
            out_index.update({i: math.fsum(d)})
        # Sort the dictionary by objective function value
        sort_func = {
            k: v
            for k, v in sorted(out_index.items(), key=lambda item: item[1])
        }
        # Add the tile with the lowest function value to a new list.
        weighted_tiles.update({
            weight:
            [list(sort_func.items())[0][0],
             list(sort_func.items())[0][1]]
        })
    training_tile = weighted_tiles
    return training_tile
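
The minimization value itself needs no arcpy; a standalone sketch with made-up regional and tile land-cover histograms (class code -> pixel count) shows the computation for one tile and one weight:

import math

region_lc = {41: 5000, 42: 3000, 82: 2000}
tile_lc = {41: 600, 82: 400}
weight = 5

d = []
for j in region_lc:
    G = region_lc[j] / sum(region_lc.values())     # regional proportion
    L = tile_lc.get(j, 0) / sum(tile_lc.values())  # local proportion (0 if absent)
    d.append((G - L) ** 2 + weight * (len(region_lc) / 20 - len(tile_lc) / 20) ** 2)
print(math.fsum(d))  # 0.1775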
Code Example #6
File: HF_precip_one.py  Project: dgketchum/etrm
        for day in rrule.rrule(rrule.DAILY, dtstart=start, until=end):
            folder = "C:\\Recharge_GIS\\Precip\\800m\\Daily\\"
            yr = day.year
            if yr <= 1991:
                arcpy.env.overwriteOutput = True      # Ensure overwrite capability
                arcpy.env.workspace = folder + str(day.year) + "a"
                ras = folder + str(day.year) + "a\\" + "PRISM_NM_" + str(day.year) + day.strftime('%m') + day.strftime('%d') + ".tif"
                if arcpy.Exists(ras):
                    try:
                        arcpy.CheckOutExtension("Spatial")
                        mask = "C:\\Recharge_GIS\\nm_gauges.gdb\\nm_wtrs_11DEC15"
                        rasPart = arcpy.sa.ExtractByMask(ras, geo)
                        if day == beginPrecip:
                            rasPart.save(folder + str(day.year) + "a\\" + str(gPoly) + "_rasterClipTest.tif")
                        arr = arcpy.RasterToNumPyArray(rasPart, nodata_to_value=0)
                        arrVal = np.multiply(arr, rasSq)
                        arrSum = arrVal.sum()
                        print "Sum of precip on " + str(day) + ":  " + str(arrSum)
                        precip.append(arrSum)
                        date.append(day)
                    except:
                        pass
            if yr > 1991:
                arcpy.env.workspace = folder + str(day.year)
                ras = folder + str(day.year) + "\\" + "PRISM_NMHW2Buff_" + str(day.year) + day.strftime('%m') + day.strftime('%d') + ".tif"
                if arcpy.Exists(ras):
                    try:
                        arcpy.CheckOutExtension("Spatial")
                        mask = "C:\\Recharge_GIS\\nm_gauges.gdb\\nm_wtrs_11DEC15"
                        rasPart = arcpy.sa.ExtractByMask(ras, geo)
Code Example #7
def TemperatureSeparation_NoShadow(dir_LAI, dir_RGBNIR, dir_Tr, dir_DSM,
                                   NoDataValue, Veg_threshold, Soil_threshold,
                                   band_R, band_NIR, 
                                   cellsize_resample,
                                   dir_output, output_name_multiple, output_name_single,
                                   Azimuth, Altitude, 
                                   MiddleProducts="No"):
    '''
    The effect of shadow is considered in this function, and the "Hillshade" tool from ArcMap must be run on the high-resolution DSM data.
    If DSM data is upscaled from 0.15 meter to 0.6 meter, details are smoothed. Normally, one 0.6 meter by 0.6 meter grid contains a small number of
    shadow pixels (e.g., 3 out of 16 pixels). Therefore, this function treats a 0.6 meter by 0.6 meter pixel as a shadow pixel once
    this pixel contains at least one shadow pixel at 0.15 meter by 0.15 meter.
    Note: To run this function successfully, one helper function is needed: the function called TellResolution() can be found here:
            https://github.com/RuiGao9/Rui_Functions_Package/blob/main/TellResolution.py

    Parameters used in this function:
    dir_LAI: the file path of the LAI image; resolution is 3.6 meter by 3.6 meter.
    dir_RGBNIR: the file path of the optical image containing R, G, B, and NIR bands; the resolution is 0.15 meter by 0.15 meter.
    dir_Tr: the file path of the temperature image in units of degrees C; the resolution is 0.6 meter by 0.6 meter.
    dir_DSM: the file path of the DSM image in meters; the resolution is 0.15 meter by 0.15 meter.
    NoDataValue: the value assigned to represent NaN.
    Veg_threshold: any NDVI pixel value above this threshold represents vegetation pixels.
    Soil_threshold: any NDVI pixel value below this threshold represents soil pixels.
    band_R: the index of the layer in the optical image (multiple bands) representing the Red band.
    band_NIR: the index of the layer in the optical image (multiple bands) representing the Near-infrared band.
    cellsize_resample: 0.6 meter by 0.6 meter resolution, used to calculate the vine shadow and the temperature separation.
    dir_output: the folder that will hold the generated files.
    output_name_multiple: the name of the separated-temperature result.
    output_name_single: the name of the single-layer temperature result - temperature pixels falling on shadow pixels are deleted.
    Azimuth: a parameter used for the vine shadow calculation.
    Altitude: a parameter used for the vine shadow calculation.
    MiddleProducts: default is "No", which deletes the intermediate products. Other values, like "Yes", keep the intermediate products.
    '''
    # import libraries
    import arcpy
    import gdal
    import os
    import numpy as np
    import pandas as pd
    from scipy.stats import linregress
    import matplotlib.pyplot as plt
    
    # RGB-NIR image processing
    # resample the optical image
    [res_x,res_y] = TellResolution(dir_RGBNIR)
    name_resample = "resample_RGBNIR.tif"
    arcpy.Resample_management(in_raster=dir_RGBNIR, 
                              out_raster=dir_output+"\\"+name_resample,
                              cell_size=str(cellsize_resample)+" "+str(cellsize_resample), 
                              resampling_type="BILINEAR")
    # clip the optical and thermal image the same as the LAI
    extent = TellExtent(dir_LAI)
    name_clip_opt = "clip_RGBNIR.tif"
    arcpy.Clip_management(in_raster=dir_output+"\\"+name_resample, 
                          rectangle=extent, 
                          out_raster=dir_output+"\\"+name_clip_opt, 
                          in_template_dataset=dir_LAI, 
                          nodata_value=NoDataValue, 
                          clipping_geometry="NONE", 
                          maintain_clipping_extent="MAINTAIN_EXTENT")

    # Temperature image processing
    # Assuming the resolution is 0.6 meter by 0.6 meter
    # The unit now is degree C
    name_clip_tr = "clip_tr.tif"
    arcpy.Clip_management(in_raster=dir_Tr, 
                          rectangle=extent, 
                          out_raster=dir_output+"\\"+name_clip_tr, 
                          in_template_dataset=dir_LAI, 
                          nodata_value=NoDataValue, 
                          clipping_geometry="NONE", 
                          maintain_clipping_extent="MAINTAIN_EXTENT")

    # Identify shadow pixels based on the DSM image
    # Hillshade calculation based on the original (0.15 m) DSM data
    name_hillshade = "Hillshade.tif"
    arcpy.gp.HillShade_sa(dir_DSM, 
                          dir_output+"\\"+name_hillshade, 
                          str(Azimuth), 
                          str(Altitude), 
                          "SHADOWS", "1")
    # Aggregate the Hillshade image,
    # If the 0.6 meter grid contains at least one 0.15 meter shadow pixel,
    # this 0.6 meter grid is shadow pixel
    name_aggregate = "Aggregate.tif"
    arcpy.gp.Aggregate_sa(dir_output+"\\"+name_hillshade, 
                          dir_output+"\\"+name_aggregate, 
                          "4", 
                          "MINIMUM", 
                          "EXPAND", "DATA")
    # 4 is a constant here, since the original DSM and temperature images have 0.15 m and 0.6 m resolution, respectively.
    # Clip the aggregated DSM data
    # This image can be used to identify the shadow pixel: 0 represents the shadow
    name_clip_dsm = "clip_aggregate.tif"
    arcpy.Clip_management(in_raster=dir_output+"\\"+name_aggregate, 
                          rectangle=extent, 
                          out_raster=dir_output+"\\"+name_clip_dsm, 
                          in_template_dataset=dir_LAI, 
                          nodata_value=NoDataValue, 
                          clipping_geometry="NONE", 
                          maintain_clipping_extent="MAINTAIN_EXTENT")
    # Read this image as an array
    # Classify this image containing shadow pixel and non-shadow pixel
    Array_Shadow = arcpy.RasterToNumPyArray(dir_output+"\\"+name_clip_dsm, nodata_to_value=NoDataValue)
    Array_Shadow[Array_Shadow>0] = 1 # non-shadow pixel
    # print(np.nanmax(Array_Shadow),np.nanmin(Array_Shadow))

    # Temperature separation
    Array_LAI = arcpy.RasterToNumPyArray(dir_LAI, nodata_to_value=NoDataValue)
    Array_RGBNIR = arcpy.RasterToNumPyArray(dir_output+"\\"+name_clip_opt, nodata_to_value=NoDataValue)
    Array_R = Array_RGBNIR[band_R,:,:]
    Array_NIR = Array_RGBNIR[band_NIR,:,:]

    Array_NDVI = (Array_NIR-Array_R)/(Array_NIR+Array_R)
    Array_NDVI[Array_NDVI<0] = np.nan
    Array_NDVI[Array_NDVI>1] = np.nan
    Array_Tr = arcpy.RasterToNumPyArray(dir_output+"\\"+name_clip_tr, nodata_to_value=NoDataValue)
    Array_Tr[Array_Tr<0] = np.nan
    print(Array_LAI.shape, Array_RGBNIR.shape, Array_R.shape, Array_NIR.shape, Array_Tr.shape)
    print(round(np.nanmax(Array_NDVI),1),round(np.nanmin(Array_NDVI),1))

    # Stefan-Boltzmann Law
    Array_Tr = Array_Tr ** 4

    dims_LAI = Array_LAI.shape
    print("Column of the LAI:",dims_LAI[0],"Row of the LAI:",dims_LAI[1])
    dims_NDVI = Array_NDVI.shape
    print("Column of the spectral data:",dims_NDVI[0],"Row of the spectral data:",dims_NDVI[1])
    hor_pixel = int(dims_NDVI[0]/dims_LAI[0])
    ver_pixel = int(dims_NDVI[1]/dims_LAI[1])
    print("Each LAI pixel contains",hor_pixel,"(column/column) by",ver_pixel,"(row/row) pixels.")

    ## Main part of the temperature separation
    # Get the information from LAI map for data output
    fid=gdal.Open(dir_LAI)
    input_lai=fid.GetRasterBand(1).ReadAsArray()
    dims_lai=input_lai.shape
    # Read the GDAL GeoTransform to get the pixel size
    lai_geo=fid.GetGeoTransform()
    lai_prj=fid.GetProjection()
    fid=None
    # Copy the GeoTransform for the output file
    geo_out = tuple(lai_geo)

    t_canopy = np.empty((dims_LAI[0],dims_LAI[1]))
    t_canopy[:] = np.nan
    t_soil = np.empty((dims_LAI[0],dims_LAI[1]))
    t_soil[:] = np.nan
    t_coeff = np.empty((dims_LAI[0],dims_LAI[1]))
    t_coeff[:] = np.nan
    print("Dimension of the canopy temperature is:",t_canopy.shape[0],t_canopy.shape[1])
    print("Dimension of the soil temperature is:",t_soil.shape[0],t_soil.shape[1])

    # initial values for these four variables
    renew_slope = NoDataValue
    renew_intercept = NoDataValue
    renew_coeff = NoDataValue
    slope = NoDataValue
    intercept = NoDataValue
    correlation = NoDataValue
    pvalue = NoDataValue
    stderr = NoDataValue

    for irow in range(dims_LAI[0]):
        start_row = irow * hor_pixel
        end_row = start_row + (hor_pixel)
        for icol in range(dims_LAI[1]):
            start_col = icol * ver_pixel
            end_col = start_col + (ver_pixel)

            # Using the hillshade to eliminate the shadow pixel
            local_NDVI = Array_NDVI[start_row:end_row,start_col:end_col]
            local_NDVI[local_NDVI < 0] = NoDataValue
            local_Tr = Array_Tr[start_row:end_row,start_col:end_col]
            local_Shadow = Array_Shadow[start_row:end_row,start_col:end_col]
            num_zero = np.count_nonzero(local_Shadow==0)

            local_NDVI = local_NDVI*local_Shadow

            tmp_NDVI = local_NDVI.reshape(-1)
            tmp_NDVI[tmp_NDVI<=0] = np.nan
            tmp_Tr = local_Tr.reshape(-1)
            tmp_Tr = np.sqrt(np.sqrt(tmp_Tr)) # unit in degree C

            df = pd.DataFrame()
            df = pd.DataFrame({'NDVI': tmp_NDVI,'Tr': tmp_Tr})
            df = df.dropna()
            df = df.apply(pd.to_numeric, errors='coerce')

            # do regression if valid data existed in the data frame
            if len(df) != 0:
                slope,intercept,correlation,pvalue,stderr = linregress(df['NDVI'],df['Tr'])        
                # slope: slope of the regression
                # intercept: intercept of the regression line
                # correlation: correlation coefficient
                # pvalue: two-sided p-value for a hypothesis test whose null hypothesis is that the slope is zero
                # stderr: standard error of the estimate
            else: pass

            # renew the slope and the intercept if the slope is negative
            if np.nanmean(slope) < 0:
                renew_slope = slope
                renew_intercept = intercept
                renew_coeff = correlation
            else: pass

            # gain index for soil and canopy pixel for each local domain
            index_soil = np.where(local_NDVI <= Soil_threshold)
            index_veg = np.where(local_NDVI >= Veg_threshold)

            # when the domain contains both vegetation and soil
            if len(index_soil[0]) > 0 and len(index_veg[0]) > 0:
                t_canopy[irow,icol] = np.nanmean(local_Tr[index_veg[0],index_veg[1]])
                t_soil[irow,icol] = np.nanmean(local_Tr[index_soil[0],index_soil[1]])

            # when the domain contains vegetation but no soil: estimate the soil temperature
            elif len(index_soil[0]) == 0 and len(index_veg[0]) > 0:
                t_canopy[irow,icol] = np.nanmean(local_Tr[index_veg[0],index_veg[1]])
                t_soil[irow,icol] = ((renew_intercept + renew_slope * Soil_threshold)**2)**2

            # when the domain contains soil but no vegetation: vegetation temperature is "NAN"
            elif len(index_soil[0]) > 0 and len(index_veg[0]) <= 0:
                t_canopy[irow,icol] = np.nan
                t_soil[irow,icol] = np.nanmean(local_Tr[index_soil[0],index_soil[1]])

            # when the domain contains neither pure soil nor vegetation
            # pixels: both temperatures are "NAN"
            elif len(index_soil[0]) == 0 and len(index_veg[0]) == 0:
                t_canopy[irow,icol] = np.nan
                t_soil[irow,icol] = np.nan

            t_coeff[irow,icol] = renew_coeff

    tt_canopy = np.sqrt(np.sqrt(t_canopy.copy())) + 273.15
    tt_soil = np.sqrt(np.sqrt(t_soil.copy())) + 273.15
    tt_single_layer = (tt_canopy + tt_soil)/2
    # print(tt_canopy,"\n\n",tt_soil)

    # Write the separated temperature file
    driver = gdal.GetDriverByName('GTiff')
    ds = driver.Create(dir_output+"\\"+output_name_multiple, dims_LAI[1], dims_LAI[0], 3, gdal.GDT_Float32)
    ds.SetGeoTransform(geo_out)
    ds.SetProjection(lai_prj)
    band=ds.GetRasterBand(1)
    band.WriteArray(tt_canopy)
    band.SetNoDataValue(NoDataValue)
    band.FlushCache()
    band=ds.GetRasterBand(2)
    band.WriteArray(tt_soil)
    band.SetNoDataValue(NoDataValue)
    band.FlushCache()
    band=ds.GetRasterBand(3)
    band.WriteArray(t_coeff)
    band.SetNoDataValue(NoDataValue)
    band.FlushCache()
    ds = None
    print("Done!!! Temperature separation is finished.")
    # Write the single-layer temperature file (shadow pixel does not account)
    driver = gdal.GetDriverByName('GTiff')
    ds = driver.Create(dir_output+"\\"+output_name_single, dims_LAI[1], dims_LAI[0], 1, gdal.GDT_Float32)
    ds.SetGeoTransform(geo_out)
    ds.SetProjection(lai_prj)
    band=ds.GetRasterBand(1)
    band.WriteArray(tt_single_layer)
    band.SetNoDataValue(NoDataValue)
    band.FlushCache()
    ds = None
    print("Done!!! Single-layer temperature is generated.")
    
    # delete the middle products
    if MiddleProducts == "No":
        os.remove(dir_output+"\\"+name_resample)
        os.remove(dir_output+"\\"+name_clip_opt)
        os.remove(dir_output+"\\"+name_clip_tr)
        os.remove(dir_output+"\\"+name_clip_dsm)
        os.remove(dir_output+"\\"+name_hillshade)
        os.remove(dir_output+"\\"+name_aggregate)
        print("Done!!! Middle products are deleted.")
    else:
        print("Done!!! Middle products are saved.")
        pass
    
    return
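
Hedged sketches of the two helpers this function assumes: TellResolution() is documented at the GitHub link in the docstring, and TellExtent() is reconstructed here by analogy, so treat both as assumptions rather than the author's exact code.

import arcpy

def TellResolution(dir_raster):
    # return the [x, y] cell size of a raster
    desc = arcpy.Describe(dir_raster)
    return [desc.meanCellWidth, desc.meanCellHeight]

def TellExtent(dir_raster):
    # return the extent as the "XMin YMin XMax YMax" string that
    # arcpy.Clip_management expects for its rectangle parameter
    ext = arcpy.Describe(dir_raster).Extent
    return '%s %s %s %s' % (ext.XMin, ext.YMin, ext.XMax, ext.YMax)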
Code Example #8
#Overwrite the output if exist
arcpy.env.overwriteOutput = True

reload(sys)
sys.setdefaultencoding('utf8')

# Adjust folder directory
infolder = r"Z:\Temp\CHIRPS\Daily\Max3"
outfolder = r"Z:\Temp\CHIRPS\Daily\Pct3"
arcpy.env.workspace = infolder
rasters = arcpy.ListRasters()

nmpyrys = []
for i in rasters:
    nmpyrys.append(arcpy.RasterToNumPyArray(i))
a = numpy.array(nmpyrys)

nmpyrys_m = []
for i in nmpyrys:
    m = numpy.where(a[0] == 0, 1, 0)
    am = numpy.ma.MaskedArray(i, mask=m)
    nmpyrys_m.append(am)
# numpy.array() would silently drop the masks, so stack as a masked array and
# fill masked cells with NaN so nanpercentile ignores them
a_m = numpy.ma.stack(nmpyrys_m)
n_998 = numpy.nanpercentile(a_m.filled(numpy.nan), 99.8, axis=0)

# Based on the lower-left corner and pixel size of the global CHIRPS data
out = arcpy.NumPyArrayToRaster(n_998, arcpy.Point(-180.0000000, -50.0000015),
                               0.050000000745058, 0.050000000745058, 0)
Code Example #9
        escena = os.path.join(rutapro, i)
        for r in os.listdir(escena):

            if r.endswith('edges.img'):
                print(r)

                try:

                    edges = os.path.join(escena, r)
                    Extract = os.path.join(escena, r[:8] + '_exct')
                    Point_values = os.path.join(escena, r[:8] + '_points.shp')

                    # Process: Extract by Mask
                    arcpy.gp.ExtractByMask_sa(dtm_mtn_318_img, edges, Extract)

                    arr = arcpy.RasterToNumPyArray(Extract)
                    array = arr[arr > 0]
                    median = np.median(array)
                    print('MEDIAN:', median, 'MEAN:', array.mean())

                    # Process: Raster to Point
                    arcpy.RasterToPoint_conversion(edges, Point_values,
                                                   "Value")

                    arcpy.AddField_management(Point_values, "cota", "DOUBLE",
                                              5, 2)

                    with arcpy.da.UpdateCursor(Point_values, "*") as cursor:
                        # For each row, evaluate the WELL_YIELD value (index position
                        # of 0), and update WELL_CLASS (index position of 1)
                        for row in cursor:
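
The snippet is cut off inside the cursor. Given the "cota" field just added and the median computed above, a plausible body (a hypothetical reconstruction, not the original code) would be:

with arcpy.da.UpdateCursor(Point_values, ['cota']) as cursor:
    for row in cursor:
        row[0] = float(median)  # write the extracted median elevation
        cursor.updateRow(row)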
Code Example #10
File: flatten.py  Project: kdevans2/raster
def flatten_conf(strOriginalPath, strWorkingPath, RASTEXT_, lstYears_, iOpt):
    """
        Options: See options class iOpt
    """
    strPathInYear = edU.get_EVTY(strOriginalPath)
    strPathInConf = edU.get_ConfEV(strOriginalPath)
    arcpy.env.workspace = strWorkingPath
    # lists of intermediates which may or may not be deleted at end
    lstIntermed = []

    # Ingest event and confidence bands from orig folder
    arcpy.env.workspace = strPathInYear
    lstYBNames = arcpy.ListRasters()
    intBands = len(lstYBNames)
    print('\n\t' + str(intBands) + ' event band(s) found.')
    arcpy.env.workspace = strWorkingPath

    iDesc = gARC.getSimpleDesc(strPathInYear + os.sep + lstYBNames[0])

    print('\n\tIngesting event and confidence bands ...')
    t = time.time()
    arr3D_Conf = arcpy.RasterToNumPyArray(strPathInConf)
    arr3D_Evt = arcpy.RasterToNumPyArray(strPathInYear)

    arr3D_ConfM = np.where(arr3D_Conf >= 20, arr3D_Conf, 0)
    arr3D_EvtM = np.where(arr3D_Conf >= 20, arr3D_Evt, 0)
    print('\t\t', g.elapsed_time(t))

    print('\tRead confidence flags...')
    arrConfFlags = NDarrU.in_list(arr3D_Conf[0, :, :], lstConfidenceFlags)
    arrConfFlags = arrConfFlags.astype(np.int8)

    lstEventFOut, lstArrEventF = [], []
    lstConfFOut, lstArrConfF = [], []
    print('\n\tStarting years...')

    for y in lstYears_:
        t = time.time()
        print('\n\t\t' + str(y))
        if iOpt.bolDoAsMoistureYear:
            # test on moisture year
            arr3D_EvtM_bol = edU.testMoistureYear(arr3D_EvtM, y)
            strFileSuffix = '_Wat'
        else:
            # test on calendar year
            arr3D_EvtM_bol = edU.testYear(arr3D_EvtM, y)
            strFileSuffix = '_Cal'

        if iOpt.bolDoAsMaxConf:
            # filter for max conf event of the year
            arrOutEvent, arrOutConf = flatU.fMaxConfEV(arr3D_EvtM_bol, arr3D_EvtM, arr3D_ConfM)
        else:
            # filter for last event of the year
            arrOutEvent, arrOutConf = flatU.fLastEV(arr3D_EvtM_bol, arr3D_EvtM, arr3D_ConfM)
        arrOutConf = arrOutConf.astype(np.int8)

        # add in Confidence flags
        print('\tFlags...')
        # This will need some adjusting to get the right flags in.
        arrOutEventF = np.where(arrConfFlags, -1, arrOutEvent)
        arrOutConfF = np.where(arrConfFlags, arrConfFlags, arrOutConf)
        arrOutConfF = arrOutConfF.astype(np.int8)

        strEventFOut = 'Event_' + str(y) + strFileSuffix + RASTEXT_
        rastArcU.ArrayToRaster(arrOutEventF, strEventFOut, iDesc, val2NoData=-1, bolVerbose=False)
        lstEventFOut.append(strEventFOut)
        lstArrEventF.append(arrOutEventF)
        lstIntermed.append(strEventFOut)
        strConfFOut = 'Confidence_' + str(y) + strFileSuffix + RASTEXT_
        rastArcU.ArrayToRaster(arrOutConfF, strConfFOut, iDesc, bolVerbose=False)
        lstConfFOut.append(strConfFOut)
        lstArrConfF.append(arrOutConfF)
        lstIntermed.append(strConfFOut)

        print('\t\t\t', g.elapsed_time(t))
        
    # ---------------------------------------------------------
    # Optional outputs
    if iOpt.bolDoMaxConf or iOpt.bolDoSumConf:
        # This is max (highest) and sum of flattened confidences
        # so results will vary with flattening method
        arrOutConfND = np.stack(lstArrConfF)
        if iOpt.bolDoMaxConf:
            print('\tOptional: MaxConf...')
            strMaxConf = 'MaxConf' + strFileSuffix + RASTEXT_
            arrMax = arrOutConfND.max(axis=0)
            arrMaxF = np.where(arrConfFlags, arrConfFlags, arrMax)
            rastArcU.ArrayToRaster(arrMaxF, strMaxConf, iDesc, bolVerbose=False)
            del arrMax, arrMaxF

        if iOpt.bolDoSumConf:
            print('\tOptional: SumConf...')
            strSumConf = 'SumConf'+ strFileSuffix + RASTEXT_
            arrSum = arrOutConfND.sum(axis=0)
            arrSumF = np.where(arrConfFlags, arrConfFlags, arrSum)
            rastArcU.ArrayToRaster(arrSumF, strSumConf, iDesc, bolVerbose=False)
            del arrSum, arrSumF

        del arrOutConfND

    if iOpt.bolDoLastEV:
        # This is max (last) of flattened events
        # so results will vary with flattening method
        print('\tOptional: Last EV...')
        strLastEV = 'LastEV' + RASTEXT_
        arrOutEventND = np.stack(lstArrEventF)
        arrLast = arrOutEventND.max(axis=0)
        rastArcU.ArrayToRaster(arrLast, strLastEV, iDesc, bolVerbose=False)

        del arrOutEventND, arrLast

    return lstEventFOut, lstConfFOut, strFileSuffix
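
The np.where(arrConfFlags, arrConfFlags, ...) idiom above merges flags into data: wherever a confidence flag is nonzero, it overrides the per-year value. A minimal illustration with made-up arrays:

import numpy as np

arrConfFlags = np.array([[0, 7], [0, 0]], dtype=np.int8)
arrOutConf = np.array([[30, 55], [0, 80]], dtype=np.int8)
print(np.where(arrConfFlags, arrConfFlags, arrOutConf))
# [[30  7]
#  [ 0 80]]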
Code Example #11
# (assumes earlier in the full script: import math, import numpy as np,
#  from arcpy.sa import *, and dtm / FS already defined)

# Error counters (updated via 'global' inside the functions below)
er1 = er2 = er3 = er4 = err = 0

def Stupa(i, j, ka, pk):  # handles the rising case
  if ka < len(zoz)-1:
    if ka >= len(zoz)-pk-1:
      try:
        if pk == 0:
          a[i,j] = zoz[ka][i,j]
          e[i,j] = zoznvelk[ka]  #+1
          global er2
          er2 += 1
          return
        if math.fabs(zoz[ka+pk][i,j] * 0.05) >= math.fabs(zoz[ka+pk][i,j] - zoz[ka][i,j]):
          a[i,j] = zoz[ka][i,j]
          e[i,j] = zoznvelk[ka]  #+1
          return
        else:
          Stupa(i, j, ka+1, pk-1)
          return
      except:
        global er1
        er1 += 1
    else:
      try:
        if math.fabs(zoz[ka][i,j] * 0.05) < zoz[ka+pk][i,j] - zoz[ka][i,j]:
          Stupa(i, j, ka+1, pk)
          return
        else:
          a[i,j] = zoz[ka][i,j]
          e[i,j] = zoznvelk[ka]  #+1
          return
      except:
        global er3
        er3 += 1
  else:
    try:
      a[i,j] = zoz[ka][i,j]
      e[i,j] = zoznvelk[ka]  #+1
      return
    except:
      global er4
      er4 += 1

def Klesa(i, j, ka, pk):  # handles the falling case
  if ka < len(zoz)-1:
    if ka >= len(zoz)-pk-1:
      try:
        if pk == 0:
          a[i,j] = zoz[ka][i,j]
          e[i,j] = zoznvelk[ka]  #+2
          global er2
          er2 += 1
          return
        if math.fabs(zoz[ka+pk][i,j] * 0.05) >= math.fabs(zoz[ka+pk][i,j] - zoz[ka][i,j]):
          a[i,j] = zoz[ka][i,j]
          e[i,j] = zoznvelk[ka]  #+2
          return
        else:
          Klesa(i, j, ka+1, pk-1)
          return
      except:
        global er1
        er1 += 1
    else:
      try:
        if math.fabs(zoz[ka][i,j] * 0.05) > zoz[ka][i,j] - zoz[ka+pk][i,j]:
          Klesa(i, j, ka+1, pk)
          return
        else:
          a[i,j] = zoz[ka][i,j]
          e[i,j] = zoznvelk[ka]  #+2
          return
      except:
        global er3
        er3 += 1
  else:
    try:
      a[i,j] = zoz[ka][i,j]
      e[i,j] = zoznvelk[ka]  #+2
      return
    except:
      global er4
      er4 += 1

def Pocitaj(i, j, ka, pk):
  if ka < len(zoz)-1:
    if ka >= len(zoz)-pk-1:
      try:
        if math.fabs(zoz[ka+pk][i,j] * 0.05) >= math.fabs(zoz[ka+pk][i,j] - zoz[ka][i,j]):
          a[i,j] = zoz[ka][i,j]
          e[i,j] = zoznvelk[ka]  #+3
          return
        else:
          Pocitaj(i, j, ka+1, pk-1)
          return
      except:
        global er1
        er1 += 1
    else:  # rising or falling?
      try:
        if math.fabs(zoz[ka+pk][i,j] * 0.05) >= math.fabs(zoz[ka+pk][i,j] - zoz[ka][i,j]):
          if zoz[ka][i,j] < zoz[ka+pk][i,j]:
            Stupa(i, j, ka, pk)
            return
          if zoz[ka][i,j] > zoz[ka+pk][i,j]:
            Klesa(i, j, ka, pk)
            return
          else:
            global err
            err += 1
        else:
          Pocitaj(i, j, ka+1, pk)
          return
      except:
        global er3
        er3 += 1
  else:
    try:
      a[i,j] = zoz[ka][i,j]
      e[i,j] = zoznvelk[ka]  #+3
      return
    except:
      global er4
      er4 += 1

array = arcpy.RasterToNumPyArray(dtm)
velkost = array.shape
x = velkost[0]
y = velkost[1]
dsc = arcpy.Describe(dtm)
SR = dsc.SpatialReference
ext = dsc.Extent
lower_left = arcpy.Point(ext.XMin, ext.YMin)
arcpy.env.outputCoordinateSystem = SR
noDataValue = dsc.noDataValue
# Focal statistics for circle radii 500 m to 3000 m in 125 m steps
# (FS is the focal statistic type, e.g. "MEAN", defined earlier in the full script)
zoz = []
for radius in range(500, 3001, 125):
  c = FocalStatistics(dtm, NbrCircle(radius, "MAP"), FS, "DATA")
  zoz.append(arcpy.RasterToNumPyArray(c))
del c
zoznvelk = list(range(1000, 6250, 250))
a = np.zeros((x, y))
e = np.zeros((x, y))
for i in range(0, x):
  for j in range(0, y):
    Pocitaj(i, j, 0, 2)
print("errors:", er1, er2, er3, er4, err)
raster = arcpy.NumPyArrayToRaster(a, lower_left, dsc.meanCellWidth, dsc.meanCellHeight, noDataValue)
raster.save("priem_vy_odU")

raster2 = arcpy.NumPyArrayToRaster(e, lower_left, dsc.meanCellWidth, dsc.meanCellHeight, noDataValue)
raster2.save("Velk_oKna")
del raster
del raster2
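
The recursion above keeps moving to larger focal windows until the focal mean stabilizes; the core test is that 5% of the wider window's value bounds the change between windows. A standalone illustration with made-up focal means for one cell:

import math

vals = [10.0, 10.2, 10.3, 10.35]  # focal means for growing radii
ka, pk = 0, 2
stable = math.fabs(vals[ka + pk] * 0.05) >= math.fabs(vals[ka + pk] - vals[ka])
print(stable)  # True: |10.3 * 0.05| = 0.515 >= |10.3 - 10.0| = 0.3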
 
Code Example #12
    viewpoint = arcpy.Point(lon, lat)
    defaultheight = 0.0  # height offset added to the cell at the viewpoint
    describe = arcpy.Describe(raster)
    cell_size_x = describe.meanCellWidth
    cell_size_y = describe.meanCellHeight
    spatialReference = describe.spatialReference
    extent = describe.Extent
    rows = raster.height
    columns = raster.width
    column = int(
        float(viewpoint.X - extent.XMin) / (extent.XMax - extent.XMin) *
        columns)
    row = int(
        float(viewpoint.Y - extent.YMax) / (extent.YMin - extent.YMax) * rows)
    lower_left = arcpy.Point(X=extent.XMin, Y=extent.YMin)

    #initialize
    arrays = arcpy.RasterToNumPyArray(raster)
    arrays[row][column] += defaultheight
    rp = ReferencePlaneLocalArea(rows, columns, arrays)
    rp.run(row, column)
    result = rp.getVisibility()
    result = arcpy.NumPyArrayToRaster(result,
                                      lower_left_corner=lower_left,
                                      x_cell_size=cell_size_x,
                                      y_cell_size=cell_size_y)
    arcpy.DefineProjection_management(result, spatialReference)
    # print('saving:', time.time())
    result.save(output)
    # print('project done:', time.time())
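
The row/column math above is a world-to-pixel conversion; an equivalent helper using the cell sizes directly (same conventions: row 0 at the top edge, column 0 at the left edge):

def world_to_pixel(x, y, extent, cell_size_x, cell_size_y):
    col = int((x - extent.XMin) / cell_size_x)
    row = int((extent.YMax - y) / cell_size_y)
    return (row, col)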