def find_las_files_by_polygon(file_list, polygon, print_keep=False):
    las_files = np.genfromtxt(file_list, delimiter=',', dtype='unicode')
    keep = []
    n_files = las_files.size
    for i in range(0, n_files):
        UR, LR, UL, LL = get_lasfile_bbox(las_files[i])
        las_box = np.asarray([UR, LR, LL, UL])
        x, y, inside = lidar.points_in_poly(las_box[:, 0], las_box[:, 1],
                                            polygon)  # fix this test
        if inside.sum() > 0:
            keep.append(las_files[i])
        else:
            x, y, inside = lidar.points_in_poly(polygon[:, 0], polygon[:, 1],
                                                las_box)
            if inside.sum() > 0:
                keep.append(las_files[i])
    if print_keep:
        print('las tiles to load in:', len(keep))
        for ll in range(0, len(keep)):
            print(keep[ll])
    return keep
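
# Usage sketch (hypothetical inputs): 'las_list.csv' stands in for a
# one-column list of .las file paths, and example_polygon for an (N, 2)
# array of easting/northing vertices, per the function's expectations above.
example_polygon = np.array([[580000., 520000.], [580100., 520000.],
                            [580100., 520100.], [580000., 520100.]])
tiles_to_load = find_las_files_by_polygon('las_list.csv', example_polygon,
                                          print_keep=True)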
def calculate_bestfit_LAD_profile(subplot_coordinate_file,
                                  LAI_file,
                                  las_file,
                                  Plot_name,
                                  minimum_height=0):
    subplot_polygons, subplot_labels = aux.load_boundaries(
        subplot_coordinate_file)
    field_LAI = aux.load_field_LAI(LAI_file)
    lidar_pts = lidar.load_lidar_data(las_file)

    n_subplots = subplot_polygons[Plot_name].shape[0]
    max_height = 80
    layer_thickness = 1
    n_layers = int(np.ceil(max_height / layer_thickness))
    subplot_lidar_profiles = np.zeros((n_subplots, n_layers))
    n_ground_returns = np.zeros(n_subplots)
    subplot_LAI = np.zeros(n_subplots)

    for i in range(0, n_subplots):
        print "Subplot: ", subplot_labels[Plot_name][i]
        sp_pts = lidar.filter_lidar_data_by_polygon(
            lidar_pts, subplot_polygons[Plot_name][i, :, :])
        heights, subplot_lidar_profiles[
            i, :], n_ground_returns[i] = bin_returns(sp_pts, max_height,
                                                     layer_thickness)
        subplot_LAI[i] = field_LAI['LAI'][np.all(
            (field_LAI['Subplot']
             == subplot_labels[Plot_name][i], field_LAI['Plot'] == Plot_name),
            axis=0)]

    kmin = 0.20
    kmax = 5.
    kinc = 0.005
    misfit, ks, best_k_LAD_profiles, best_k = minimise_misfit_for_k(
        kmin, kmax, kinc, subplot_LAI, subplot_lidar_profiles,
        n_ground_returns, layer_thickness, minimum_height)
    return heights, best_k_LAD_profiles
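
# Usage sketch (hypothetical file paths; the plot name 'Belian' is
# illustrative): fits the extinction coefficient k over the range set above
# (0.2-5.0) and returns the best-fit LAD profiles for each subplot.
heights, LAD_profiles = calculate_bestfit_LAD_profile(
    'subplot_coordinates.csv', 'field_LAI.csv', 'plot_point_cloud.las',
    'Belian', minimum_height=2.)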
def load_lidar_file_by_polygon(lasfile,
                               polygon,
                               max_pts_per_tree=10**6,
                               print_keep=False):
    W = polygon[:, 0].min()
    E = polygon[:, 0].max()
    S = polygon[:, 1].min()
    N = polygon[:, 1].max()
    tile_pts = load_lidar_data_by_bbox(lasfile, N, S, E, W, print_npts=False)
    pts = lidar.filter_lidar_data_by_polygon(tile_pts, polygon, print_keep)
    # now create KDTrees
    starting_ids, trees = create_KDTree(pts)

    print "loaded ", pts.shape[0], " points"
    return pts, starting_ids, trees
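
# Sketch of downstream use of the returned KD-trees (mirroring the
# query_ball_point fragments later in this file); 'tile.las' and the 20 m
# radius are illustrative, and example_polygon is defined in the sketch above.
pts_out, start_ids, kd_trees = load_lidar_file_by_polygon('tile.las',
                                                          example_polygon)
ball_ids = kd_trees[0].query_ball_point([pts_out[:, 0].mean(),
                                         pts_out[:, 1].mean()], 20.)
sample = pts_out[np.asarray(ball_ids) + start_ids[0]]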
def find_laz_files_by_polygon(file_list, polygon, print_keep=False):
    laz_files = np.genfromtxt(file_list, delimiter=',', dtype='unicode')
    keep = []
    n_files = laz_files.size
    for i in range(0, n_files):
        os.system("las2las %s temp.las" % laz_files[i])
        UR, LR, UL, LL = get_lasfile_bbox('temp.las')
        las_box = np.asarray([UR, LR, LL, UL])
        x, y, inside = lidar.points_in_poly(las_box[:, 0], las_box[:, 1],
                                            polygon)
        if inside.sum() > 0:
            keep.append(laz_files[i])
        os.system("rm temp.las")
    if print_keep:
        print('las tiles to load in:', len(keep))
        for ll in range(0, len(keep)):
            print(keep[ll])
    return keep
def find_laz_files_by_polygon(file_list, polygon, print_keep=False):
    laz_files = np.genfromtxt(file_list, delimiter=',', dtype='unicode')
    keep = []
    n_files = laz_files.size
    for i in range(0, n_files):
        temp_file = 'temp_%i.las' % np.round(
            np.random.random() * 10**9).astype(int)
        os.system("las2las %s %s" % (laz_files[i], temp_file))
        UR, LR, UL, LL = get_lasfile_bbox(temp_file)
        las_box = np.asarray([UR, LR, LL, UL])
        x, y, inside = lidar.points_in_poly(las_box[:, 0], las_box[:, 1],
                                            polygon)
        if inside.sum() > 0:
            keep.append(laz_files[i])
        os.system("rm %s" % temp_file)
    if print_keep:
        print('las tiles to load in:', len(keep))
        for ll in range(0, len(keep)):
            print(keep[ll])
    return keep
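
# The randomised temp-file name above avoids collisions between concurrent
# runs; a sketch of an alternative using the standard-library tempfile module
# (assumes las2las is on the PATH, as elsewhere in this file):
import tempfile

def decompress_laz_to_temp(laz_file):
    # create a unique .las path that cannot collide with other processes
    handle, temp_las = tempfile.mkstemp(suffix='.las')
    os.close(handle)
    os.system("las2las %s %s" % (laz_file, temp_las))
    return temp_las  # caller is responsible for os.remove(temp_las)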
# MAIN ANALYSIS
# LiDAR PROFILES LOOP
# loop through all plots to be analysed
for pp in range(0, N_plots):
    print(Plots[pp])
    Plot_name = Plots[pp]
    n_subplots = subplot_polygons[Plot_name].shape[0]

    #------------------------------------------------------------------------------------
    # CLIP DATA TO PLOT
    # clip LiDAR point cloud to plot level (this makes subsequent processing much faster)
    n_coord_pairs = subplot_polygons[Plot_name].shape[0] * subplot_polygons[
        Plot_name].shape[1]
    coord_pairs = subplot_polygons[Plot_name].reshape(n_coord_pairs, 2)
    bbox_polygon = aux.get_bounding_box(coord_pairs)
    plot_lidar_pts = lidar.filter_lidar_data_by_polygon(
        all_lidar_pts, bbox_polygon, filter_by_first_return_location=True)

    #------------------------------------------------------------------------------------
    # SET UP ARRAYS TO HOST RESULTS
    # get some subplot-level information
    n_subplots = subplot_polygons[Plot_name].shape[0]
    for ss in range(0, n_subplots):
        subplot_index = int(subplot_labels[Plot_name][ss] - 1)
        # filter lidar points into subplot
        sp_pts = lidar.filter_lidar_data_by_polygon(
            plot_lidar_pts,
            subplot_polygons[Plot_name][ss, :, :],
            filter_by_first_return_location=True)

    # set up some arrays to host the radiative transfer based profiles
    PAD_pla = np.zeros((n_subplots, heights_rad.size, max_return))
        #print '\t\t', target_point_density[dd],'-' , pts_iter.shape[0]/100.**2 , count
        starting_ids, trees = io.create_KDTree(pts_iter)
        # loop through each sampling resolution
        for ss in range(0, sample_res.size):
            print('\t - sample res = ', keys[ss])
            n_subplots = len(subplots[keys[ss]])
            # for each of the subplots, clip point cloud and model PAD and get the metrics
            for pp in range(0, n_subplots):
                # query the tree to locate points of interest
                # note that we will only have one tree for number of points in sensitivity analysis
                centre_x = np.mean(subplots[keys[ss]][pp][0:4, 0])
                centre_y = np.mean(subplots[keys[ss]][pp][0:4, 1])
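                # sample radius = circumradius of the square subplot
                # (sqrt(s**2/2) for side length s), so the ball query covers
                # the whole polygon before clipping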
                radius = np.sqrt(sample_res[ss]**2 / 2.)
                ids = trees[0].query_ball_point([centre_x, centre_y], radius)
                sp_pts = lidar.filter_lidar_data_by_polygon(
                    pts_iter[ids], subplots[keys[ss]][pp])
                #------
                heights, first_return_profile, n_ground_returns = LAD1.bin_returns(
                    sp_pts, max_height, layer_thickness)
                PAD_profiles_MH[keys[ss]][keys_2[dd]][
                    ii, pp, :] = LAD1.estimate_LAD_MacArthurHorn(
                        first_return_profile, n_ground_returns,
                        layer_thickness, kappa)
                #------
                u, n, I, U = LAD2.calculate_LAD(sp_pts, heights_rad, max_k,
                                                'spherical')
                PAD_profiles_rad1[keys[ss]][keys_2[dd]][
                    ii, pp, :] = u[::-1][1:].copy()
                #------
                u, n, I, U = LAD2.calculate_LAD_DTM(sp_pts, heights_rad, max_k,
                                                    'spherical')
def load_lidar_data_by_polygon(file_list,
                               polygon,
                               max_pts_per_tree=10**6,
                               laz_files=False,
                               print_keep=False):
    W = polygon[:, 0].min()
    E = polygon[:, 0].max()
    S = polygon[:, 1].min()
    N = polygon[:, 1].max()
    if laz_files:
        keep_files = find_laz_files_by_polygon(file_list, polygon, print_keep)
    else:
        keep_files = find_las_files_by_polygon(file_list, polygon, print_keep)

    n_files = len(keep_files)
    trees = []
    starting_ids = np.asarray([])

    # first case scenario that no points in ROI
    print('\t\tloading %.0f tiles...' % n_files)
    start = time.time()
    if n_files == 0:
        print('WARNING: No files within specified polygon - try again')
        pts = np.array([])

    # otherwise, we have work to do!
    else:
        if laz_files:
            os.system("las2las %s temp.las" % keep_files[0])
            tile_pts = load_lidar_data_by_bbox('temp.las',
                                               N,
                                               S,
                                               E,
                                               W,
                                               print_npts=False)
            os.system("rm temp.las")
        else:
            tile_pts = load_lidar_data_by_bbox(keep_files[0],
                                               N,
                                               S,
                                               E,
                                               W,
                                               print_npts=False)

        pts = lidar.filter_lidar_data_by_polygon(tile_pts, polygon)

        # now repeat for subsequent tiles
        for i in range(1, n_files):
            if laz_files:
                os.system("las2las %s temp.las" % keep_files[i])
                tile_pts = load_lidar_data_by_bbox('temp.las',
                                                   N,
                                                   S,
                                                   E,
                                                   W,
                                                   print_npts=False)
                os.system("rm temp.las")
            else:
                tile_pts = load_lidar_data_by_bbox(keep_files[i],
                                                   N,
                                                   S,
                                                   E,
                                                   W,
                                                   print_npts=False)

            pts_ = lidar.filter_lidar_data_by_polygon(tile_pts, polygon)
            pts = np.concatenate((pts, pts_), axis=0)

    end = time.time()
    print('\t\t\t...%.3f s' % (end - start))
    # now create KDTrees
    print('\t\tbuilding KD-trees...')
    start = time.time()
    starting_ids, trees = create_KDTree(pts)
    end = time.time()
    print('\t\t\t...%.3f s' % (end - start))

    print "loaded ", pts.shape[0], " points into ", len(trees), " KDTrees"
    return pts, starting_ids, trees
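
# Usage sketch (hypothetical inputs): load every tile listed in
# 'laz_list.csv' that intersects example_polygon, decompressing the .laz
# files on the fly.
pts_all, ids_all, trees_all = load_lidar_data_by_polygon(
    'laz_list.csv', example_polygon, laz_files=True, print_keep=False)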
                ids = trees_collated[pp][tt].query_ball_point(
                    np.array([centre_x, centre_y]), radius)
                if len(ids) > 0:
                    if sample_pts.size == 0:
                        sample_pts = lidar_pts[np.asarray(ids) +
                                               starting_ids_for_trees[tt]]
                    else:
                        sample_pts = np.concatenate(
                            (sample_pts,
                             lidar_pts[np.asarray(ids) +
                                       starting_ids_for_trees[tt]]),
                            axis=0)

            # keep only first returns
            sample_pts = sample_pts[sample_pts[:, 3] == 1, :]
            sp_pts = lidar.filter_lidar_data_by_polygon(
                sample_pts, subplots[pp][keys[ss]][sp])
            #------
            heights, first_return_profile, n_ground_returns = MH.bin_returns(
                sp_pts, max_height, layer_thickness)
            PADprof = MH.estimate_LAD_MacArthurHorn(first_return_profile,
                                                    n_ground_returns,
                                                    layer_thickness, k)
            # remove lowermost portion of profile
            PADprof[heights < min_height] = 0
            PAI_res[keys[ss]][pp, sp] = PADprof.sum()
        PAI_mean[pp, ss] = np.mean(PAI_res[keys[ss]][pp, :])
        PAI_sd[pp, ss] = np.std(PAI_res[keys[ss]][pp, :])

PAI_serr = np.zeros(PAI_sd.shape)
for pp in range(0, 3):
    for ss in range(0, res.size):
        N_trees = len(trees)
        # for each of the subplots, clip point cloud and model PAD and get the metrics
        for pp in range(0, n_subplots):
            # query the tree to locate points of interest
            # note that we will only have one tree for the number of points in sensitivity analysis
            centre_x = np.mean(subplots[pp][0:4, 0])
            centre_y = np.mean(subplots[pp][0:4, 1])
            radius = np.sqrt(sample_res**2 / 2.)

            # retrieve point cloud samples
            sp_pts = np.array([])
            for tt in range(0, N_trees):
                ids = trees[tt].query_ball_point([centre_x, centre_y], radius)
                if len(ids) > 0:
                    if sp_pts.size == 0:
                        sp_pts = lidar.filter_lidar_data_by_polygon(
                            pts_iter[np.asarray(ids) + starting_ids[tt]],
                            subplots[pp])
                    else:
                        sp_iter = lidar.filter_lidar_data_by_polygon(
                            pts_iter[np.asarray(ids) + starting_ids[tt]],
                            subplots[pp])
                        sp_pts = np.concatenate((sp_pts, sp_iter), axis=0)
                        sp_iter = None
                #------
            if sp_pts.size == 0:
                PAD_profiles_MH[keys[dd]][ii, pp, :] = np.nan
                PAD_profiles_rad2[keys[dd]][ii, pp, :] = np.nan
                penetration_limit[keys[dd]][ii, pp, :] = 1.
            elif np.sum(sp_pts[:, 3] == 1) > 0:
                heights, first_return_profile, n_ground_returns = LAD1.bin_returns(
                    sp_pts, max_height, layer_thickness)
                PAD_profiles_MH[keys[dd]][ii, pp, :] = LAD1.estimate_LAD_MacArthurHorn(
                    first_return_profile, n_ground_returns, layer_thickness,
                    kappa)
                penetration_limit[keys[dd]][ii, pp, :] = np.cumsum(
                    first_return_profile) == 0
                #------
                u, n, I, U = LAD2.calculate_LAD_DTM(sp_pts, heights_rad,
                                                    max_k, 'spherical')
MacArthurHorn_LAD = {}
radiative_LAD = {}
radiative_DTM_LAD = {}
inventory_LAD = {}
lidar_profiles = {}
lidar_profiles_adjusted = {}

MacArthurHorn_LAI = {}
radiative_LAI = {}
radiative_DTM_LAI = {}
inventory_LAI = {}
Hemisfer_LAI = {}

# load coordinates and lidar points for target areas
subplot_polygons, subplot_labels = aux.load_boundaries(subplot_coordinate_file)
all_lidar_pts = lidar.load_lidar_data(las_file)

# load field data and retrieve allometric relationships
field_data = field.load_crown_survey_data(field_file)
a, b, CF, r_sq, p, H, D = field.retrieve_crown_allometry(allometry_file)
a_ht, b_ht, CF_ht, a_A, b_A, CF_A = field.calculate_allometric_equations_from_survey(
    field_data)

# load LAI estimates from hemiphotos
field_LAI = aux.load_field_LAI(LAI_file)

# loop through all plots to be analysed
for pp in range(0, N_plots):
    print(Plots[pp])
    Plot_name = Plots[pp]
    # clip LiDAR point cloud to plot level (this makes subsequent processing much faster)
#minimum_height = 2. # ignore profiles <2 m due to difficulty distinguishing ground returns, which increases error
minimum_height = float(sys.argv[6])
# this second set of parameters is only used for the radiative transfer model
#leaf_angle_dist = 'spherical' # other options include 'erectophile' and 'planophile'
leaf_angle_dist = sys.argv[8]
#max_return = 3
max_return = int(sys.argv[9])

# choose method
method = sys.argv[10]

# output details
out_file = sys.argv[11]

# load LiDAR point cloud and clip to neighbourhood around a specified point
lidar_pts = lidar.load_lidar_data(las_file)
sample_pts = lidar_pts  # we assume that all filtering has already been done.

#sample_pts = lidar.filter_lidar_data_by_neighbourhood(lidar_pts,target_xy,radius)
#sample_pts = lidar.filter_lidar_data_by_polygon(lidar_pts,polygon) # there is also scope to clip point clouds using a polygon if preferred, but for camera trap, point centres are probably better options.

# MacArthur-Horn method (Stark et al., 2012)
if method == 'macarthur_horn':
    print "using MacArthur-Horn model"
    heights, LAD = LAD1.estimate_LAD_MacArthurHorn_full(
        sample_pts, max_height, layer_thickness, minimum_height)

#Radiative transfer approach (Milodowski building on Detto et al., 2015)
elif method == 'radiative_transfer':
    print "using radiative transfer model"
    heights, LAD = LAD2.calculate_LAD_rad_DTM_full(sample_pts, max_height,
            # get pixel boundaries
            pixel_bbox = np.array(
                [[x_iter - raster_res / 2., y_iter - raster_res / 2.],
                 [x_iter - raster_res / 2., y_iter + raster_res / 2.],
                 [x_iter + raster_res / 2., y_iter + raster_res / 2.],
                 [x_iter + raster_res / 2., y_iter - raster_res / 2.],
                 [x_iter - raster_res / 2., y_iter - raster_res / 2.]])
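            # note: the first vertex is repeated so the pixel bounding box
            # forms a closed ring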

            # retrieve point cloud samples
            sample_pts = np.array([])
            for tt in range(0, N_trees):
                ids = trees[tt].query_ball_point([x_iter, y_iter], radius)
                if len(ids) > 0:
                    if sample_pts.size == 0:
                        sample_pts = lidar.filter_lidar_data_by_polygon(
                            lidar_pts[np.asarray(ids) +
                                      starting_ids_for_trees[tt]], pixel_bbox)
                        #sample_pts = lidar_pts[np.asarray(ids)+starting_ids_for_trees[tt]]
                    else:
                        sample_iter = lidar.filter_lidar_data_by_polygon(
                            lidar_pts[np.asarray(ids) +
                                      starting_ids_for_trees[tt]], pixel_bbox)
                        sample_pts = np.concatenate((sample_pts, sample_iter),
                                                    axis=0)
                        sample_iter = None

            # If we have the returns, then calculate metric of interest - in
            # this case the PAI
            if sample_pts.size > 0:
                if np.sum(sample_pts[:, 3] == 1) > 0:
                    # calculate PAD profile
rcParams['legend.numpoints'] = 1
axis_size = rcParams['font.size'] + 2

chem_file = '../../../BALI_traits_data/CombinedPlots/BALI_traits_CN_29032017.csv'
spp_file = '../../../BALI_traits_data/CombinedPlots/BALI_species_28022017.csv'
photo_file = '../../../BALI_traits_data/CombinedPlots/Photosynthesis_Combined_14122016_hyphens.csv'
branch_file = '../../../BALI_traits_data/CombinedPlots/ParameterTrees_Combined_14122016.csv'
leaf_file = '../../../BALI_traits_data/CombinedPlots/LeafArea_Combined_14122016.csv'
census_file = '/home/dmilodow/DataStore_DTM/BALI/BALI_Cplot_data/SAFE_CarbonPlots_TreeCensus.csv'
field_file = '/home/dmilodow/DataStore_DTM/BALI/LiDAR/Data/Local/SAFE_DANUM_carbonplots_FieldMapcensus2016.csv'
light_file = '/exports/csce/datastore/geos/users/dmilodow/BALI/LiDAR/src/output/BALI_subplot_lighttransmittance.npz'
subplot_coordinate_file = '/home/dmilodow/DataStore_DTM/BALI/LiDAR/src/BALI_subplot_coordinates_corrected.csv'  #check
las_file = '/home/dmilodow/DataStore_DTM/BALI/LiDAR/src/Carbon_plot_point_cloud_buffer.las'

# get LiDAR data and sort into plots
lidar_pts = lidar.load_lidar_data(las_file)
plot_pts = {}

# get lower left hand corner of plots
subplot_polygons, subplot_labels = aux.load_boundaries(subplot_coordinate_file)
plot_origin = {}
buffer_width = 5.
for plot in subplot_polygons.keys():
    print(plot)
    if plot == 'Seraya':
        plot_origin[plot] = subplot_polygons[plot][subplot_labels['Seraya'] ==
                                                   6, 0, :][0]
    elif plot == 'DC1':
        plot_origin[plot] = subplot_polygons[plot][subplot_labels['DC1'] == 1,
                                                   3, :][0]
max_height = 80
max_return = 4
layer_thickness = 1
n_layers = int(np.ceil(max_height / layer_thickness))
minimum_height = 2.

# store profiles in dictionaries
radiative_spherical_LAD = {}
radiative_spherical_adjusted_LAD = {}
radiative_spherical_no_azimuth_LAD = {}
lidar_profiles = {}
lidar_profiles_adjusted = {}

#subplot_polygons, subplot_labels = aux.load_boundaries(subplot_coordinate_file)
subplot_polygons = aux.load_generic_boundaries(coordinate_file)
all_pts = lidar.load_lidar_data(las_file)

for pp in range(0, N_plots):
    Plot_name = Plots[pp]
    bbox_polygon = subplot_polygons[Plot_name]
    print(Plot_name)
    lidar_pts = lidar.filter_lidar_data_by_polygon(all_pts, bbox_polygon)

    heights_rad = np.arange(0, max_height + 1)
    LAD_profiles_spherical = np.zeros((heights_rad.size, max_return))
    LAD_profiles_spherical_adjusted = np.zeros((heights_rad.size, max_return))
    LAD_profiles_spherical_no_azimuth = np.zeros(
        (heights_rad.size, max_return))
    # first get LAD distribution following Detto et al., 2015
    for rr in range(0, max_return):
        max_k = rr + 1
        u, n, I, U = LAD2.calculate_LAD(lidar_pts, heights_rad, max_k,
                                        'spherical')
chm = np.full((rows, cols), np.nan)

#-----------------------------------------------------
# now get highest return in each grid cell to define the CHM
print("generating CHM")
#-------------------
starting_ids, trees = io.create_KDTree(pts)
# for each of the subplots, clip point cloud and model PAD and get the metrics
for pp in range(0, n_subplots):
    # query the tree to locate points of interest
    # note that we will only have one tree for the number of points in sensitivity analysis
    centre_x = np.mean(subplots[pp][0:4, 0])
    centre_y = np.mean(subplots[pp][0:4, 1])
    radius = np.sqrt(sample_res**2 / 2.)
    ids = trees[0].query_ball_point([centre_x, centre_y], radius)
    sp_pts = lidar.filter_lidar_data_by_polygon(
        pts[ids], subplots[pp], filter_by_first_return_location=False)
    #------
    if np.sum(sp_pts[:, 2] >= 0) > 0:  # check for returns within this column
        chm[row_idx[pp], col_idx[pp]] = np.max(sp_pts[:, 2])
    else:
        # adaptive neighbourhood expansion to account for penetration
        # limitations, particularly for fine grids/low point densities
        nodata_test = np.isnan(chm[row_idx[pp], col_idx[pp]])

        while nodata_test:
            # expand neighbourhood for the point cloud sample before re-querying
            radius += 0.5
            sp_pts_iter = pts[trees[0].query_ball_point(
                [centre_x, centre_y], radius)]
            if np.sum(sp_pts_iter[:, 2] >= 0) > 0:
                # found returns within the expanded neighbourhood
                chm[row_idx[pp], col_idx[pp]] = np.max(sp_pts_iter[:, 2])
            nodata_test = np.isnan(chm[row_idx[pp], col_idx[pp]])
# MAIN ANALYSIS
# LiDAR PROFILES LOOP
# loop through all plots to be analysed
for pp in range(0, N_plots):
    print(Plots[pp])
    Plot_name = Plots[pp]
    n_subplots = subplot_polygons[Plot_name].shape[0]

    #------------------------------------------------------------------------------------
    # CLIP DATA TO PLOT
    # clip LiDAR point cloud to plot level (this makes subsequent processing much faster)
    n_coord_pairs = subplot_polygons[Plot_name].shape[0] * subplot_polygons[
        Plot_name].shape[1]
    coord_pairs = subplot_polygons[Plot_name].reshape(n_coord_pairs, 2)
    bbox_polygon = aux.get_bounding_box(coord_pairs)
    plot_lidar_pts = lidar.filter_lidar_data_by_polygon(
        all_lidar_pts, bbox_polygon, filter_by_first_return_location=True)
    starting_ids, trees = io.create_KDTree(
        plot_lidar_pts)  # build kd-tree for plot lidar points
    print("canopy height = ",
          np.percentile(plot_lidar_pts[plot_lidar_pts[:, 3] == 1, 2], 99), "m")

    #------------------------------------------------------------------------------------
    # SET UP ARRAYS TO HOST RESULTS
    # get some subplot-level information
    n_subplots = subplot_polygons[Plot_name].shape[0]

    # set up some arrays to host the MacArthur-Horn profiles
    LAD_MH = np.zeros((n_subplots, heights.size))

    #------------------------------------------------------------------------------------
    # LOOP THROUGH SUBPLOTS, CALCULATING CANOPY PROFILES
#-------------------
starting_ids, trees = io.create_KDTree(pts_iter)
# loop through each sampling resolution
for ss in range(0, sample_res.size):
    print('\t - sample res = ', keys[ss])
    n_subplots = len(subplots[keys[ss]])
    # for each of the subplots, clip point cloud and model PAD and get the metrics
    for pp in range(0, n_subplots):
        # query the tree to locate points of interest
        # note that we will only have one tree for the number of points in sensitivity analysis
        centre_x = np.mean(subplots[keys[ss]][pp][0:4, 0])
        centre_y = np.mean(subplots[keys[ss]][pp][0:4, 1])
        radius = np.sqrt(sample_res[ss]**2 / 2.)
        ids = trees[0].query_ball_point([centre_x, centre_y], radius)
        sp_pts = lidar.filter_lidar_data_by_polygon(
            pts_iter[ids],
            subplots[keys[ss]][pp],
            filter_by_first_return_location=True)
        #------
        if np.sum(sp_pts[:, 3] == 1) > 0:  # check for returns within this column
            heights, first_return_profile, n_ground_returns = LAD1.bin_returns(
                sp_pts, max_height, layer_thickness)
            mh_profile = LAD1.estimate_LAD_MacArthurHorn(
                first_return_profile, n_ground_returns, layer_thickness,
                kappa)
            pen_limit = np.cumsum(first_return_profile) == 0
            #------
            heights, weighted_return_profile, weighted_n_ground_returns = LAD1.bin_returns_weighted_by_num_returns(
                sp_pts, max_height, layer_thickness)
            mh_wt_profile = LAD1.estimate_LAD_MacArthurHorn(
                weighted_return_profile,
                                    weights = results_iter['weights'] / np.sum(
                                        np.isfinite(
                                            results_iter['raster_values'][0].
                                            values) * results_iter['weights'])
                                    cover_mc[ii] = np.sum(
                                        (results_iter['raster_values']
                                         [0].values >= gap_ht) * weights)
                                    quantiles_mc[ii] = st.weighted_quantiles(
                                        results_iter['raster_values']
                                        [0].values, results_iter['weights'],
                                        quantiles)

                                    # now get canopy cover directly from LiDAR point cloud
                                    pts_sub = lidar_tools.filter_lidar_data_by_neighbourhood(
                                        lidar_pts, [
                                            subplot['geometry']['coordinates']
                                            [0] + xerr, subplot['geometry']
                                            ['coordinates'][1] + yerr
                                        ], radius)

                                    point_heights = np.zeros(pts_sub.shape[0])
                                    for idx in range(0, point_heights.size):
                                        dist, pixel_id = dem_trees[0].query(
                                            pts_sub[idx, :2], k=1)
                                        point_heights[idx] = pts_sub[
                                            idx, 2] - ZZ[pixel_id]

                                    point_weights = 1 / pts_sub[:, 7]
                                    canopy_mask = point_heights >= gap_ht
                                    cover_fraction_from_pointcloud_mc[
                                        ii] = np.sum(
                                            point_weights[canopy_mask] /