Code Example #1
import subprocess

from libraries import raslib

# assumed configuration: lastools_dir points to the local LAStools bin directory
lastools_dir = "C:\\lastools\\bin\\"


def mCH(las_file):
    norm_file = las_file.replace(".las", "_normalized.las")
    noise_file = las_file.replace(".las", "_noise.las")
    mch_file = las_file.replace(".las", "_mCH.bil")

    epsg = '32611'
    res = str(0.10)
    gp_file = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_las_proc\\OUTPUT_FILES\\LAS\\19_149_las_proc_ground_thinned_merged.las"

    noise_class = str(7)
    noise_isolation = str(2)
    noise_step = str(0.25)

    z_min = str(1)
    z_max = str(30)

    # normalize heights
    lasheight_cmd = [
        lastools_dir + "lasheight.exe", '-i', las_file, '-epsg', epsg,
        '-ground_points', gp_file, '-all_ground_points', '-replace_z', '-o',
        norm_file
    ]

    pcs = subprocess.Popen(lasheight_cmd)  # start process
    pcs.wait()  # wait for it to finish

    # classify isolated returns as noise
    lasnoise_cmd = [
        lastools_dir + "lasnoise.exe", '-i', norm_file, '-epsg', epsg,
        '-isolated', noise_isolation, '-step', noise_step, '-classify_as',
        noise_class, '-o', noise_file
    ]

    pcs = subprocess.Popen(lasnoise_cmd)  # start process
    pcs.wait()  # wait for it to finish

    # calculate mean canopy height
    lasgrid_cmd = [
        lastools_dir + "lasgrid.exe", '-i', noise_file, '-epsg', epsg,
        '-keep_z', z_min, z_max, '-drop_class', noise_class, '-step', res,
        '-elevation', '-mean', '-o', mch_file
    ]

    pcs = subprocess.Popen(lasgrid_cmd)  # start process
    pcs.wait()  # wait for it to finish

    ras = raslib.raster_load(mch_file)
    ras.data[ras.data == ras.no_data] = 0
    raslib.raster_save(ras, mch_file, file_format="EHdr")
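
# usage sketch (hypothetical input path):
# mCH("C:\\data\\lidar\\19_149\\19_149_points.las")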
Code Example #2
import os

import numpy as np
from skimage.feature import peak_local_max

from libraries import raslib

# output_dir, file_base, and elev_in are assumed to be defined upstream
nearest_out = output_dir + file_base + "_prom_nearest.tif"
distance_out = output_dir + file_base + "_prom_distance_to_tree.tif"

# parameters
z_min = 2  # lower elevation limit in elevation units (all cells below will be masked out of search)
z_step = 0.25  # vertical resolution of prominence calculation in elevation units

##

# make output dir
if not os.path.exists(output_dir):
    os.makedirs(output_dir)

# load raster
print("Loading raster")
ras = raslib.raster_load(elev_in)
elev = ras.data.copy()  # rename for legible coding

print("Identifying peaks")

# define mask of valid data above z_min
mask = (elev != ras.no_data) & (elev >= z_min)
# add 1-pixel buffer to mask
mask[0, :] = False
mask[ras.rows - 1, :] = False
mask[:, 0] = False
mask[:, ras.cols - 1] = False

# find peaks above z_min in 9-neighborhood
local_maxi = peak_local_max(elev,
                            threshold_abs=z_min,
                            footprint=np.ones((3, 3)),  # 9-neighborhood
                            labels=mask.astype(int))  # assumed completion of the truncated call
Code Example #3
from libraries import raslib

ras_in = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_las_proc\\OUTPUT_FILES\\RAS\\19_149_ground_point_density_r.10m.bil"

# load raster and set no_data values to zero
ras = raslib.raster_load(ras_in)
ras.data[ras.data == ras.no_data] = 0

# save alongside the input with a "_nona" suffix
ras_out = ras_in.replace(".bil", "_nona.tif")
raslib.raster_save(ras, ras_out)
Code Example #4
import os

import numpy as np

from libraries import raslib

# treetops_out, output_dir, and ras_in are assumed to be defined upstream
file_base = treetops_out.split("\\")[-1].replace("treetops.csv", "")
index_out = output_dir + file_base + "index_.10m.tif"
distance_out = output_dir + file_base + "distance_.10m.tif"

# make output dir
if not os.path.exists(output_dir):
    os.makedirs(output_dir)

# parameters
z_min = 2
min_obj_diam_m = .7  # in meters
subpix_noise = True  # when true, peaks are randomly shifted at the subpixel scale (relative to CHM) to eliminate lattice effects in subsequent raster products

# load CHM
ras = raslib.raster_load(ras_in)

# define mask of valid data above z_min
# mask = (ras.data != ras.no_data) & (ras.data >= z_min)
mask = (ras.data != ras.no_data)

# build opening structure
min_obj_diam_pix = min_obj_diam_m/ras.T0[0]

def mask_gen(size):
    # generates circular mask of diameter mask_size (expected crown domain)
    # force odd size
    if np.floor(size / 2) == size / 2:
        odd_size = size + 1
    else:
        odd_size = size
    # assumed completion of the truncated function: boolean disc of diameter odd_size
    center = (odd_size - 1) / 2
    yy, xx = np.ogrid[:odd_size, :odd_size]
    return (xx - center) ** 2 + (yy - center) ** 2 <= (odd_size / 2) ** 2
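
# usage sketch (hypothetical): build the opening structure at the expected crown scale
# structure = mask_gen(int(np.ceil(min_obj_diam_pix)))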
Code Example #5
def main():
    """
    Calculates raster distance to canopy edge from canopy height model following Mazzotti et al. 2019
    :return:
    """

    import numpy as np
    from scipy.ndimage import convolve
    from libraries import raslib
    import os

    # config
    ras_dir = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_las_proc\\OUTPUT_FILES\\CHM\\"
    # ras_file = "19_149_spike_free_chm_r.10m.tif"
    ras_file = "19_149_spike_free_chm_r.25m.tif"
    ras_in = ras_dir + ras_file  # canopy height model in
    # step_size = 0.10  # in m
    step_size = 0.25  # in m
    canopy_min_elev = 2  # height of "canopy edge"
    kernel_dim = 3  # step size = (kernel_dim - 1)/2
    max_scan = 100  # max number of steps
    file_out = ras_in.replace('CHM', 'DCE').replace('.tif', '_dce.tif')

    # load raster
    ras = raslib.raster_load(ras_in)

    # define canopy binary
    canopy = np.full([ras.rows, ras.cols], 0)
    canopy[ras.data >= canopy_min_elev] = 1

    # preallocate distance to canopy edge (DCE) record
    record = np.full([ras.rows, ras.cols], np.nan)

    kernel = np.full([kernel_dim, kernel_dim], 1)

    binary = canopy.copy()
    #while scan:
    for ii in range(1, max_scan):
        convolved = convolve(binary, kernel)
        contenders = np.max([binary * (kernel_dim ** 2), convolved], 0)
        edges = (contenders > 0) & (contenders < kernel_dim ** 2)
        binary[edges] = 1
        record[edges] = ii

    binary = 1 - canopy
    for jj in range(1, max_scan):
        ii = 1 - jj
        convolved = convolve(binary, kernel)
        contenders = np.max([binary * (kernel_dim ** 2), convolved], 0)
        edges = (contenders > 0) & (contenders < kernel_dim ** 2)
        binary[edges] = 1
        record[edges] = ii

    # correct for step size
    record = record * step_size

    record[np.isnan(record)] = ras.no_data

    # export
    dir_out = ras_dir.replace('CHM', 'DCE')
    if not os.path.exists(dir_out):
        os.makedirs(dir_out)

    ras_dce = ras
    ras_dce.data = record
    raslib.raster_save(ras_dce, file_out, data_format="float32")

    # sanity check: reload the output and pull valid (non-no_data) values
    rec = raslib.raster_load(file_out)
    valid = rec.data[rec.data != rec.no_data]
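

# assumed entry point, if the module is run as a script
if __name__ == '__main__':
    main()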
Code Example #6

########
import numpy as np
import pandas as pd
import tifffile as tif  # assumed binding for `tif.imread` used below

from libraries import raslib

batch_dir = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\ray_sampling\\batches\\lrs_mb_15_dem_.25m_61px_mp15.25\\outputs\\'
# batch_dir = 'C:\\Users\\jas600\\workzone\\data\\hemigen\\mb_15_1m_pr.15_os10\\outputs\\'

scaling_coef = 0.19546

angle_lookup = pd.read_csv(batch_dir + "phi_theta_lookup.csv")
metalog = pd.read_csv(batch_dir + "rsgmetalog.csv")
metalog.loc[:, 'phi_deg'] = metalog.phi * 180 / np.pi

# phi_weight (table of angular weights, indexed by integer phi in degrees) is assumed defined upstream
metalog.loc[:, 'weight'] = phi_weight.covar_15_norm[metalog.phi_deg.astype(int)].values


template = raslib.raster_load(batch_dir + metalog.file_name[0])
template.data = template.data[0]
template.data[template.data == template.no_data] = np.nan

lncnw = template.data.copy()
lncnw[:, :] = np.nan

for ii in range(0, len(metalog)):
    temp = tif.imread(batch_dir + metalog.file_name[ii])[:, :, 1]
    temp[temp == -9999] = np.nan
    temp = temp * scaling_coef * metalog.weight[ii]

    lncnw = np.nansum(np.concatenate((lncnw[:, :, np.newaxis], temp[:, :, np.newaxis]), axis=2), axis=2)
    print(str(ii + 1) + ' of ' + str(len(metalog)))

# clean up nans
Code Example #7
def main():
    """
    Creates snow products (HS, dHS, SWE, dSWE) from processed lidar DSMs, merged CHMs, and merged point densities
    :return:
    """

    from libraries import raslib
    import numpy as np
    import pandas as pd
    import os

    snow_on = ["19_045", "19_050", "19_052", "19_107", "19_123"]
    snow_off = ["19_149"]
    all_dates = snow_on + snow_off

    # dict mapping each density assumption (key) to the dates for which it is calculated (values)
    snow_on_ass = {}
    snow_on_ass["alin"] = ["19_045", "19_050", "19_052", "19_107", "19_123"]
    snow_on_ass["clin"] = ["19_045", "19_050", "19_052", "19_107", "19_123"]
    snow_on_ass["fcon"] = ["19_045", "19_050", "19_052", "19_107", "19_123"]
    snow_on_ass["ccon"] = ["19_045", "19_050", "19_052", "19_107", "19_123"]
    snow_on_ass["ahpl"] = ["19_045", "19_050", "19_052"]

    resolution = [".05", ".10", ".25", "1.00"]
    resamp_resolution = [".10", ".25", "1.00"]

    # interpolation_lengths = ["0", "1", "2", "3"]
    interpolation_lengths = ["2", "3"]

    # dict mapping each density assumption (key) to its regression parameters (values)
    swe_dens_ass = {}

    # coefficients for snow depth [cm], swe [mm]
    # # all veg, each day, linear depth-density
    # depth_to_density_intercept = dict(zip(snow_on, [109.1403, 110.2249, 72.5015, 224.6406, 223.5683]))
    # depth_to_density_slope = dict(zip(snow_on, np.array([1.2717, 1.2212, 1.5346, 1.7833, 1.2072])))
    # swe_dens_ass["alin"] = (depth_to_density_intercept, depth_to_density_slope)

    # # forest only, each day, linear depth-density
    # depth_to_density_intercept = dict(zip(snow_on, [147.5136, 102.460, 3.303, 249.1015, 293.10207]))
    # depth_to_density_slope = dict(zip(snow_on, np.array([1.3616, 1.486, 4.054, 0.3966, -0.03987])))
    # dens_ass["flin"] = (depth_to_density_intercept, depth_to_density_slope)
    #
    # # all, each day, constant density
    # depth_to_density_intercept = dict(zip(snow_on, np.array([191.534, 189.129, 176.066, 297.336, 298.589])))
    # depth_to_density_slope = dict(zip(snow_on, np.array([0, 0, 0, 0, 0])))
    # dens_ass["acon"] = (depth_to_density_intercept, depth_to_density_slope)
    #
    # forest only, each day, constant density
    depth_to_density_intercept = dict(
        zip(snow_on, np.array([165.05, 158.56, 134.48, 263.22, 291.14])))
    depth_to_density_slope = dict(zip(snow_on, np.array([0, 0, 0, 0, 0])))
    swe_dens_ass["fcon"] = (depth_to_density_intercept, depth_to_density_slope)

    # # clearing only, each day, constant density
    # depth_to_density_intercept = dict(zip(snow_on, np.array([189.022, 193.585, 181.896, 304.722, 303.800])))
    # depth_to_density_slope = dict(zip(snow_on, np.array([0, 0, 0, 0, 0])))
    # swe_dens_ass["ccon"] = (depth_to_density_intercept, depth_to_density_slope)

    # clearing only, each day, linear depth-density
    depth_to_density_intercept = dict(
        zip(snow_on,
            np.array([109.1403, 118.8462, 76.6577, 263.4744, 254.5358])))
    depth_to_density_slope = dict(
        zip(snow_on, np.array([1.2717, 1.0892, 1.4445, 0.9819, 0.7354])))
    swe_dens_ass["clin"] = (depth_to_density_intercept, depth_to_density_slope)

    # #
    # # hedstrom pomeroy intercept linear
    # depth_to_density_intercept = dict(zip(snow_on, np.array([89.26, 85.39, 72.05, None, None])))
    # depth_to_density_slope = dict(zip(snow_on, np.array([1.59, 1.6336, 1.5420, None, None])))
    # swe_dens_ass["ahpl"] = (depth_to_density_intercept, depth_to_density_slope)

    dswe_dens_ass = {}
    # dswe_dens_ass["fnsd"] = [85.08949, 72.235068, None, None]  # new snow density from forest SR50
    # dswe_dens_ass["cnsd"] = [96.886757, 83.370217, None, None]  # new snow density from clearing SR50
    dswe_dens_ass["ucgo"] = [196.406605, 91.346775, None,
                             None]  # new snow density from clearing SR50

    dhs_bias = [0.0136670391, -0.0531575101, None,
                None]  # dhs bias from clearing SR50 for select bias correction

    # templates for file naming and management
    dem_in_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\TEMP_FILES\\12_dem\\res_<RES>\\'
    dem_merged_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\DEM\\'
    dem_merged_file_template = '<DATE>_dem_r<RES>m.tif'

    dem_int_in_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\TEMP_FILES\\12_dem\\interpolated_res_<RES>\\'
    dem_int_merged_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\DEM\\interpolated\\'
    dem_int_merged_file_template = '<DATE>_dem_interpolated_r<RES>m.tif'

    dsm_can_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\CAN\\<DATE>_spike_free_dsm_can_r<RES>m.bil'
    chm_dir_template = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\CHM\\"
    chm_file_template = "<DATE>_spike_free_chm_r<RES>m.tif"

    hs_in_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\TEMP_FILES\\15_hs\\interp_<INTLEN>x\\res_<RES>\\'
    hs_merged_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\HS\\interp_<INTLEN>x\\'
    hs_merged_file_template = '<DATE>_hs_r<RES>m_interp<INTLEN>x.tif'

    hs_bias_file_in = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_hs_point_samples_error.csv"
    hs_bias_file_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_hs_point_samples_error_ceiling.csv"

    hs_bc_dir_template = hs_merged_dir_template + 'bias_corrected\\'
    hs_bc_file_template = hs_merged_file_template.replace(
        '.tif', '_bias_corrected.tif')

    # hs_clean_dir_template = hs_merged_dir_template + 'clean\\'  # bias corrected
    hs_clean_dir_template = hs_merged_dir_template + 'clean_no_bias\\'
    hs_clean_file_template = hs_merged_file_template.replace(
        '.tif', '_clean.tif')

    hs_resamp_dir_template = hs_merged_dir_template + 'resamp\\'
    hs_resamp_file_template = hs_clean_file_template.replace(
        '.tif', '_resamp.tif')

    # dhs_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\mb_65\\dHS\\interp_<INTLEN>x\\<DDI>-<DDJ>\\'
    dhs_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\mb_65\\dHS_no_bias\\interp_<INTLEN>x\\<DDI>-<DDJ>\\'
    dhs_file_template = 'dhs_<DDI>-<DDJ>_r<RES>m_interp<INTLEN>x.tif'

    # swe_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\SWE\\<ASS>\\interp_<INTLEN>x\\'
    swe_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\SWE_no_bias\\<ASS>\\interp_<INTLEN>x\\'
    swe_file_template = 'swe_<ASS>_<DATE>_r<RES>m_interp<INTLEN>x.tif'

    # swe_masked_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\SWE\\<ASS>\\interp_<INTLEN>x\\masked\\'
    swe_masked_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\SWE_no_bias\\<ASS>\\interp_<INTLEN>x\\masked\\'
    swe_masked_file_template = 'swe_<ASS>_<DATE>_r<RES>m_interp<INTLEN>x_masked.tif'

    # dswe_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\mb_65\\dSWE\\<ASS>\\interp_<INTLEN>x\\<DDI>-<DDJ>\\'
    dswe_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\mb_65\\dSWE_no_bias\\<ASS>\\interp_<INTLEN>x\\<DDI>-<DDJ>\\'
    dswe_file_template = 'dswe_<ASS>_<DDI>-<DDJ>_r<RES>m_interp<INTLEN>x.tif'

    # dswe_masked_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\mb_65\\dSWE\\<ASS>\\interp_<INTLEN>x\\<DDI>-<DDJ>\\masked\\'
    dswe_masked_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\mb_65\\dSWE_no_bias\\<ASS>\\interp_<INTLEN>x\\<DDI>-<DDJ>\\masked\\'
    dswe_masked_file_template = 'dswe_<ASS>_<DDI>-<DDJ>_r<RES>m_interp<INTLEN>x_masked.tif'

    point_dens_dir_template = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\<DATE>\\<DATE>_las_proc\\OUTPUT_FILES\\RAS\\'
    point_dens_file_template = '<DATE>_ground_point_density_r<RES>m.bil'

    initial_pts_file = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\surveys\\all_ground_points_UTM11N_uid_flagged_cover.csv"
    hs_uncorrected_pts_path_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_hs_point_samples_uncorrected.csv"
    hs_uncorrected_pts_path_out_sst = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_hs_point_samples_uncorrected_sst.csv"
    hs_clean_pts_path_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_hs_point_samples_clean.csv"
    hs_resamp_pts_path_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_hs_point_samples_resamp.csv"
    swe_pts_path_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_swe_point_samples.csv"
    point_dens_pts_path_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_point_density_point_samples.csv"
    dem_pts_path_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\analysis\\validation\\lidar_dem_point_samples.csv"

    # mask polygons
    snow_mask = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\snow_depth_mask.shp"
    trail_mask = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\trampled_snow_mask_dissolved.shp"

    def path_sub(path,
                 dd=None,
                 rr=None,
                 qq=None,
                 ddi=None,
                 ddj=None,
                 itn=None,
                 ass=None,
                 intlen=None):
        # function for file name parsing
        if isinstance(path, str):
            # nest pure strings in list
            path = [path]

        for ii in range(0, len(path)):
            if dd is not None:
                path[ii] = path[ii].replace('<DATE>', dd)
            if rr is not None:
                path[ii] = path[ii].replace('<RES>', rr)
            if qq is not None:
                path[ii] = path[ii].replace('<QUANT>', str(qq))
            if ddi is not None:
                path[ii] = path[ii].replace('<DDI>', str(ddi))
            if ddj is not None:
                path[ii] = path[ii].replace('<DDJ>', str(ddj))
            if itn is not None:
                path[ii] = path[ii].replace('<ITN>', str(itn))
            if ass is not None:
                path[ii] = path[ii].replace('<ASS>', str(ass))
            if intlen is not None:
                path[ii] = path[ii].replace('<INTLEN>', str(intlen))

        return ''.join(path)
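
    # example (hypothetical values):
    # path_sub([dem_merged_dir_template, dem_merged_file_template], dd='19_045', rr='.25')
    # -> '...\\19_045\\19_045_las_proc\\OUTPUT_FILES\\DEM\\19_045_dem_r.25m.tif'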

    # merge all dems into single outputs (culled and interpolated)
    for dd in (snow_on + snow_off):
        # update file paths with date
        dem_out_dir = path_sub(dem_merged_dir_template, dd=dd)
        dem_int_out_dir = path_sub(dem_int_merged_dir_template, dd=dd)

        # create DEM directories if they do not exist
        if not os.path.exists(dem_out_dir):
            os.makedirs(dem_out_dir)

        if not os.path.exists(dem_int_out_dir):
            os.makedirs(dem_int_out_dir)

        for rr in resolution:
            # standard
            dem_in_dir = path_sub(dem_in_dir_template, dd=dd, rr=rr)
            dem_out_file = path_sub(dem_merged_file_template, dd=dd, rr=rr)
            raslib.raster_merge(dem_in_dir,
                                '.bil',
                                dem_out_dir + dem_out_file,
                                no_data="-9999")

            # interpolated
            dem_int_in_dir = path_sub(dem_int_in_dir_template, dd=dd, rr=rr)
            dem_int_out_file = path_sub(dem_int_merged_file_template,
                                        dd=dd,
                                        rr=rr)
            raslib.raster_merge(dem_int_in_dir,
                                '.bil',
                                dem_int_out_dir + dem_int_out_file,
                                no_data="-9999")

    # create snow off CHM products
    for dd in snow_off:
        chm_out_dir = path_sub(chm_dir_template, dd=dd)

        # create CHM directory if it does not exist
        if not os.path.exists(chm_out_dir):
            os.makedirs(chm_out_dir)

        for rr in resolution:
            dem_int_in = path_sub(
                [dem_int_merged_dir_template, dem_int_merged_file_template],
                dd=dd,
                rr=rr)
            dsm_can_in = path_sub(dsm_can_template, dd=dd, rr=rr)
            chm_out = path_sub([chm_dir_template, chm_file_template],
                               dd=dd,
                               rr=rr)

            hs = raslib.raster_dif_gdal(dsm_can_in,
                                        dem_int_in,
                                        inherit_from=2,
                                        dif_out=chm_out)

    # merge snow on snow depths into single output (PST)
    for dd in snow_on:
        for intlen in interpolation_lengths:
            # update file paths with date
            hs_out_dir = path_sub(hs_merged_dir_template, dd=dd, intlen=intlen)

            # create HS directory if it does not exist
            if not os.path.exists(hs_out_dir):
                os.makedirs(hs_out_dir)

            for rr in resolution:
                hs_in_dir = path_sub(hs_in_dir_template,
                                     dd=dd,
                                     rr=rr,
                                     intlen=intlen)
                hs_out_file = path_sub(hs_merged_file_template,
                                       dd=dd,
                                       rr=rr,
                                       intlen=intlen)

                # calculate hs
                raslib.raster_merge(hs_in_dir,
                                    '.bil',
                                    hs_out_dir + hs_out_file,
                                    no_data="-9999")

    # point hs samples
    pts_file_in = initial_pts_file
    for dd in snow_on:
        for intlen in interpolation_lengths:
            for rr in resolution:
                hs_in_path = path_sub(hs_merged_dir_template +
                                      hs_merged_file_template,
                                      dd=dd,
                                      rr=rr,
                                      intlen=intlen)
                colname = str(dd) + '_' + str(rr) + '_' + str(intlen)
                raslib.csv_sample_raster(hs_in_path,
                                         pts_file_in,
                                         hs_uncorrected_pts_path_out,
                                         "xcoordUTM11",
                                         "ycoordUTM11",
                                         colname,
                                         sample_no_data_value='')
                pts_file_in = hs_uncorrected_pts_path_out

    # run R script "snow_depth_bias_correction.r"

    # # bias-correct snow depths
    # hs_bias = pd.read_csv(hs_bias_file_in).loc[:, ["day", "lidar_res", "interp_len", "hs_mb"]]
    # hs_bias.loc[:, "q_999"] = np.nan
    # hs_bias_cor_res = 0.05
    # hs_bias_cor_intlen = 2
    # for dd in snow_on:
    #     for intlen in interpolation_lengths:
    #         # update file paths with date
    #         hs_in_dir = path_sub(hs_merged_dir_template, dd=dd, intlen=intlen)
    #         hs_bc_dir = path_sub(hs_bc_dir_template, dd=dd, intlen=intlen)
    #
    #         # create DEM directory if does not exist
    #         if not os.path.exists(hs_bc_dir):
    #             os.makedirs(hs_bc_dir)
    #
    #         # read mean bias value
    #         mb = hs_bias.hs_mb[(hs_bias.day == dd) &
    #                            (hs_bias.lidar_res == hs_bias_cor_res) &
    #                            (hs_bias.interp_len == hs_bias_cor_intlen)]
    #         if len(mb) != 1:
    #             raise Exception("More than one (or no) match for snow depth bias, bias correction aborted.")
    #         # mb = mb.values[0]
    #         mb = 0  # no bias correction!
    #
    #         for rr in resolution:
    #             hs_in_file = path_sub(hs_merged_file_template, dd=dd, rr=rr, intlen=intlen)
    #
    #             # load file
    #             hs_bc_file = path_sub(hs_bc_file_template, dd=dd, rr=rr, intlen=intlen)
    #
    #             # bias correct valid hs values
    #             ras = raslib.raster_load(hs_in_dir + hs_in_file)
    #             ras.data[ras.data != ras.no_data] += -mb
    #
    #             # save
    #             raslib.raster_save(ras, hs_bc_dir + hs_bc_file)

    hs_clean_ceiling_res = '.05'
    hs_clean_ceiling_intlen = '2'  # avoid intlen 1 with res .05
    hs_clean_ceiling_quantile = 0.999  # determined visually...
    hs_bias = pd.read_csv(
        hs_bias_file_in).loc[:, ["day", "lidar_res", "interp_len", "hs_mb"]]
    hs_bias.loc[:, "ceiling_quantile"] = hs_clean_ceiling_quantile
    hs_bias.loc[:, "ceiling_res"] = hs_clean_ceiling_res
    hs_bias.loc[:, "ceiling_value"] = np.nan
    # clean snow depths (restrict to specified range)
    for dd in snow_on:
        for intlen in interpolation_lengths:
            # update file paths with date
            # hs_in_dir = path_sub(hs_bc_dir_template, dd=dd, intlen=intlen)  # uses bias corrected
            hs_in_dir = path_sub(hs_merged_dir_template, dd=dd,
                                 intlen=intlen)  # not bias corrected
            hs_clean_dir = path_sub(hs_clean_dir_template,
                                    dd=dd,
                                    intlen=intlen)

            # create clean HS directory if it does not exist
            if not os.path.exists(hs_clean_dir):
                os.makedirs(hs_clean_dir)

            # load file
            # hs_ceil_path = path_sub(hs_bc_dir_template + hs_bc_file_template, dd=dd, rr=hs_clean_ceiling_res, intlen=hs_clean_ceiling_intlen)
            hs_ceil_path = path_sub(
                hs_merged_dir_template + hs_merged_file_template,
                dd=dd,
                rr=hs_clean_ceiling_res,
                intlen=hs_clean_ceiling_intlen)  # not bias corr
            ras = raslib.raster_load(hs_ceil_path)

            # record quantiles
            cv = np.quantile(ras.data[ras.data != ras.no_data],
                             hs_clean_ceiling_quantile)
            hs_bias.loc[hs_bias.day == dd, "ceiling_value"] = cv

            for rr in resolution:
                # hs_in_file = path_sub(hs_bc_file_template, dd=dd, rr=rr, intlen=intlen)
                hs_in_file = path_sub(hs_merged_file_template,
                                      dd=dd,
                                      rr=rr,
                                      intlen=intlen)  # not bias corrected

                # load file
                hs_clean_file = path_sub(hs_clean_file_template,
                                         dd=dd,
                                         rr=rr,
                                         intlen=intlen)

                # send negative values to zero
                ras = raslib.raster_load(hs_in_dir + hs_in_file)
                ras.data[(ras.data < 0) & (ras.data != ras.no_data)] = 0

                # send values beyond ceiling to no_data
                ras.data[ras.data > cv] = ras.no_data

                # save
                raslib.raster_save(ras, hs_clean_dir + hs_clean_file)

    hs_bias.to_csv(hs_bias_file_out, index=False)

    # # resample points
    # for dd in snow_on:
    #     for intlen in interpolation_lengths:
    #         # update file paths with date
    #         hs_resamp_dir = path_sub(hs_resamp_dir_template, dd=dd, intlen=intlen)
    #
    #         # create DEM directory if does not exist
    #         if not os.path.exists(hs_resamp_dir):
    #             os.makedirs(hs_resamp_dir)
    #
    #         hs_data_file = path_sub(hs_clean_dir_template + hs_clean_file_template, dd=dd, rr=".05", intlen=intlen)
    #
    #         for rr in resamp_resolution:
    #             hs_format_in = path_sub(hs_clean_dir_template + hs_clean_file_template, dd=dd, rr=rr, intlen=intlen)
    #             # out file
    #             hs_resamp_out = path_sub(hs_resamp_dir_template + hs_resamp_file_template, dd=dd, rr=rr, intlen=intlen)
    #
    #             raslib.ras_reproject(hs_data_file, hs_format_in, hs_resamp_out, mode="median")

    # differential snow depth (dHS)
    for ii in range(0, len(snow_on) - 1):
        ddi = snow_on[ii]
        ddj = snow_on[ii + 1]
        for intlen in interpolation_lengths:
            # update file paths with dates
            dhs_dir = path_sub(dhs_dir_template,
                               ddi=ddi,
                               ddj=ddj,
                               intlen=intlen)

            # create dHS directory if it does not exist
            if not os.path.exists(dhs_dir):
                os.makedirs(dhs_dir)

            for rr in resolution:
                ddi_in = path_sub(
                    [hs_clean_dir_template, hs_clean_file_template],
                    dd=ddi,
                    rr=rr,
                    intlen=intlen)
                ddj_in = path_sub(
                    [hs_clean_dir_template, hs_clean_file_template],
                    dd=ddj,
                    rr=rr,
                    intlen=intlen)
                dhs_out = path_sub([dhs_dir_template, dhs_file_template],
                                   ddi=ddi,
                                   ddj=ddj,
                                   rr=rr,
                                   intlen=intlen)

                hs = raslib.raster_dif_gdal(ddj_in,
                                            ddi_in,
                                            inherit_from=2,
                                            dif_out=dhs_out)

    # selective bias correction of dHS
    for ii in range(0, len(snow_on) - 1):
        if dhs_bias[ii] is not None:
            ddi = snow_on[ii]
            ddj = snow_on[ii + 1]
            for intlen in interpolation_lengths:
                # update file paths with dates
                dhs_dir = path_sub(dhs_dir_template,
                                   ddi=ddi,
                                   ddj=ddj,
                                   intlen=intlen)
                dhs_bc_dir = dhs_dir.replace("dHS_no_bias",
                                             "dHS_bias_corrected")

                # create bias-corrected dHS directory if it does not exist
                if not os.path.exists(dhs_bc_dir):
                    os.makedirs(dhs_bc_dir)

                for rr in resolution:
                    dhs_in = dhs_dir + path_sub(dhs_file_template,
                                                ddi=ddi,
                                                ddj=ddj,
                                                rr=rr,
                                                intlen=intlen)
                    dhs_bc_out = dhs_bc_dir + path_sub(dhs_file_template,
                                                       ddi=ddi,
                                                       ddj=ddj,
                                                       rr=rr,
                                                       intlen=intlen)

                    # load dhs
                    ras = raslib.raster_load(dhs_in)
                    # correct for bias
                    ras.data[(ras.data != ras.no_data)] = ras.data[
                        (ras.data != ras.no_data)] - dhs_bias[ii]
                    # save
                    raslib.raster_save(ras, dhs_bc_out)

    # run density analysis in R

    # calculate SWE products
    for ass in swe_dens_ass.keys():
        print(ass)
        for intlen in interpolation_lengths:
            for dd in snow_on_ass[ass]:
                # update file paths with date
                swe_dir = path_sub(swe_dir_template,
                                   dd=dd,
                                   ass=ass,
                                   intlen=intlen)
                swe_masked_dir = path_sub(swe_masked_dir_template,
                                          dd=dd,
                                          ass=ass,
                                          intlen=intlen)

                # create SWE directories if they do not exist
                if not os.path.exists(swe_dir):
                    os.makedirs(swe_dir)
                if not os.path.exists(swe_masked_dir):
                    os.makedirs(swe_masked_dir)

                for rr in resolution:
                    # update file paths with resolution
                    hs_file = path_sub(
                        [hs_clean_dir_template, hs_clean_file_template],
                        dd=dd,
                        rr=rr,
                        intlen=intlen)
                    swe_file = path_sub([swe_dir_template, swe_file_template],
                                        dd=dd,
                                        rr=rr,
                                        ass=ass,
                                        intlen=intlen)
                    swe_masked_file = path_sub(
                        [swe_masked_dir_template, swe_masked_file_template],
                        dd=dd,
                        rr=rr,
                        ass=ass,
                        intlen=intlen)

                    # calculate swe
                    ras = raslib.raster_load(hs_file)
                    valid_cells = np.where(ras.data != ras.no_data)
                    depth = ras.data[valid_cells]

                    # calculate swe from depth density regression
                    mm = swe_dens_ass[ass][1][dd]
                    bb = swe_dens_ass[ass][0][dd]
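                    # density [kg/m3] = bb + mm * (depth [m] * 100); swe [mm] = depth [m] * density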
                    swe = depth * (mm * depth * 100 + bb)

                    ras.data[valid_cells] = swe
                    raslib.raster_save(ras, swe_file)

                    # mask
                    ras = raslib.raster_load(swe_file)  # load copy
                    raslib.raster_save(ras, swe_masked_file)  # save copy
                    raslib.raster_burn(swe_masked_file, snow_mask,
                                       ras.no_data)  # burn end of season snow
                    raslib.raster_burn(swe_masked_file, trail_mask,
                                       ras.no_data)  # burn trampled snow

    # dSWE from dHS
    for ass in dswe_dens_ass.keys():
        print(ass)
        for ii in range(0, len(snow_on) - 1):
            if dswe_dens_ass[ass][ii] is not None:
                ddi = snow_on[ii]
                ddj = snow_on[ii + 1]
                for intlen in interpolation_lengths:
                    # update file paths with dates

                    dswe_dir = path_sub(dswe_dir_template,
                                        ddi=ddi,
                                        ddj=ddj,
                                        intlen=intlen,
                                        ass=ass)
                    dswe_masked_dir = path_sub(dswe_masked_dir_template,
                                               ddi=ddi,
                                               ddj=ddj,
                                               ass=ass,
                                               intlen=intlen)

                    if not os.path.exists(dswe_dir):
                        os.makedirs(dswe_dir)
                    if not os.path.exists(dswe_masked_dir):
                        os.makedirs(dswe_masked_dir)
                    if not os.path.exists(
                            dswe_dir.replace("dSWE_no_bias",
                                             "dSWE_bias_corrected")):
                        os.makedirs(
                            dswe_dir.replace("dSWE_no_bias",
                                             "dSWE_bias_corrected"))
                    if not os.path.exists(
                            dswe_masked_dir.replace("dSWE_no_bias",
                                                    "dSWE_bias_corrected")):
                        os.makedirs(
                            dswe_masked_dir.replace("dSWE_no_bias",
                                                    "dSWE_bias_corrected"))

                    for rr in resolution:

                        # no bias
                        dhs_in = path_sub(
                            [dhs_dir_template, dhs_file_template],
                            ddi=ddi,
                            ddj=ddj,
                            rr=rr,
                            intlen=intlen)
                        dswe_out = path_sub(
                            [dswe_dir_template, dswe_file_template],
                            ddi=ddi,
                            ddj=ddj,
                            rr=rr,
                            intlen=intlen,
                            ass=ass)
                        dswe_masked_file = path_sub([
                            dswe_masked_dir_template, dswe_masked_file_template
                        ],
                                                    ddi=ddi,
                                                    ddj=ddj,
                                                    rr=rr,
                                                    intlen=intlen,
                                                    ass=ass)

                        # load data
                        ras = raslib.raster_load(dhs_in)

                        # multiply differential depth by new snow density
                        ras.data[ras.data != ras.no_data] = ras.data[
                            ras.data != ras.no_data] * dswe_dens_ass[ass][ii]

                        # save dswe
                        raslib.raster_save(ras, dswe_out)

                        # mask
                        ras = raslib.raster_load(dswe_out)  # load copy
                        raslib.raster_save(ras, dswe_masked_file)  # save copy
                        raslib.raster_burn(dswe_masked_file, trail_mask,
                                           ras.no_data)  # burn trampled snow

                        # bias_corrected
                        dhs_in = path_sub(
                            [dhs_dir_template, dhs_file_template],
                            ddi=ddi,
                            ddj=ddj,
                            rr=rr,
                            intlen=intlen)
                        dhs_in = dhs_in.replace("dHS_no_bias",
                                                "dHS_bias_corrected")
                        dswe_out = path_sub(
                            [dswe_dir_template, dswe_file_template],
                            ddi=ddi,
                            ddj=ddj,
                            rr=rr,
                            intlen=intlen,
                            ass=ass)
                        dswe_out = dswe_out.replace("dSWE_no_bias",
                                                    "dSWE_bias_corrected")
                        dswe_masked_file = path_sub([
                            dswe_masked_dir_template, dswe_masked_file_template
                        ],
                                                    ddi=ddi,
                                                    ddj=ddj,
                                                    rr=rr,
                                                    intlen=intlen,
                                                    ass=ass)
                        dswe_masked_file = dswe_masked_file.replace(
                            "dSWE_no_bias", "dSWE_bias_corrected")

                        # load data
                        ras = raslib.raster_load(dhs_in)

                        # multiply differential depth by new snow density
                        ras.data[ras.data != ras.no_data] = ras.data[
                            ras.data != ras.no_data] * dswe_dens_ass[ass][ii]

                        # save dswe
                        raslib.raster_save(ras, dswe_out)

                        # mask
                        ras = raslib.raster_load(dswe_out)  # load copy
                        raslib.raster_save(ras, dswe_masked_file)  # save copy
                        raslib.raster_burn(dswe_masked_file, trail_mask,
                                           ras.no_data)  # burn trampled snow

    # # differential SWE products
    # for ass in swe_dens_ass.keys():
    #     print(ass)
    #     for intlen in interpolation_lengths:
    #         for ii in range(0, len(snow_on_ass[ass]) - 1):
    #             ddi = snow_on[ii]
    #             ddj = snow_on[ii + 1]
    #
    #             # update file paths with dates
    #             dswe_dir = path_sub(dswe_dir_template, ddi=ddi, ddj=ddj, ass=ass, intlen=intlen)
    #             dswe_masked_dir = path_sub(dswe_masked_dir_template, ddi=ddi, ddj=ddj, ass=ass, intlen=intlen)
    #
    #             # create SWE directory if does not exist
    #             if not os.path.exists(dswe_dir):
    #                 os.makedirs(dswe_dir)
    #             if not os.path.exists(dswe_masked_dir):
    #                 os.makedirs(dswe_masked_dir)
    #
    #             for rr in resolution:
    #                 ddi_in = path_sub([swe_dir_template, swe_file_template], dd=ddi, rr=rr, ass=ass, intlen=intlen)
    #                 ddj_in = path_sub([swe_dir_template, swe_file_template], dd=ddj, rr=rr, ass=ass, intlen=intlen)
    #                 dswe_file = path_sub([dswe_dir_template, dswe_file_template], ddi=ddi, ddj=ddj, rr=rr, ass=ass, intlen=intlen)
    #                 dswe_masked_file = path_sub([dswe_masked_dir_template, dswe_masked_file_template], ddi=ddi, ddj=ddj, rr=rr, ass=ass, intlen=intlen)
    #
    #                 dswe_ras = rastools.raster_dif_gdal(ddj_in, ddi_in, inherit_from=2, dif_out=dswe_file)
    #
    #                 # mask
    #                 ras = rastools.raster_load(dswe_file)
    #                 rastools.raster_save(ras, dswe_masked_file)
    #
    #                 rastools.raster_burn(dswe_masked_file, snow_mask, ras.no_data)
    #                 rastools.raster_burn(dswe_masked_file, trail_mask, ras.no_data)

    # point samples of hs
    pts_file_in = initial_pts_file
    for dd in snow_on:
        for intlen in interpolation_lengths:
            for rr in resolution:
                try:
                    hs_clean_path = path_sub(hs_clean_dir_template +
                                             hs_clean_file_template,
                                             dd=dd,
                                             rr=rr,
                                             intlen=intlen)
                    colname = str(dd) + '_' + str(rr) + '_' + str(intlen)
                    raslib.csv_sample_raster(hs_clean_path,
                                             pts_file_in,
                                             hs_clean_pts_path_out,
                                             "xcoordUTM11",
                                             "ycoordUTM11",
                                             colname,
                                             sample_no_data_value='')
                    pts_file_in = hs_clean_pts_path_out

                except AttributeError:
                    print('File does not exist')
    #
    # # resampled points
    # pts_file_in = initial_pts_file
    # for dd in snow_on:
    #     for intlen in interpolation_lengths:
    #         for rr in resamp_resolution:
    #             try:
    #                 hs_resamp_path = path_sub(hs_resamp_dir_template + hs_resamp_file_template, dd=dd, rr=rr, intlen=intlen)
    #                 colname = str(dd) + '_' + str(rr) + '_' + str(intlen)
    #                 rastools.csv_sample_raster(hs_resamp_path, pts_file_in, hs_resamp_pts_path_out, "xcoordUTM11",
    #                                            "ycoordUTM11", colname, sample_no_data_value='')
    #                 pts_file_in = hs_resamp_pts_path_out
    #
    #             except AttributeError:
    #                 print('File does not exist')

    # point samples of swe
    pts_file_in = initial_pts_file
    for ass in swe_dens_ass.keys():
        for dd in snow_on_ass[ass]:
            for intlen in interpolation_lengths:
                for rr in resolution:
                    try:
                        swe_path = path_sub(swe_dir_template +
                                            swe_file_template,
                                            dd=dd,
                                            rr=rr,
                                            ass=ass,
                                            intlen=intlen)
                        colname = str(dd) + '_' + str(rr) + '_' + str(
                            ass) + '_' + str(intlen)
                        raslib.csv_sample_raster(swe_path,
                                                 pts_file_in,
                                                 swe_pts_path_out,
                                                 "xcoordUTM11",
                                                 "ycoordUTM11",
                                                 colname,
                                                 sample_no_data_value='')
                        pts_file_in = swe_pts_path_out
                    except AttributeError:
                        print('File does not exist')

    # point samples of point density
    pts_file_in = initial_pts_file
    for dd in snow_on + snow_off:
        for rr in [".10", ".25"]:
            point_dens_path = path_sub(point_dens_dir_template +
                                       point_dens_file_template,
                                       dd=dd,
                                       rr=rr)
            colname = str(dd) + '_' + str(rr)

            raslib.csv_sample_raster(point_dens_path,
                                     pts_file_in,
                                     point_dens_pts_path_out,
                                     "xcoordUTM11",
                                     "ycoordUTM11",
                                     colname,
                                     sample_no_data_value='')
            pts_file_in = point_dens_pts_path_out

            print(rr)

    # point samples of dem
    pts_file_in = initial_pts_file
    for dd in (snow_on + snow_off):
        for rr in resolution:
            dem_path = path_sub(dem_merged_dir_template +
                                dem_merged_file_template,
                                dd=dd,
                                rr=rr)
            colname = str(dd) + '_' + str(rr)

            raslib.csv_sample_raster(dem_path,
                                     pts_file_in,
                                     dem_pts_path_out,
                                     "xcoordUTM11",
                                     "ycoordUTM11",
                                     colname,
                                     sample_no_data_value='')
            pts_file_in = dem_pts_path_out

            print(rr)
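

# assumed entry point, if the module is run as a script
if __name__ == '__main__':
    main()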
Code Example #8
from libraries import raslib


def pd_to_ras(df, colname, template_in, file_out):
    # rasterize a DataFrame column using its (y_index, x_index) pixel coordinates
    temp = raslib.raster_load(template_in)
    temp.data[:, :] = temp.no_data
    temp.data[df.y_index, df.x_index] = df.loc[:, colname]

    raslib.raster_save(temp, file_out)
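
# usage sketch (hypothetical DataFrame `pts` with y_index/x_index columns):
# pd_to_ras(pts, 'swe', template_in='dem_r.25m.tif', file_out='swe_r.25m.tif')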
Code Example #9
def main():
    """
    Build grid of points with consistent indexing at various resolutions
    :return:
    """

    import pandas as pd
    import numpy as np
    from libraries import raslib
    import os

    batch_dir = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\hemi_grid_points\\mb_65_r.25m_snow_on_offset0\\'

    # build point list from DEM
    dem_in = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_052\\19_052_las_proc\\OUTPUT_FILES\\DEM\\interpolated\\19_052_dem_interpolated_r.25m.tif'  # snow-on
    # dem_in = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_las_proc\\OUTPUT_FILES\\DEM\\interpolated\\19_149_dem_interpolated_r.25m.tif'  # snow-off

    vertical_offset = 0

    mb_65_poly = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\mb_65_poly.shp'
    mb_15_poly = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\mb_15_poly.shp'
    uf_poly = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\upper_forest_poly_UTM11N.shp'
    uc_poly = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\upper_clearing_poly_UTM11N.shp'

    # for plot mappings
    resolution = ['.05', '.10', '.25', '1.00']
    template_scheme = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_las_proc\\OUTPUT_FILES\\TEMPLATES\\19_149_all_point_density_r<RES>m.bil'

    # dem_in = 'C:\\Users\\jas600\\workzone\\data\\hemigen\\hemi_lookups\\19_149_dem_r1.00m_q0.25_interpolated_min1.tif'
    # site_poly = 'C:\\Users\\jas600\\workzone\\data\\hemigen\\hemi_lookups\\upper_forest_poly_UTM11N.shp'
    # batch_dir = 'C:\\Users\\jas600\\workzone\\data\\hemigen\\uf_1m_pr_0_os_0.5\\'

    # create batch dir if it does not exist
    if not os.path.exists(batch_dir):
        os.makedirs(batch_dir)

    pts = raslib.raster_to_pd(dem_in, 'z_m', include_nans=True)
    pts.z_m = pts.z_m + vertical_offset  # shift z_m by vertical offset

    # add point id
    pts = pts.reset_index()
    pts.columns = ['id', 'x_utm11n', 'y_utm11n', 'x_index', 'y_index', 'z_m']

    # # add flag for mb_65
    # load dem as template
    site_plot = raslib.raster_load(dem_in)
    # fill data with zeros
    site_plot.data = np.full((site_plot.rows, site_plot.cols), 0)
    # save to file
    mb_65_plot_dir = batch_dir + 'mb_65_plot_over_dem.tiff'
    raslib.raster_save(site_plot, mb_65_plot_dir, data_format='byte')
    # burn site polygon into plot data as ones
    raslib.raster_burn(mb_65_plot_dir, mb_65_poly, 1)
    # load plot data
    mb_65_plot = raslib.raster_load(mb_65_plot_dir)

    # # add flag for mb_15
    # load template
    site_plot = raslib.raster_load(dem_in)
    # fill data with zeros
    site_plot.data = np.full((site_plot.rows, site_plot.cols), 0)
    # save to file
    mb_15_plot_dir = batch_dir + 'mb_15_plot_over_dem.tiff'
    raslib.raster_save(site_plot, mb_15_plot_dir, data_format='byte')
    # burn site polygon into plot data as ones
    raslib.raster_burn(mb_15_plot_dir, mb_15_poly, 1)
    # load plot data
    mb_15_plot = raslib.raster_load(mb_15_plot_dir)

    # # add flag for (UF)
    # load template
    site_plot = raslib.raster_load(dem_in)
    # fill data with zeros
    site_plot.data = np.full((site_plot.rows, site_plot.cols), 0)
    # save to file
    uf_plot_dir = batch_dir + 'uf_plot_over_dem.tiff'
    raslib.raster_save(site_plot, uf_plot_dir, data_format='byte')
    # burn site polygon into plot data as ones
    raslib.raster_burn(uf_plot_dir, uf_poly, 1)
    # load plot data
    uf_plot = raslib.raster_load(uf_plot_dir)

    # merge plot data with points
    pts_index = (pts.y_index.values, pts.x_index.values)
    pts = pts.assign(mb_65=mb_65_plot.data[pts_index].astype(bool),
                     mb_15=mb_15_plot.data[pts_index].astype(bool),
                     uf=uf_plot.data[pts_index].astype(bool))

    # export point lookup as csv
    pts_dir = batch_dir + 'dem_r.25_points.csv'
    pts.to_csv(pts_dir, index=False)

    # format point ids as raster
    id_raster = raslib.raster_load(dem_in)
    id_raster.data = np.full([id_raster.rows, id_raster.cols],
                             id_raster.no_data).astype(int)
    id_raster.data[pts_index] = pts.id
    # save id raster to file
    id_raster_out = batch_dir + 'dem_r.25_point_ids.tif'
    raslib.raster_save(id_raster, id_raster_out, data_format="int32")

    # point subsets
    pts_mb_65 = pts[pts.mb_65]
    pts_dir = batch_dir + 'dem_r.25_points_mb_65.csv'
    pts_mb_65.to_csv(pts_dir, index=False)

    pts_mb_15 = pts[pts.mb_15]
    pts_dir = batch_dir + 'dem_r.25_points_mb_15.csv'
    pts_mb_15.to_csv(pts_dir, index=False)

    pts_uf = pts[pts.uf]
    pts_dir = batch_dir + 'dem_r.25_points_uf.csv'
    pts_uf.to_csv(pts_dir, index=False)

    # create cookie cutters of sites for each resolution
    for rr in resolution:
        file_out = 'uf_plot_r' + rr + 'm.tif'
        site_poly = uf_poly
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='byte')
        raslib.raster_burn(ras_out, site_poly, 1)

    for rr in resolution:
        file_out = 'uc_plot_r' + rr + 'm.tif'
        site_poly = uc_poly
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='byte')
        raslib.raster_burn(ras_out, site_poly, 1)

    for rr in resolution:
        file_out = 'site_plots_r' + rr + 'm.tif'
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='uint16')
        raslib.raster_burn(ras_out, uf_poly, 1)
        raslib.raster_burn(ras_out, uc_poly, 2)

    for rr in resolution:
        file_out = 'mb_15_plot_r' + rr + 'm.tif'
        site_poly = mb_15_poly
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='byte')
        raslib.raster_burn(ras_out, site_poly, 1)

    for rr in resolution:
        file_out = 'mb_65_plot_r' + rr + 'm.tif'
        site_poly = mb_65_poly
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='byte')
        raslib.raster_burn(ras_out, site_poly, 1)
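

# assumed entry point, if the module is run as a script
if __name__ == '__main__':
    main()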
Code Example #10
    # doy, angle, dir_in, FG_in, LG_in, and FC_in are assumed to be defined upstream

    # outputs
    dir_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\%DOY%\\%DOY%_las_proc\\OUTPUT_FILES\\LPM\\".replace(
        "%DOY%", doy)
    lpmf_out = "%DOY%_LPM-first_a%ANGLE%_r0.10m.tif".replace(
        '%ANGLE%', str(angle)).replace("%DOY%", doy)
    fcov_out = "%DOY%_fcov_a%ANGLE%_r0.10m.tif".replace('%ANGLE%',
                                                        str(angle)).replace(
                                                            "%DOY%", doy)
    lpml_out = "%DOY%_LPM-last_a%ANGLE%_r0.10m.tif".replace(
        '%ANGLE%', str(angle)).replace("%DOY%", doy)
    lpmc_out = "%DOY%_LPM-canopy_a%ANGLE%_r0.10m.tif".replace(
        '%ANGLE%', str(angle)).replace("%DOY%", doy)

    # load raster data in
    FG = raslib.raster_load(dir_in + FG_in)
    LG = raslib.raster_load(dir_in + LG_in)
    FC = raslib.raster_load(dir_in + FC_in)

    # store no-data value
    no_data = FG.no_data

    # set no_data points to zero
    FG.data[FG.data == no_data] = 0
    LG.data[LG.data == no_data] = 0
    FC.data[FC.data == no_data] = 0

    # do calculations
    lpmf = raslib.raster_load(dir_in + FG_in)
    num = FG.data
    denom = (FG.data + FC.data)
    # assumed completion of the truncated calculation: LPM-first = FG / (FG + FC)
    valid = denom > 0
    lpmf.data[valid] = num[valid] / denom[valid]
    lpmf.data[~valid] = no_data
Code Example #11
def las_quantile_dem(las_in,
                     ras_template,
                     q,
                     q_out=None,
                     n_out=None,
                     las_ground_class=2):
    """
    Produces a raster DEM from a classified point cloud by calculating a pixel-wise quantile
    :param las_in: path to LAS point cloud file
    :param ras_template: path to a geotiff image from which pixel binning will be inherited
    :param q: quantile in the range [0, 1] which will be calculated for each pixel
    :param q_out: path of resultant quantile product (optional)
    :param n_out: path of resultant cell point count product (optional)
    :param las_ground_class: class of points representing ground
    :return: quantile product, count product
    """

    from libraries import raslib
    import scipy.stats
    import numpy as np

    # load las ground points (las_xyz_load is assumed to be defined elsewhere in this module)
    las = las_xyz_load(las_in, keep_class=las_ground_class)

    # load template raster for pixel geometry
    ras = raslib.raster_load(ras_template)

    # calculate bins
    x_bins = (ras.T0 * (np.linspace(0, ras.cols, ras.cols + 1), 0))[0]
    y_bins = (ras.T0 * (0, np.linspace(0, ras.rows, ras.rows + 1)))[1]
    ras_bins = [y_bins, x_bins]

    # rectify bins
    rectified = [False, False]
    for ii in [0, 1]:
        if ras_bins[ii][0] > ras_bins[ii][-1]:
            ras_bins[ii] = np.flip(ras_bins[ii])
            rectified[ii] = True

    print('Computing counts... ', end='')
    stat_n, xEdges, yEdges, binnumber = scipy.stats.binned_statistic_2d(
        las[:, 1], las[:, 0], las[:, 2], statistic='count', bins=ras_bins)
    print('done')

    print('Computing quantile... ')

    def quantile_q(x):
        return np.quantile(x, q)

    # preallocate stat_q
    stat_q = np.full((ras.rows, ras.cols), np.nan)

    # for each column
    for ii in range(0, ras.cols):
        # select all points in column
        stripe_points = (las[:, 0] > ras_bins[1][ii]) & (
            las[:, 0] < ras_bins[1][ii + 1]
        )  # all y values, within x value range
        las_sub = las[stripe_points, :]

        if las_sub.size > 0:
            # calculate quantile
            stat_q_col, yEdges, binnumber = scipy.stats.binned_statistic(
                las_sub[:, 1],
                las_sub[:, 2],
                statistic=quantile_q,
                bins=ras_bins[0])
            # save to composite output
            stat_q[:, ii] = stat_q_col

        # progress report
        print('column ' + str(ii + 1) + ' of ' + str(ras.cols))

    # undo rectification
    for ii in [0, 1]:
        if rectified[ii]:
            stat_n = np.flip(stat_n, ii)
            stat_q = np.flip(stat_q, ii)

    # save outputs to file
    if q_out is not None:
        # output quantile
        q_ras = raslib.raster_load(ras_template)
        q_ras.data = stat_q
        q_ras.data[np.isnan(q_ras.data)] = q_ras.no_data
        raslib.raster_save(q_ras, q_out, data_format='float32')

    if n_out is not None:
        # output count
        n_ras = raslib.raster_load(ras_template)
        n_ras.data = stat_n
        n_ras.data[np.isnan(n_ras.data)] = n_ras.no_data
        raslib.raster_save(n_ras, n_out, data_format='float32')

    return stat_q, stat_n
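
# usage sketch (hypothetical paths):
# dem_q, dem_n = las_quantile_dem('ground.las', 'template.tif', q=0.25,
#                                 q_out='dem_q25.tif', n_out='dem_n.tif')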
Code Example #12
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns

from libraries import raslib

# ab, data_uf, hs_050_uf, and plot_out_dir are assumed to be defined upstream
fig, ax1 = plt.subplots()  # assumed figure setup for this truncated snippet
ax1.set_title('Frequency distributions of light transmittance\n Forest plot, 10cm resolution, snow-free canopy')
ax1.set_xlabel("Transmittance [-]")
ax1.set_ylabel("Relative frequency [-]")
g = sns.histplot(ab, x="lpm", hue="method", stat="density", common_norm=False, element="step", bins=40)
g.legend_.set_title(None)
legend = g.get_legend()
handles = legend.legendHandles
legend.remove()
g.legend(handles, ["LPM-First", "LPM-Last", "LPM-Canopy"], loc="upper center")
fig.savefig(plot_out_dir + "freq_dist_trans_uf.png")

# plotting samples in space

# need raster template of upper forest..
uf_in = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\hemi_grid_points\\mb_65_r.25m_snow_off_offset0\\uf_plot_r.05m.tif'
uf = raslib.raster_load(uf_in)

min_p = np.min(np.where(uf.data), axis=1)
max_p = np.max(np.where(uf.data), axis=1)

full = uf.data.copy()

limited = uf.data.copy()
limited[:, :] = 0
valid = ~np.isnan(data_uf.loc[:, "19_050_hs"])
limited[data_uf.y_index[valid], data_uf.x_index[valid]] = 1

resamp = uf.data.copy()
resamp[:, :] = 0
resamp[hs_050_uf.y_index, hs_050_uf.x_index] = 1

Code Example #13
from libraries import raslib
import vaex
import matplotlib.pyplot as plt
import numpy as np

# products to import

# snow depth .10m
hs_in = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\hs\\19_045\\hs_19_045_res_.04m.tif"
dft_in = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_snow_off\\OUTPUT_FILES\\DNT\\19_149_snow_off_627975_5646450_spike_free_chm_.10m_kho_distance_.10m.tif"
hs_10_in = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\hs\\19_045\\hs_19_045_res_.10m.tif"

hs_hdf5 = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\hs\\19_045\\hs_19_045_res_.04m.hdf5"
hs_dft_hdf5 = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\hs\\19_045\\hs_19_045_res_.04m._dft.hdf5"

hs = raslib.raster_load(hs_in)
# dft = rastools.raster_load(dft_in)

# send hs to hdf5
raslib.raster_to_hdf5(hs_in, hs_hdf5, "hs_04m")

# sample site
# create raster of false values
site_shp_path = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\sub_plot_library\\forest_upper.shp"
site_raster_path = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\upper_forest_poly_UTM11N.tif"

template = raslib.raster_load(hs_in)
template.data = np.full([template.rows, template.cols], 0)
template.no_data = 0
raslib.raster_save(template, site_raster_path, data_format="int16")
# burn in upper forest site as true values (assumed completion, mirroring raster_burn usage above)
raslib.raster_burn(site_raster_path, site_shp_path, 1)