import pandas as pd
import numpy as np
from libraries import raslib

covar_out = batch_dir + "phi_theta_lookup_lin_covar_training.csv"
weighted_cv_out = batch_dir + "rshmetalog_lin_weighted_cv.csv"

# covar type
globalBool = False
localBool = True
scaling_coef = 0.19546

# load img meta
hemimeta = pd.read_csv(batch_dir + 'rshmetalog.csv')
imsize = hemimeta.img_size_px[0]

# merge with covariant
var_in = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\mb_65\\dSWE\\19_045-19_050\\dswe_19_045-19_050_r.25m.tif'
var = raslib.raster_to_pd(var_in, 'covariant')
hemi_var = pd.merge(hemimeta, var, left_on=('x_utm11n', 'y_utm11n'), right_on=('x_coord', 'y_coord'), how='inner')

# load angle template
angle_lookup = pd.read_csv(batch_dir + "phi_theta_lookup.csv")
phi = np.full((imsize, imsize), np.nan)
phi[(np.array(angle_lookup.x_index), np.array(angle_lookup.y_index))] = angle_lookup.phi * 180 / np.pi  # radians to degrees
max_phi = 90  # in degrees

# filter to desired images
# hemiList = hemi_swe.loc[(hemi_swe.swe.values >= 0) & (hemi_swe.swe.values <= 150), :]
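# A minimal sketch (assumption, not part of the original workflow) of one way the
# phi lookup could be used: mask out pixels with no lookup entry or a zenith angle
# beyond max_phi, so later statistics only consider the usable zenith range.
# The names phi_mask and valid_pixel_count are hypothetical.
phi_mask = ~np.isnan(phi) & (phi <= max_phi)
valid_pixel_count = np.sum(phi_mask)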
def main():
    """
    Build grid of points with consistent indexing at various resolutions
    :return:
    """
    import pandas as pd
    import numpy as np
    from libraries import raslib
    import os

    batch_dir = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\hemi_grid_points\\mb_65_r.25m_snow_on_offset0\\'

    # build point list from DEM
    dem_in = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_052\\19_052_las_proc\\OUTPUT_FILES\\DEM\\interpolated\\19_052_dem_interpolated_r.25m.tif'  # snow-on
    # dem_in = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_las_proc\\OUTPUT_FILES\\DEM\\interpolated\\19_149_dem_interpolated_r.25m.tif'  # snow-off
    vertical_offset = 0

    mb_65_poly = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\mb_65_poly.shp'
    mb_15_poly = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\mb_15_poly.shp'
    uf_poly = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\upper_forest_poly_UTM11N.shp'
    uc_poly = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\site_library\\upper_clearing_poly_UTM11N.shp'

    # for plot mappings
    resolution = ['.05', '.10', '.25', '1.00']
    template_scheme = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_las_proc\\OUTPUT_FILES\\TEMPLATES\\19_149_all_point_density_r<RES>m.bil'

    # dem_in = 'C:\\Users\\jas600\\workzone\\data\\hemigen\\hemi_lookups\\19_149_dem_r1.00m_q0.25_interpolated_min1.tif'
    # site_poly = 'C:\\Users\\jas600\\workzone\\data\\hemigen\\hemi_lookups\\upper_forest_poly_UTM11N.shp'
    # batch_dir = 'C:\\Users\\jas600\\workzone\\data\\hemigen\\uf_1m_pr_0_os_0.5\\'

    # create batch dir if it does not exist
    if not os.path.exists(batch_dir):
        os.makedirs(batch_dir)

    pts = raslib.raster_to_pd(dem_in, 'z_m', include_nans=True)
    pts.z_m = pts.z_m + vertical_offset  # shift z_m by vertical offset

    # add point id
    pts = pts.reset_index()
    pts.columns = ['id', 'x_utm11n', 'y_utm11n', 'x_index', 'y_index', 'z_m']

    # # add flag for mb_65
    # load dem as template
    site_plot = raslib.raster_load(dem_in)
    # fill data with zeros
    site_plot.data = np.full((site_plot.rows, site_plot.cols), 0)
    # save to file
    mb_65_plot_dir = batch_dir + 'mb_65_plot_over_dem.tiff'
    raslib.raster_save(site_plot, mb_65_plot_dir, data_format='byte')
    # burn site polygon into plot data as ones
    raslib.raster_burn(mb_65_plot_dir, mb_65_poly, 1)
    # load plot data
    mb_65_plot = raslib.raster_load(mb_65_plot_dir)

    # # add flag for mb_15
    # load template
    site_plot = raslib.raster_load(dem_in)
    # fill data with zeros
    site_plot.data = np.full((site_plot.rows, site_plot.cols), 0)
    # save to file
    mb_15_plot_dir = batch_dir + 'mb_15_plot_over_dem.tiff'
    raslib.raster_save(site_plot, mb_15_plot_dir, data_format='byte')
    # burn site polygon into plot data as ones
    raslib.raster_burn(mb_15_plot_dir, mb_15_poly, 1)
    # load plot data
    mb_15_plot = raslib.raster_load(mb_15_plot_dir)

    # # add flag for upper forest (UF)
    # load template
    site_plot = raslib.raster_load(dem_in)
    # fill data with zeros
    site_plot.data = np.full((site_plot.rows, site_plot.cols), 0)
    # save to file
    uf_plot_dir = batch_dir + 'uf_plot_over_dem.tiff'
    raslib.raster_save(site_plot, uf_plot_dir, data_format='byte')
    # burn site polygon into plot data as ones
    raslib.raster_burn(uf_plot_dir, uf_poly, 1)
    # load plot data
    uf_plot = raslib.raster_load(uf_plot_dir)

    # merge plot data with points
    pts_index = (pts.y_index.values, pts.x_index.values)
    pts = pts.assign(mb_65=mb_65_plot.data[pts_index].astype(bool),
                     mb_15=mb_15_plot.data[pts_index].astype(bool),
                     uf=uf_plot.data[pts_index].astype(bool))

    # export point lookup as csv
    pts_dir = batch_dir + 'dem_r.25_points.csv'
    pts.to_csv(pts_dir, index=False)

    # format point ids as raster
    id_raster = raslib.raster_load(dem_in)
    id_raster.data = np.full([id_raster.rows, id_raster.cols], id_raster.no_data).astype(int)
    id_raster.data[pts_index] = pts.id
    # save id raster to file
    id_raster_out = batch_dir + 'dem_r.25_point_ids.tif'
    raslib.raster_save(id_raster, id_raster_out, data_format="int32")

    # point subsets
    pts_mb_65 = pts[pts.mb_65]
    pts_dir = batch_dir + 'dem_r.25_points_mb_65.csv'
    pts_mb_65.to_csv(pts_dir, index=False)

    pts_mb_15 = pts[pts.mb_15]
    pts_dir = batch_dir + 'dem_r.25_points_mb_15.csv'
    pts_mb_15.to_csv(pts_dir, index=False)

    pts_uf = pts[pts.uf]
    pts_dir = batch_dir + 'dem_r.25_points_uf.csv'
    pts_uf.to_csv(pts_dir, index=False)

    # create cookie cutters of sites for each resolution
    for rr in resolution:
        file_out = 'uf_plot_r' + rr + 'm.tif'
        site_poly = uf_poly
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='byte')
        raslib.raster_burn(ras_out, site_poly, 1)

    for rr in resolution:
        file_out = 'uc_plot_r' + rr + 'm.tif'
        site_poly = uc_poly
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='byte')
        raslib.raster_burn(ras_out, site_poly, 1)

    for rr in resolution:
        file_out = 'site_plots_r' + rr + 'm.tif'
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='uint16')
        raslib.raster_burn(ras_out, uf_poly, 1)
        raslib.raster_burn(ras_out, uc_poly, 2)

    for rr in resolution:
        file_out = 'mb_15_plot_r' + rr + 'm.tif'
        site_poly = mb_15_poly
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='byte')
        raslib.raster_burn(ras_out, site_poly, 1)

    for rr in resolution:
        file_out = 'mb_65_plot_r' + rr + 'm.tif'
        site_poly = mb_65_poly
        template_in = template_scheme.replace('<RES>', rr)
        ras = raslib.raster_load(template_in)
        ras.data = np.full((ras.rows, ras.cols), 0)
        ras.no_data = 0
        ras_out = batch_dir + file_out
        raslib.raster_save(ras, ras_out, data_format='byte')
        raslib.raster_burn(ras_out, site_poly, 1)
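# A minimal sketch (assumption, not in the original file) of running the grid
# build as a script:
if __name__ == '__main__':
    main()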
# hvplot scatter (df with columns 'col1' and 'col2' assumed from earlier context)
import hvplot.pandas  # registers the .hvplot accessor on DataFrames
import holoviews as hv
from bokeh.plotting import show

plot = df.hvplot(kind="scatter", x="col1", y="col2")
show(hv.render(plot))

# basic datashader
import datashader as ds
import pandas as pd
import numpy as np
import datashader.transfer_functions as tf
from datashader.utils import export_image
from libraries import raslib

hs_in = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\products\\hs\\19_052\\hs_19_052_res_.10m.tif"
dnt_in = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_snow_off\\OUTPUT_FILES\\DNT\\19_149_snow_off_627975_5646450_spike_free_chm_.10m_kho_distance_.10m.tif"
img_out = "C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\graphics\\ds_test_hs_vs_dnt.png"

# load parent raster as points, then sample the child raster at those points
parent = raslib.raster_to_pd(hs_in, 'hs')
merged = raslib.pd_sample_raster(parent, dnt_in, 'dnt')

# quick matplotlib scatter for comparison
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
plt.scatter(merged.hs, merged.dnt)

# datashader aggregation of the same point cloud
cvs = ds.Canvas(plot_width=400, plot_height=400)
agg = cvs.points(merged, 'hs', 'dnt', agg=ds.count('dnt'))
img = tf.shade(agg, cmap=['lightblue', 'darkblue'], how='log')
export_image(img, img_out)

#####
# datashader + holoviews + matplotlib
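# A minimal sketch (assumption, not the original implementation) of the
# datashader + holoviews + matplotlib combination named above: datashade the
# hs/dnt points through holoviews, then render with the matplotlib backend.
# The output filename is hypothetical.
from holoviews.operation.datashader import datashade

hv.extension('matplotlib')
points = hv.Points(merged, kdims=['hs', 'dnt'])
shaded = datashade(points, dynamic=False)  # returns an hv.RGB element
fig = hv.render(shaded, backend='matplotlib')  # matplotlib Figure
fig.savefig(img_out.replace('.png', '_hv.png'))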
# it appears that the x
# check out the image of both...
import numpy as np

data = data_4
img = np.full((np.max(data.y_index) + 1, np.max(data.x_index) + 1), np.nan)
img[data.y_index, data.x_index] = data.count_4

import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
plt.imshow(img, interpolation='nearest')

ras = raslib.raster_load(ras_1_in)

# [y_index, x_index] = ~T * [x_coord, y_coord]
data_1 = raslib.raster_to_pd(ras_1_in, 'count_1')
train = ~ras.T1 * (data_1.x_coord, data_1.y_coord)
np.max(np.array(data_1.x_index) - np.array(train[1]))
np.max(np.array(data_1.y_index) - np.array(train[0]))

# [x_coord, y_coord] = T * [y_index, x_index]
peace = ras.T1 * (data_1.x_index, data_1.y_index)
np.all(np.array(data_1.x_coord) == np.array(peace[0]))
np.all(np.array(data_1.y_coord) == np.array(peace[1]))

# plot of count_1 and count_2 in parent index
data_5 = raslib.pd_sample_raster(None, None, ras_5_in,
import numpy as np
from PIL import Image
import pandas as pd
from libraries import raslib

# batch_dir = 'C:\\Users\\Cob\\index\\educational\\usask\\research\\masters\\data\\lidar\\19_149\\19_149_snow_off\\OUTPUT_FILES\\synthetic_hemis\\uf_1m_pr_.15_os_10\\outputs\\'
batch_dir = 'C:\\Users\\jas600\\workzone\\data\\hemigen\\mb_15_1m_pr.15_os10\\outputs\\'
imsize = 1000

globalBool = True
localBool = True

# load img meta
hemimeta = pd.read_csv(batch_dir + 'hemimetalog.csv')

# merge with swe
swe_in = 'C:\\Users\\jas600\\workzone\\data\\cross_covar\\swe_19_045_r1.00m_q0.25.tif'
swe = raslib.raster_to_pd(swe_in, 'swe')
hemi_swe = pd.merge(hemimeta, swe, left_on=('x_utm11n', 'y_utm11n'), right_on=('x_coord', 'y_coord'), how='inner')

# filter to desired images
hemiList = hemi_swe.loc[(hemi_swe.swe.values >= 0) & (hemi_swe.swe.values <= 150), :]

# covar type

# stack binary canopy data
threshold = 128
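# A minimal sketch (assumption, not the original code) of stacking binary canopy
# data from the filtered hemispheres: load each image, threshold it at `threshold`
# (pixel value >= 128 treated as canopy), and stack the boolean masks into a 3-D
# array. The 'file_name' column of hemimetalog.csv is a guess at the real column.
imstack = np.full((imsize, imsize, len(hemiList)), False)
for ii, fname in enumerate(hemiList.file_name):
    im = Image.open(batch_dir + fname).convert('L')  # grayscale
    imstack[:, :, ii] = np.array(im) >= threshold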