def __init__(self):
    WhiteboxTools.__init__(self)
    # self.set_verbose_mode(False)
    self.scratch_dir = os.path.join(os.getcwd(), 'intermediate_data', 'scratch')
    if not os.path.exists(self.scratch_dir):
        os.makedirs(self.scratch_dir)
    # Empty the scratch directory of any leftovers from a previous run.
    for file in os.listdir(self.scratch_dir):
        os.remove(os.path.join(self.scratch_dir, file))
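# For context, this __init__ belongs to a WhiteboxTools subclass. A minimal,
# self-contained sketch of how such a wrapper might be declared and used; the
# class name ScratchWBT is a hypothetical stand-in, since the original class
# declaration is not shown.
import os
from whitebox import WhiteboxTools

class ScratchWBT(WhiteboxTools):
    def __init__(self):
        WhiteboxTools.__init__(self)
        self.scratch_dir = os.path.join(os.getcwd(), 'intermediate_data', 'scratch')
        os.makedirs(self.scratch_dir, exist_ok=True)  # create if missing
        for f in os.listdir(self.scratch_dir):        # empty leftovers
            os.remove(os.path.join(self.scratch_dir, f))

wbt = ScratchWBT()  # scratch dir now exists and is empty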
def clip_lakes(s_file, wb_file, outfile):
    # Initialize whitebox tools
    wbt = WhiteboxTools()
    # Sets the Whitebox working directory
    # wbt.work_dir = ""
    # Clip the streams to exclude lakes
    wbt.erase(s_file, wb_file, outfile)
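# A hedged usage sketch; the file names below are hypothetical. wbt.erase
# removes from the first input everything covered by the second, so the
# output is the stream network with lake-covered reaches cut out.
clip_lakes('streams.shp', 'waterbodies.shp', 'streams_no_lakes.shp')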
def mosaic(fname, flist, preserve=True, verbose=False):
    print(f'mosaic: {fname} from {len(flist)} frags', flush=True)
    wbt = WhiteboxTools()
    wbt.verbose = verbose
    if len(flist) < 2:
        print('mosaic: fewer than 2 tifs', fname, flist, flush=True)
        return False
    # 'nn' selects nearest-neighbour resampling for the mosaic.
    wbt.mosaic(inputs=';'.join(flist), output=fname, method='nn')
    if not preserve:
        # Remove the source fragments once the mosaic is written.
        for f in flist:
            try:
                os.remove(f)
            except FileNotFoundError:
                pass
    return True
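# A hedged usage sketch with hypothetical tile names. The return value
# distinguishes a written mosaic (True) from a skipped single-tile call (False).
tiles = ['tile_01.tif', 'tile_02.tif', 'tile_03.tif']
if mosaic('county_dem.tif', tiles, preserve=False):
    print('mosaic written; fragments removed')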
def make_ditches():
    from whitebox import WhiteboxTools
    os.chdir('intermediate_data')
    md = os.getcwd()
    wbt = WhiteboxTools()
    # Vectorize each county's ditch raster into the scratch directory.
    for code in os.listdir('USLE'):
        path = os.path.join(md, 'USLE', code, 'ditches.tif')
        if os.path.exists(path):
            wbt.raster_to_vector_lines(
                path,
                os.path.abspath(os.path.join('scratch', f'{code}_ditches.shp')))
    os.chdir(md)
    # Merge the per-county shapefiles and drop fragments 10 m or shorter.
    ditches = gpd.pd.concat([
        gpd.read_file(os.path.join('scratch', f))
        for f in os.listdir('scratch') if f.endswith('.shp')
    ])
    ditches = ditches[ditches.geometry.length > 10]
    ditches.to_file('ditches')
    os.chdir('..')
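# A hedged usage sketch. make_ditches assumes it is launched from the project
# root, with per-county rasters at intermediate_data/USLE/<code>/ditches.tif
# (the layout implied by the function body); the read-back path is illustrative.
make_ditches()
merged = gpd.read_file(os.path.join('intermediate_data', 'ditches'))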
def make_soil_rasters(soils, categorical_columns, numeric_columns, county_code):
    '''Make rasters for soils data for the county of interest.

    args:
        soils: a GeoDataFrame.
        categorical_columns: list of strings - columns holding categorical values.
        numeric_columns: list of strings - columns holding numeric values.

    Returns:
        None. Data in all columns are encoded as rasters. Categorical columns
        are saved with a mapping (JSON dict) for their category names.
    '''
    wbt = WhiteboxTools()
    scratch_soils = os.path.join(os.getcwd(), 'scratch', 'soils.shp')
    # Encode categorical columns as integers and record the value mappings.
    for column in categorical_columns:
        soils, mapping = encode_as_ints(soils, column)
        with open(
                os.path.join(os.getcwd(), 'intermediate_data',
                             f'soils_{county_code}',
                             f'{column}_mapping.txt'), 'w+') as file:
            try:
                print(json.dumps(mapping), file=file)
            except Exception:
                print(mapping)
                raise
    soils.to_file(scratch_soils)
    for column in categorical_columns + numeric_columns:
        out_path = os.path.join(os.getcwd(), 'intermediate_data',
                                f'soils_{county_code}', f'{column}.tif')
        loading.ensure_dir()
        # Shapefile field names are truncated to 10 characters on save.
        wbt.vector_polygons_to_raster(scratch_soils,
                                      field=column[:10],
                                      output=out_path,
                                      cell_size=10)
    loading.ensure_dir()
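# A hedged usage sketch; the column names, county code, and input file are
# hypothetical placeholders.
soils_gdf = gpd.read_file('soils_55025.shp')
make_soil_rasters(soils_gdf,
                  categorical_columns=['drainage'],
                  numeric_columns=['K_factor'],
                  county_code='55025')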
def idw_job(path):
    if all_processed(path):
        return f'{path} processed'
    wbt = WhiteboxTools()
    wbt.verbose = False
    wbt.work_dir = path
    # Interpolate a 10 m grid from last-return points, excluding vegetation
    # (3-5), building (6), noise (7), and high-noise (18) classes.
    wbt.lidar_idw_interpolation(parameter='elevation',
                                returns='last',
                                resolution=10,
                                weight=1.0,
                                radius=20.0,
                                exclude_cls='3,4,5,6,7,18')
    return f'{path} complete'
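# Because each call sets its own work_dir, idw_job parallelizes cleanly across
# tile directories. A hedged sketch with hypothetical paths; when no input file
# is named, the Whitebox lidar tools process every LAS file in the working
# directory.
from multiprocessing import Pool

las_dirs = ['tiles/block_a', 'tiles/block_b']
with Pool(processes=2) as pool:
    for status in pool.map(idw_job, las_dirs):
        print(status)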
def geoprocess(fps, HUC12_code):
    '''Run all the wbt processes for the watershed.

    Used for running processes in multiple threads.
    '''
    start = time.perf_counter()
    loc_fps = set_local_fps(fps, HUC12_code)
    if os.path.exists(loc_fps['RKLS']):
        return
    wbt = WhiteboxTools()
    # make shapefile of local lakes
    wbt.clip(loc_fps['waterbodies'], loc_fps['wshed_bounds'],
             loc_fps['wshed_lakes'])
    if os.path.exists(loc_fps['wshed_lakes']):
        # Shrink lakes by 1 m so shoreline cells survive the erase step.
        lakes = gpd.read_file(loc_fps['wshed_lakes'])
        lakes['geometry'] = lakes.geometry.buffer(-1)
        lakes.to_file(loc_fps['wshed_lakes'])
        # erase lakes from the dem, for processing.
        wbt.erase_polygon_from_raster(loc_fps['dem'], loc_fps['wshed_lakes'],
                                      loc_fps['dem'])
    # prep_rasters_for_ls(loc_fps, wbt)
    # hydro-enforce the dem in two steps
    wbt.breach_single_cell_pits(loc_fps['dem'], loc_fps['pit_filled'])
    wbt.breach_depressions_least_cost(loc_fps['pit_filled'],
                                      loc_fps['pit_filled'], 1000,
                                      fill=True, max_cost=100)
    wbt.high_pass_filter(loc_fps['pit_filled'], loc_fps['high_pass'], 50, 50)
    wbt.d_inf_pointer(loc_fps['pit_filled'], loc_fps['pointer'])  # make pointer
    wbt.d_inf_flow_accumulation(loc_fps['pointer'], loc_fps['sca_full'],
                                pntr=True)  # calc sca
    wbt.slope(loc_fps['pit_filled'], loc_fps['slope'])  # calculate slope raster
    wbt.clip_raster_to_polygon(loc_fps['sca_full'], loc_fps['buffers'],
                               loc_fps['sca'], maintain_dimensions=True)
    # make k-factor raster
    Feature_to_Raster(loc_fps['soils'], loc_fps['K_factors'],
                      snap_raster=loc_fps['slope'], field_name='K_factor',
                      NoData_value=-9999, data_type=gdal.GDT_Float32)
    # make subset of the R factor raster
    wbt.clip_raster_to_polygon(
        os.path.join(main_dir, 'intermediate_data', 'r_factor.tif'),
        loc_fps['wshed_bounds'], loc_fps['R'])
    ditch_detection(loc_fps, -.25, 5000)
    stop = time.perf_counter()
    minutes = (stop - start) / 60
    with open(os.path.join(main_dir, 'intermediate_data', 'raster_log.txt'),
              'a+') as f:
        f.write(f'watershed {HUC12_code} time to perform: {minutes} minutes')
        f.write('\n')
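# The docstring mentions multi-threaded use. A hedged sketch of dispatching
# several watersheds; fps and the HUC12 codes are hypothetical placeholders.
from concurrent.futures import ThreadPoolExecutor

huc12_codes = ['070900020901', '070900020902']
with ThreadPoolExecutor(max_workers=2) as pool:
    futures = [pool.submit(geoprocess, fps, code) for code in huc12_codes]
    for fut in futures:
        fut.result()  # re-raise any worker exception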
The path to in_dir is given to the script as an input. Intermediate files are
stored in a directory following the same structure. Outputs are the swath map
and (optional) daily and monthly maps.
*******************************************************************************
"""

# Import modules
import numpy as np
import glob, jenkspy, os, subprocess, sys
from osgeo import gdal, osr
from whitebox import WhiteboxTools

wbt = WhiteboxTools()

#******************************THRESHOLD VALUES*******************************#
# B4 ToA reflectance
tb4 = 17
# B20 Sea Surface Temperature in ºC
tb20 = 1
# B7 ToA reflectance
tb7 = 3.5
# VIS mask standard score
tvis = 0.5
# Sea ice presence likelihood threshold
silt = 10

# Scaling - do NOT modify
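# As an illustration only (not this script's actual classifier logic), a
# reflectance threshold like tb4 would typically be applied as a boolean mask.
b4 = np.random.rand(4, 4) * 30  # hypothetical stand-in for the B4 ToA band
bright = b4 > tb4               # True where reflectance exceeds the threshold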
@author: eneemann
"""

# Import Libraries
import os
import time
from whitebox import WhiteboxTools

# Start timer and print start time in UTC
start_time = time.time()
readable_start = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
print("The script start time is {}".format(readable_start))
today = time.strftime("%Y%m%d")

# Set up whitebox tools
wbt = WhiteboxTools()
wbt.set_verbose_mode(True)
wbt.set_compress_rasters(True)

# Define variables
data_dir = r"C:\Users\eneemann\Desktop\Bountiful Lidar"
os.chdir(data_dir)
wbt.set_working_dir(data_dir)
outfile = 'hh_all.tif'

file_list = []
for file in os.listdir(data_dir):
    if 'hh' in file and file.endswith('.tif'):
        file_list.append(file)
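# With file_list assembled, a plausible next step is mosaicking the tiles into
# outfile. A hedged sketch of that continuation, not necessarily the script's
# actual next lines.
wbt.mosaic(inputs=';'.join(file_list), output=outfile)
print("Mosaic written to {}".format(os.path.join(data_dir, outfile)))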
def hillslopes(shed_shp, streams, out_dir):
    wbt = WhiteboxTools()
    ensure_dir()
    dem = os.path.join(out_dir, 'H2O_shed_DEM.tif')
    pit_filled = os.path.join(out_dir, 'pit_filled_dem.tif')
    pointer = os.path.join(out_dir, 'pointer.tif')
    streams_path = os.path.join(scratch_dir, 'streams_partial.tif')
    hillslope_path = os.path.join(out_dir, 'hillslopes.tif')
    ensure_dir()
    # Hydro-enforce the DEM, then build the D8 pointer it needs.
    wbt.breach_depressions(dem, pit_filled)
    ensure_dir()
    wbt.d8_pointer(pit_filled, pointer)
    watershed_streams(shed_shp, streams, out_dir)
    ensure_dir()
    hillslope_shp = os.path.join(out_dir, 'hillslopes.shp')
    assert same_raster_extents(pointer, streams_path)
    wbt.hillslopes(pointer, streams_path, hillslope_path)
    # Polygonize the hillslope raster and drop slivers of 20 square units or less.
    hillslope_gdf = polygonize(hillslope_path, mask=None)
    hillslope_gdf = hillslope_gdf[hillslope_gdf.geometry.area > 20]
    hillslope_gdf.to_file(hillslope_shp)
    d_inf_pointer = os.path.join(out_dir, 'd_inf.tif')
    slope_path = os.path.join(out_dir, 'slope.tif')
    wbt.d_inf_pointer(pit_filled, d_inf_pointer)
    wbt.slope(pit_filled, slope_path)
    # Clip the hillslope polygons to the buffer layer, overwriting in place.
    wbt.clip(hillslope_shp, os.path.join(out_dir, 'buffers.shp'), hillslope_shp)
    hillslope_gdf = gpd.read_file(hillslope_shp)
    # Clip each supporting raster to the buffer polygons, overwriting in place.
    for raster in [pit_filled, slope_path, d_inf_pointer]:
        wbt.clip_raster_to_polygon(raster, os.path.join(out_dir, 'buffers.shp'),
                                   raster)
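# A hedged usage sketch; the paths are hypothetical, and out_dir must already
# contain H2O_shed_DEM.tif and buffers.shp, which the function reads by name.
hillslopes('watershed_boundary.shp', 'streams.shp', 'out/wshed_01')
hs = gpd.read_file(os.path.join('out/wshed_01', 'hillslopes.shp'))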