def main():
    """CLI entry point: compute drainage area for each reach in a network."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('network', help='Output network ShapeFile path', type=argparse.FileType('r'))
    arg_parser.add_argument('flowaccum', help='Flow accumulation raster', type=argparse.FileType('r'))
    arg_parser.add_argument('buffer', help='Distance to buffer reach midpoint', type=float)
    arg_parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    cli = dotenv.parse_args_env(arg_parser)

    # The log file lives next to the network ShapeFile
    log = Logger("Reach Flow Accum")
    log.setup(logPath=os.path.join(os.path.dirname(cli.network.name), "reach_flowaccum.log"), verbose=cli.verbose)

    try:
        reach_drainage_area(cli.network.name, cli.flowaccum.name, cli.buffer)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def get_nhd_states(inpath):
    """
    Gets the list of US States that an NHD HUC encompasses

    This relies on the watershed boundary ShapeFile having a column called
    'States' that stores a comma separated list of state abbreviations
    such as 'OR,WA'. A dictionary is used to retrieve the full names.

    :param inpath: Path to the watershed boundary ShapeFile
    :return: List of full US state names that the watershed touches (e.g. Oregon)
    """
    log = Logger('RS Context')
    driver = ogr.GetDriverByName("ESRI Shapefile")
    data_source = driver.Open(inpath, 0)
    layer = data_source.GetLayer()

    states = []
    for feature in layer:
        value = feature.GetField('States')
        # IDIOM FIX: replaced a list comprehension used purely for its
        # append side effect with an explicit extend.
        states.extend(us_states[acronym] for acronym in value.split(','))
    data_source = None  # release the OGR dataset

    if 'Canada' in states:
        if len(states) == 1:
            log.error('HUC is entirely within Canada. No DEMs will be available.')
        else:
            log.warning('HUC is partially in Canada. Certain data will only be available for US portion.')

    log.info('HUC intersects {} state(s): {}'.format(len(states), ', '.join(states)))

    # Dedupe while preserving order of first occurrence
    return list(dict.fromkeys(states))
def main():
    """CLI entry point: augment an RVD project XML and regenerate its report."""
    arg_parser = argparse.ArgumentParser(description='RVD XML Augmenter')
    arg_parser.add_argument('out_project_xml', help='Input XML file', type=str)
    arg_parser.add_argument('in_xmls', help='Comma-separated list of XMLs in decreasing priority', type=str)
    arg_parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    cli = dotenv.parse_args_env(arg_parser)

    # Initiate the log file
    log = Logger('XML Augmenter')
    log.setup(verbose=cli.verbose)
    log.title('XML Augmenter: {}'.format(cli.out_project_xml))

    try:
        out_prj = RSProject(None, cli.out_project_xml)
        out_prj.rs_meta_augment(cli.in_xmls.split(','), lyrs_in_out)
        out_prj.XMLBuilder.write()

        # Regenerate the HTML report referenced by the project XML
        report_path = out_prj.XMLBuilder.find('.//HTMLFile[@id="RVD_REPORT"]/Path').text
        report = RVDReport(os.path.join(out_prj.project_dir, report_path), out_prj)
        report.write()
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """ Combined FIS CLI entry point """
    parser = argparse.ArgumentParser()
    parser.add_argument('database', help='BRAT SQLite database', type=argparse.FileType('r'))
    parser.add_argument('maxdrainage', help='Maximum drainage area', type=float)
    parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    args = dotenv.parse_args_env(parser)

    # Initiate the log file.
    # BUG FIX: the original built the log path from args.network, an argument
    # this parser never defines, so the tool crashed with AttributeError.
    logg = Logger("Combined FIS")
    logfile = os.path.join(os.path.dirname(args.database.name), "combined_fis.log")
    logg.setup(logPath=logfile, verbose=args.verbose)

    try:
        combined_fis(args.database.name, 'existing', 'EX', args.maxdrainage)
        # combined_fis(args.database.name, 'historic', 'HPE', args.maxdrainage)
    except Exception as ex:
        logg.error(ex)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """ Vegetation Suitability CLI entry point """
    parser = argparse.ArgumentParser()
    parser.add_argument('database', help='BRAT database path', type=argparse.FileType('r'))
    # BUG FIX: the call below referenced args.raster and args.table, neither of
    # which was declared, so the tool always crashed with AttributeError.
    # NOTE(review): confirm these argument names/order against the
    # vegetation_suitability() signature.
    parser.add_argument('raster', help='Vegetation raster', type=argparse.FileType('r'))
    parser.add_argument('buffer', help='buffer distance (metres)', type=float)
    parser.add_argument('epoch', help='Existing or Historic', type=str)
    parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    args = dotenv.parse_args_env(parser)

    # Initiate the log file
    logg = Logger('Veg Summary')
    logfile = os.path.join(os.path.dirname(args.database.name), 'vegetation_summary.log')
    logg.setup(logPath=logfile, verbose=args.verbose)

    try:
        # NOTE(review): 'epoch' stands in for the previously undefined
        # args.table — verify which lookup table the epoch selects.
        vegetation_suitability(args.database.name, args.raster.name, args.buffer, args.epoch)
    except Exception as e:
        logg.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """ Main hydrology routine """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('database', help='BRAT SQLite database', type=str)
    arg_parser.add_argument('prefix', help='Q2 or Low prefix', type=str)
    arg_parser.add_argument('huc', help='HUC identification code', type=str)
    arg_parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    cli = dotenv.parse_args_env(arg_parser)

    # Write the log next to the database
    log = Logger("Hydrology")
    log.setup(logPath=os.path.join(os.path.dirname(cli.database), "hydrology.log"), verbose=cli.verbose)

    try:
        hydrology(cli.database, cli.prefix, cli.huc)
    except Exception as ex:
        log.error(ex)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """CLI entry point: load NHD data for a list of VPUs."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('vpuids', help='Comma separated list of VPUs to process', type=str)
    arg_parser.add_argument('data_folder', help='Top level data folder containing riverscapes context projects', type=str)
    # parser.add_argument('user_name', help='Postgres user name', type=str)
    # parser.add_argument('password', help='Postgres password', type=str)
    cli = dotenv.parse_args_env(arg_parser, os.path.join(os.path.dirname(__file__), '.env'))

    # Initiate the log file
    log = Logger('Load NHD')
    log.setup(logPath=os.path.join(cli.data_folder, 'load_nhd.log'), verbose=True)

    try:
        load_nhd(cli.vpuids, cli.data_folder)  # , args.user_name, args.password)
        log.info('Process completed successfully')
    except Exception as ex:
        log.error(ex)
def main():
    """CLI entry point: generate Land Surface Temperature XML files."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('lst_xml_folder', help='Top level data folder containing LST data', type=str)
    arg_parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    cli = dotenv.parse_args_env(arg_parser, os.path.join(os.path.dirname(__file__), '.env'))

    # Initiate the log file
    log = Logger('Land Surface Temperature XML Generator')
    log.setup(logPath=os.path.join(os.path.dirname(cli.lst_xml_folder), 'lst_xml.log'), verbose=cli.verbose)

    try:
        process_lst(cli.lst_xml_folder)
        log.info('Process completed successfully')
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """GNAT CLI entry point."""
    parser = argparse.ArgumentParser(description='GNAT')
    parser.add_argument('huc', help='HUC identifier', type=str)
    parser.add_argument('output_folder', help='Output folder', type=str)
    parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    args = dotenv.parse_args_env(parser)

    # Initiate the log file.
    # BUG FIX: the original used args.output (undefined; the argument is
    # output_folder) and args.hu (typo for args.huc) — both raised
    # AttributeError at runtime.
    log = Logger("GNAT")
    log.setup(logPath=os.path.join(args.output_folder, "gnat.log"), verbose=args.verbose)
    log.title('GNAT For HUC: {}'.format(args.huc))

    try:
        gnat(args.huc, args.output_folder)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """CLI entry point: build a VRT from image files found in a folder."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('dir', help='Folder to search for image files', type=str)
    arg_parser.add_argument('vrt', help='Output path for VRT file', type=str)
    arg_parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    cli = arg_parser.parse_args()

    # make sure the output folder exists
    out_dir = os.path.dirname(cli.vrt)
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    # Initiate the log file
    log = Logger("Build VRT")
    log.setup(logPath=os.path.join(out_dir, "build_vrt.log"), verbose=cli.verbose)

    try:
        build_vrt(cli.dir, cli.vrt)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def run_subprocess(cwd: str, cmd: List[str]):
    """Run a command in a subprocess, streaming its output to the log.

    Args:
        cwd: working directory in which to run the command
        cmd: command and arguments as a list (no shell involved)

    Returns:
        The process return code (0 on success).
    """
    log = Logger("Subprocess")
    log.info('Running command: {}'.format(' '.join(cmd)))

    # Realtime logging from subprocess
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)

    # Stream stdout first, then stderr.
    # NOTE(review): draining stdout to EOF before touching stderr can deadlock
    # if the child fills the stderr pipe buffer first — confirm callers'
    # commands are not stderr-heavy, or move to threads/communicate().
    for output in iter(process.stdout.readline, b''):
        for line in output.decode('utf-8').split('\n'):
            if len(line) > 0:
                log.info(line)

    for errout in iter(process.stderr.readline, b''):
        for line in errout.decode('utf-8').split('\n'):
            if len(line) > 0:
                log.error(line)

    # BUG FIX: poll() can return None if the process has not been reaped yet;
    # wait() blocks until exit and always yields the real return code.
    retcode = process.wait()
    if retcode != 0:
        # != 0 also catches negative codes (process killed by a signal on POSIX)
        log.error('Process returned with code {}'.format(retcode))

    return retcode
def main():
    """ Main BRAT Run entry point """
    parser = argparse.ArgumentParser(description='Run brat against a pre-existing sqlite db:')
    parser.add_argument('project', help='Riverscapes project folder or project xml file', type=str, default=None)
    # BUG FIX: --csv_dir is documented as a directory path but was declared
    # with action='store_true', making it an unusable boolean flag.
    parser.add_argument('--csv_dir', help='(optional) directory where we can find updated lookup tables', type=str, default=None)
    parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    parser.add_argument('--debug', help='(optional) more output about things like memory usage. There is a performance cost', action='store_true', default=False)

    args = dotenv.parse_args_env(parser)

    # The log goes next to the project XML (or in the project folder)
    if os.path.isfile(args.project):
        logpath = os.path.dirname(args.project)
    elif os.path.isdir(args.project):
        logpath = args.project
    else:
        raise Exception('You must supply a valid path to a riverscapes project')

    log = Logger('BRAT Run')
    log.setup(logPath=os.path.join(logpath, "brat_run.log"), verbose=args.verbose)
    log.title('BRAT Run Tool')

    try:
        if args.debug is True:
            # Run under the memory profiler when --debug is set
            from rscommons.debug import ThreadRun
            memfile = os.path.join(logpath, 'brat_run_memusage.log')
            retcode, max_obj = ThreadRun(brat_run, memfile, args.project, args.csv_dir)
            log.debug('Return code: {}, [Max process usage] {}'.format(retcode, max_obj))
        else:
            brat_run(args.project, args.csv_dir)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """RVD CLI entry point."""
    parser = argparse.ArgumentParser(description='RVD')
    parser.add_argument('huc', help='HUC identifier', type=str)
    parser.add_argument('flowlines', help='Segmented flowlines input.', type=str)
    parser.add_argument('existing', help='National existing vegetation raster', type=str)
    parser.add_argument('historic', help='National historic vegetation raster', type=str)
    parser.add_argument('valley_bottom', help='Valley bottom (.shp, .gpkg/layer_name)', type=str)
    parser.add_argument('output_folder', help='Output folder input', type=str)
    parser.add_argument('--reach_codes', help='Comma delimited reach codes (FCode) to retain when filtering features. Omitting this option retains all features.', type=str)
    parser.add_argument('--flow_areas', help='(optional) path to the flow area polygon feature class containing artificial paths', type=str)
    parser.add_argument('--waterbodies', help='(optional) waterbodies input', type=str)
    parser.add_argument('--meta', help='riverscapes project metadata as comma separated key=value pairs', type=str)
    parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    parser.add_argument('--debug', help="(optional) save intermediate outputs for debugging", action='store_true', default=False)

    args = dotenv.parse_args_env(parser)

    reach_codes = args.reach_codes.split(',') if args.reach_codes else None
    meta = parse_metadata(args.meta)

    # Initiate the log file
    log = Logger("RVD")
    log.setup(logPath=os.path.join(args.output_folder, "rvd.log"), verbose=args.verbose)
    log.title('RVD For HUC: {}'.format(args.huc))

    try:
        if args.debug is True:
            # Run under the memory profiler when --debug is set.
            # BUG FIX: the memory log path used args.output_dir, which this
            # parser never defines (the argument is output_folder).
            from rscommons.debug import ThreadRun
            memfile = os.path.join(args.output_folder, 'rvd_mem.log')
            retcode, max_obj = ThreadRun(rvd, memfile, args.huc, args.flowlines, args.existing, args.historic, args.valley_bottom, args.output_folder, reach_codes, args.flow_areas, args.waterbodies, meta=meta)
            log.debug('Return code: {}, [Max process usage] {}'.format(retcode, max_obj))
        else:
            rvd(args.huc, args.flowlines, args.existing, args.historic, args.valley_bottom, args.output_folder, reach_codes, args.flow_areas, args.waterbodies, meta=meta)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def safe_remove_dir(dir_path):
    """Remove a directory tree without raising on failure.

    Args:
        dir_path: directory to remove
    """
    log = Logger("safe_remove_dir")
    try:
        # ignore_errors suppresses most failures; the except is a last resort
        shutil.rmtree(dir_path, ignore_errors=True)
        log.debug('Directory removed: {}'.format(dir_path))
    except Exception as err:
        log.error(str(err))
def safe_remove_file(file_path):
    """Remove a file without throwing an error

    Args:
        file_path: path of the file to remove
    """
    log = Logger("safe_remove_file")
    try:
        if not os.path.isfile(file_path):
            log.warning('File not found: {}'.format(file_path))
            # BUG FIX: bail out here; the original fell through to os.remove(),
            # which raised for the missing file and logged a spurious error.
            return
        os.remove(file_path)
        log.debug('File removed: {}'.format(file_path))
    except Exception as e:
        log.error(str(e))
def file_compare(file_a, file_b, md5=True):
    """Compare two files, first by size and then (optionally) by MD5 digest.

    Args:
        file_a: path to the first file
        file_b: path to the second file
        md5: when False only the file sizes are compared

    Returns:
        True if the files are considered identical, False otherwise
        (False is also returned on any I/O error).
    """
    log = Logger("file_compare")
    log.debug('Comparing: {} {}'.format(file_a, file_b))

    try:
        # If the file sizes aren't the same then there's
        # no reason to do anything more
        a_stats = os.stat(file_a)
        b_stats = os.stat(file_b)
        if a_stats.st_size != b_stats.st_size:
            # BUG FIX: the original message had empty {:,} placeholders with no
            # format arguments, which raised IndexError instead of logging.
            log.debug('Files are NOT the same size: {:,} vs. {:,}'.format(a_stats.st_size, b_stats.st_size))
            return False

        # If we want this to be a quick-compare and not do MD5 then we just
        # do the file size and leave it at that
        if not md5:
            return True

        def _md5_of(path):
            """MD5 hex digest of an entire file."""
            hasher = hashlib.md5()
            with open(path, 'rb') as fobj:
                hasher.update(fobj.read())
            return str(hasher.hexdigest())

        if _md5_of(file_a) == _md5_of(file_b):
            log.debug('File MD5 hashes match')
            return True

        log.debug('File MD5 hashes DO NOT match')
        return False
    except Exception as e:
        # BUG FIX: the original passed the message and exception as two
        # arguments; the string must be formatted before logging.
        log.error('Error comparing files: {}'.format(str(e)))
        return False
def main():
    """CLI entry point for the Floodplain Connectivity (BETA) tool."""
    # TODO Add transportation networks to vbet inputs
    # TODO Prepare clipped NHD Catchments as vbet polygons input
    arg_parser = argparse.ArgumentParser(description='Floodplain Connectivity (BETA)')
    arg_parser.add_argument('vbet_network', help='Vector line network', type=str)
    arg_parser.add_argument('vbet_polygon', help='Vector polygon layer', type=str)
    arg_parser.add_argument('roads', help='Vector line network', type=str)
    arg_parser.add_argument('railroads', help='Vector line network', type=str)
    arg_parser.add_argument('output_dir', help='Folder where output project will be created', type=str)
    arg_parser.add_argument('--debug_gpkg', help='Debug geopackage', type=str)
    arg_parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    cli = dotenv.parse_args_env(arg_parser)

    # make sure the output folder exists
    safe_makedirs(cli.output_dir)

    # Initiate the log file
    log = Logger('FLOOD_CONN')
    log.setup(logPath=os.path.join(cli.output_dir, 'floodplain_connectivity.log'), verbose=cli.verbose)
    log.title('Floodplain Connectivity (BETA)')

    try:
        floodplain_connectivity(cli.vbet_network, cli.vbet_polygon, cli.roads, cli.railroads, cli.output_dir, cli.debug_gpkg)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def update_database(db_path, csv_path):
    """ Update the lookup tables from CSV files in a path

    Arguments:
        db_path: path to an existing BRAT SQLite database
        csv_path: folder containing lookup CSVs; falls back to the
            database/data folder shipped with the source

    Raises:
        Exception: if there is no database at db_path

    Returns:
        db_path, for caller convenience
    """
    log = Logger('DatabaseUpdate')

    csv_path = csv_path if csv_path else os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', '..', 'database', 'data')

    if not os.path.isfile(db_path):
        raise Exception('No existing db found at path: {}'.format(db_path))

    log.info('Updating SQLite database at {0}'.format(db_path))

    conn = sqlite3.connect(db_path)
    try:
        conn.execute('PRAGMA foreign_keys = ON;')
        conn.row_factory = dict_factory
        curs = conn.cursor()

        try:
            huc = conn.execute('SELECT WatershedID FROM vwReaches GROUP BY WatershedID').fetchall()[0]['WatershedID']
        except Exception as e:
            log.error('Error retrieving HUC from DB')
            raise e

        # Load lookup table data into the database
        load_lookup_data(db_path, csv_path)

        # Updating the database reloads ALL watersheds. Keep only the
        # designated watershed for this run.
        curs.execute('DELETE FROM Watersheds WHERE WatershedID <> ?', [huc])
        conn.commit()

        conn.execute("VACUUM")
    finally:
        # BUG FIX: the original never closed the connection (resource leak)
        conn.close()

    return db_path
def main():
    """CLI entry point: segment a stream network at a fixed interval."""
    parser = argparse.ArgumentParser()
    parser.add_argument('network', help='Input stream network ShapeFile path', type=str)
    parser.add_argument('segmented', help='Output segmented network ShapeFile path', type=str)
    parser.add_argument('interval', help='Interval distance at which to segment the network', type=float)
    parser.add_argument('minimum', help='Minimum feature length in the segmented network', type=float)
    # BUG FIX: the call below passes args.tolerance, which was never declared,
    # so the tool always crashed with AttributeError.
    # NOTE(review): confirm a sensible default against segment_network()'s signature.
    parser.add_argument('--tolerance', help='(optional) simplification tolerance', type=float, default=None)
    parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    args = dotenv.parse_args_env(parser)

    # Initiate the log file
    logg = Logger("Segment Network")
    logfile = os.path.join(os.path.dirname(args.segmented), "segment_network.log")
    logg.setup(logPath=logfile, verbose=args.verbose)

    # Remove any pre-existing output through the driver so sidecars go too
    if os.path.isfile(args.segmented):
        logg.info('Deleting existing output {}'.format(args.segmented))
        shpDriver = ogr.GetDriverByName("ESRI Shapefile")
        shpDriver.DeleteDataSource(args.segmented)

    try:
        segment_network(args.network, args.segmented, args.interval, args.minimum, args.tolerance)
    except Exception as e:
        logg.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def calculate_hydrology(reaches: dict, equation: str, params: dict, drainage_conversion_factor: float, field: str) -> dict:
    """ Perform the actual hydrology calculation

    Args:
        reaches: dict keyed by ReachID; each value must carry an 'iGeo_DA' drainage area
        equation: hydrology equation evaluated once per reach
        params: equation parameters; the DRNAREA_PARAM entry is overwritten per reach
        drainage_conversion_factor: converts iGeo_DA into the equation's units
        field: output field name under which each result is stored

    Raises:
        ex: re-raises whatever the equation evaluation raised

    Returns:
        dict mapping ReachID to {field: computed discharge}
    """
    results = {}

    log = Logger('Hydrology')

    try:
        # Loop over each reach
        for reachid, values in reaches.items():
            # Use the drainage area for the current reach and convert to the units used in the equation
            params[DRNAREA_PARAM] = values['iGeo_DA'] * drainage_conversion_factor

            # Execute the equation but restrict the use of all built-in functions.
            # WARNING: eval() is only as safe as the source of `equation`; it must
            # come from trusted configuration, never from user input.
            eval_result = eval(equation, {'__builtins__': None}, params)
            results[reachid] = {field: eval_result}

    except Exception as ex:
        # IDIOM FIX: plain loop instead of a side-effect list comprehension.
        # Dump the parameters to help diagnose the failing equation.
        for param, value in params.items():
            log.warning('{}: {}'.format(param, value))
        log.warning('Hydrology formula failed: {}'.format(equation))
        # BUG FIX: the original message had a {} placeholder but no argument
        log.error('Error calculating {} hydrology'.format(field))
        raise ex

    return results
class Splines(GeoSmtBase):

    def __init__(self):
        self.lg = GeoSmtBase()
        self.log = Logger('Splines')

    def compSplineKnots(self, x, y, s, k, nest=-1):
        """Find the B-spline representation of an N-dimensional curve
        using Scipy's splprep.

        :param s: smoothness parameter
        :param k: spline order
        :param nest: estimate of number of knots needed (-1 = maximal)
        :return: tuple of (spline representation, weighted sum of squared residuals)
        """
        tck_u, fp, ier, msg = splprep([x, y], s=s, k=k, nest=nest, full_output=1)

        # ier > 0 indicates splprep reported a problem; log its message
        if ier > 0:
            self.log.error("{0}. ier={1}".format(msg, ier))

        return (tck_u, fp)

    def compSplineEv(self, x, tck, zoom=10):
        """Evaluate the smoothing polynomial of a B-spline representation
        using Scipy's splev.

        :param tck: tuple (t, c, k): knots, B-spline coefficients and degree
        :param zoom: densification factor applied to the input point count
        :return: tuple of interpolated (x, y) coordinate arrays
        """
        dense_len = len(x) * zoom
        x_ip, y_ip = splev(np.linspace(0, 1, dense_len), tck)

        return (x_ip, y_ip)
def deleteRaster(sFullPath):
    """Delete a GTiff raster (and its sidecar files) if it exists.

    :param sFullPath: full path to the raster
    :return: None
    """
    log = Logger("Delete Raster")

    # Guard clause: nothing to do if the file isn't there
    if not path.isfile(sFullPath):
        log.debug("No raster file to delete at {0}".format(sFullPath))
        return

    try:
        # Delete the raster properly through its driver
        driver = gdal.GetDriverByName('GTiff')
        gdal.Driver.Delete(driver, sFullPath)
        log.debug("Raster Successfully Deleted: {0}".format(sFullPath))
    except Exception as e:
        log.error("Failed to remove existing clipped raster at {0}".format(sFullPath))
        raise e
def main():
    """CLI entry point: warp a raster to a new spatial reference."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('inraster', help='Input raster', type=str)
    arg_parser.add_argument('outraster', help='Output raster', type=str)
    arg_parser.add_argument('epsg', help='Output spatial reference EPSG', type=int)
    arg_parser.add_argument('clip', help='Polygon ShapeFile to clip the output raster', type=argparse.FileType('r'))
    arg_parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    cli = arg_parser.parse_args()

    # Initiate the log file
    log = Logger("Raster Warp")
    log.setup(logPath=os.path.join(os.path.dirname(cli.outraster), "raster_warp.log"))

    # make sure the output folder exists
    out_dir = os.path.dirname(cli.outraster)
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    # Remove any pre-existing output raster through the GTiff driver
    if os.path.isfile(cli.outraster):
        log.info('Deleting existing output raster: {}'.format(cli.outraster))
        driver = gdal.GetDriverByName('GTiff')
        gdal.Driver.Delete(driver, cli.outraster)

    try:
        raster_warp(cli.inraster, cli.outraster, cli.epsg, cli.clip.name if cli.clip else None)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """CLI entry point: process MODIS land surface temperature data."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('out_sqlite', help='output sqlite file', type=str)
    arg_parser.add_argument('modis_folder', help='Top level data folder containing MODIS data', type=str)
    arg_parser.add_argument('nhd_folder', help='Top level data folder containing nhd data', type=str)
    arg_parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    arg_parser.add_argument('--debug', help='(optional) a little extra logging ', action='store_true', default=False)
    cli = dotenv.parse_args_env(arg_parser, os.path.join(os.path.dirname(__file__), '.env'))

    # Initiate the log file alongside the output sqlite
    log = Logger('Land Surface Temperature')
    log.setup(logPath=os.path.join(os.path.dirname(cli.out_sqlite), os.path.splitext(cli.out_sqlite)[0] + 'process_LST.log'), verbose=cli.verbose)

    try:
        process_modis(cli.out_sqlite, cli.modis_folder, cli.nhd_folder, cli.verbose, cli.debug)
        log.info('Process completed successfully')
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def safe_makedirs(dir_create_path):
    """safely, recursively make a directory

    Arguments:
        dir_create_path: directory path to create
    """
    log = Logger("MakeDir")

    # Safety check on path lengths
    if len(dir_create_path) < 5 or len(dir_create_path.split(os.path.sep)) <= 2:
        raise Exception('Invalid path: {}'.format(dir_create_path))

    if os.path.exists(dir_create_path) and os.path.isfile(dir_create_path):
        raise Exception('Can\'t create directory if there is a file of the same name: {}'.format(dir_create_path))

    # Guard clause: already there, nothing to do
    if os.path.exists(dir_create_path):
        return

    try:
        log.info('Folder not found. Creating: {}'.format(dir_create_path))
        os.makedirs(dir_create_path)
    except Exception as e:
        # Possible that something else made the folder while we were trying
        if not os.path.exists(dir_create_path):
            log.error('Could not create folder: {}'.format(dir_create_path))
            raise e
def main():
    """ Conservation CLI entry point """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('database', help='BRAT SQLite database', type=str)
    arg_parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    cli = dotenv.parse_args_env(arg_parser)

    # The log file goes next to the database
    log = Logger('Conservation')
    log.setup(logPath=os.path.join(os.path.dirname(cli.database), 'conservation.log'), verbose=cli.verbose)

    try:
        conservation(cli.database)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def build_topography(boundary, epsg, download_folder, unzip_folder, dem, slope=None, hillshade=None, clean_downloads=False, clean_intermediates=False):
    """Download DEM tiles and build the DEM plus optional derived rasters.

    Args:
        boundary: polygon defining the area of interest
        epsg: output spatial reference EPSG code
        download_folder: where DEM tiles are downloaded
        unzip_folder: where downloaded tiles are unzipped
        dem: output DEM raster path
        slope: (optional) output slope raster path
        hillshade: (optional) output hillshade raster path
        clean_downloads: accepted but not used by this routine
        clean_intermediates: remove the unzipped tile folders when done
    """
    log = Logger('Build Topo')

    # Only download the source tiles if at least one requested output is missing
    dem_parts = []
    if not os.path.isfile(dem) or slope and not os.path.isfile(slope) or hillshade and not os.path.isfile(hillshade):
        dem_parts = download_dem(boundary, epsg, 0.01, download_folder, unzip_folder)

    try:
        if not os.path.isfile(dem):
            raster_vrt_stitch(dem_parts, dem, epsg, boundary)

        if slope:
            build_derived_raster(boundary, epsg, dem_parts, slope, 'slope')

        if hillshade:
            build_derived_raster(boundary, epsg, dem_parts, hillshade, 'hillshade')
    except Exception as e:
        # BUG FIX: the original discarded the exception detail entirely.
        # NOTE(review): this deliberately does not re-raise (best-effort),
        # leaving partially-built outputs — confirm callers expect that.
        log.error('Error building topography: {}'.format(e))

    if clean_intermediates:
        try:
            for temp in dem_parts:
                shutil.rmtree(os.path.dirname(temp))
        except Exception as clean_ex:
            # Renamed from 'e' so the build error above is not shadowed
            log.error('Error cleaning topography intermediate files: {}'.format(clean_ex))
def raster_warp(inraster: str, outraster: str, epsg, clip=None, warp_options: dict = None):
    """
    Reproject a raster to a different coordinate system.
    :param inraster: Input dataset
    :param outraster: Output dataset
    :param epsg: Output spatial reference EPSG identifier
    :param clip: Optional Polygon dataset to clip the output.
    :param warp_options: Extra GDALWarpOptions.
    :return: None

    https://gdal.org/python/osgeo.gdal-module.html#WarpOptions
    """
    log = Logger('Raster Warp')

    # BUG FIX: the default was a shared mutable dict ({}); use a None
    # sentinel instead.
    if warp_options is None:
        warp_options = {}

    if os.path.isfile(outraster):
        log.info('Skipping raster warp because output exists {}'.format(outraster))
        return None

    log.info('Raster Warp input raster {}'.format(inraster))
    log.info('Raster Warp output raster {}'.format(outraster))
    log.info('Output spatial reference EPSG: {}'.format(epsg))

    output_folder = os.path.dirname(outraster)
    if not os.path.isdir(output_folder):
        os.mkdir(output_folder)

    # Warp to a temporary VRT first, then translate to a compressed GTiff
    warpvrt = os.path.join(os.path.dirname(outraster), 'temp_gdal_warp_output.vrt')

    log.info('Performing GDAL warp to temporary VRT file.')

    if clip:
        log.info('Clipping to polygons using {}'.format(clip))
        clip_ds, clip_layer = VectorBase.path_sorter(clip)
        warp_options_obj = gdal.WarpOptions(dstSRS='EPSG:{}'.format(epsg), format='vrt', cutlineDSName=clip_ds, cutlineLayer=clip_layer, cropToCutline=True, **warp_options)
    else:
        warp_options_obj = gdal.WarpOptions(dstSRS='EPSG:{}'.format(epsg), format='vrt', **warp_options)

    ds = gdal.Warp(warpvrt, inraster, options=warp_options_obj)

    log.info('Using GDAL translate to convert VRT to compressed raster format.')
    translateoptions = gdal.TranslateOptions(gdal.ParseCommandLine("-of Gtiff -co COMPRESS=DEFLATE"))
    gdal.Translate(outraster, ds, options=translateoptions)

    # Cleanup the temporary VRT file
    os.remove(warpvrt)

    if ds:
        log.info('Process completed successfully.')
    else:
        log.error('Error running GDAL Warp')
def main():
    """CLI entry point for the Confinement tool."""
    arg_parser = argparse.ArgumentParser(description='Confinement Tool')
    arg_parser.add_argument('huc', help='HUC identifier', type=str)
    arg_parser.add_argument('flowlines', help="NHD Flowlines (.shp, .gpkg/layer_name)", type=str)
    arg_parser.add_argument('confining_polygon', help='valley bottom or other polygon representing confining boundary (.shp, .gpkg/layer_name)', type=str)
    arg_parser.add_argument('output_folder', help='Output folder', type=str)
    arg_parser.add_argument('buffer_field', help='(optional) float field in flowlines with buffer values', default=None)
    arg_parser.add_argument('confinement_type', help='type of confinement', default="Unspecified")
    arg_parser.add_argument('--reach_codes', help='Comma delimited reach codes (FCode) to retain when filtering features. Omitting this option retains all features.', type=str)
    arg_parser.add_argument('--meta', help='riverscapes project metadata as comma separated key=value pairs', type=str)
    arg_parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    arg_parser.add_argument('--debug', help="(optional) save intermediate outputs for debugging", action='store_true', default=False)

    cli = dotenv.parse_args_env(arg_parser)

    # Initiate the log file
    log = Logger("Confinement")
    log.setup(logPath=os.path.join(cli.output_folder, "confinement.log"), verbose=cli.verbose)
    log.title('Confinement For HUC: {}'.format(cli.huc))

    meta = parse_metadata(cli.meta)
    reach_codes = cli.reach_codes.split(',') if cli.reach_codes else None

    try:
        if cli.debug is True:
            # Run under the memory profiler when --debug is set
            from rscommons.debug import ThreadRun
            memfile = os.path.join(cli.output_folder, 'confinement_mem.log')
            retcode, max_obj = ThreadRun(confinement, memfile, cli.huc, cli.flowlines, cli.confining_polygon, cli.output_folder, cli.buffer_field, cli.confinement_type, reach_codes, min_buffer=10.0, bankfull_expansion_factor=2.5, debug=cli.debug, meta=meta)
            log.debug('Return code: {}, [Max process usage] {}'.format(retcode, max_obj))
        else:
            confinement(cli.huc, cli.flowlines, cli.confining_polygon, cli.output_folder, cli.buffer_field, cli.confinement_type, reach_codes, min_buffer=10.0, bankfull_expansion_factor=2.5, debug=cli.debug, meta=meta)
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def verify_areas(raster_path, boundary_shp):
    """Compare the data area of a raster against the area of a boundary polygon.

    Arguments:
        raster_path {str} -- path to the raster
        boundary_shp {str} -- path to the boundary ShapeFile

    Raises:
        Exception: if the raster's data area is zero
        Exception: if the shapefile's total polygon area is zero

    Returns:
        float -- ratio of raster area over shape file area
    """
    log = Logger('Verify Areas')

    log.info('Verifying raster and shape areas')

    # This comes back in the raster's unit
    raster_area = 0
    with rasterio.open(raster_path) as ds:
        cell_count = 0
        gt = ds.get_transform()
        # Cell area from the geotransform pixel sizes (abs: y size is negative)
        cell_area = math.fabs(gt[1]) * math.fabs(gt[5])
        # Incrememntally add the area of a block to the count
        progbar = ProgressBar(len(list(ds.block_windows(1))), 50, "Calculating Area")
        progcount = 0
        for _ji, window in ds.block_windows(1):
            # masked=True so r.count() only tallies valid (non-nodata) cells
            r = ds.read(1, window=window, masked=True)
            progbar.update(progcount)
            cell_count += r.count()
            progcount += 1
        progbar.finish()
        # Multiply the count by the area of a given cell
        raster_area = cell_area * cell_count
        log.debug('raster area {}'.format(raster_area))

    if (raster_area == 0):
        raise Exception('Raster has zero area: {}'.format(raster_path))

    # We could just use Rasterio's CRS object but it doesn't seem to play nice with GDAL so....
    raster_ds = gdal.Open(raster_path)
    raster_srs = osr.SpatialReference(wkt=raster_ds.GetProjection())

    # Load and transform ownership polygons by adminstration agency
    driver = ogr.GetDriverByName("ESRI Shapefile")
    data_source = driver.Open(boundary_shp, 0)
    layer = data_source.GetLayer()
    in_spatial_ref = layer.GetSpatialRef()

    # Match axis order between the two SRS objects so the coordinate
    # transformation is valid under GDAL 3 axis-order rules.
    # https://github.com/OSGeo/gdal/issues/1546
    raster_srs.SetAxisMappingStrategy(in_spatial_ref.GetAxisMappingStrategy())
    transform = osr.CoordinateTransformation(in_spatial_ref, raster_srs)

    # Sum polygon areas in the raster's spatial reference so units match
    shape_area = 0
    for polygon in layer:
        geom = polygon.GetGeometryRef()
        geom.Transform(transform)
        shape_area = shape_area + geom.GetArea()

    log.debug('shape file area {}'.format(shape_area))
    if (shape_area == 0):
        raise Exception('Shapefile has zero area: {}'.format(boundary_shp))

    area_ratio = raster_area / shape_area
    # 90-99% coverage is a warning; below 90% is an error (but not fatal)
    if (area_ratio < 0.99 and area_ratio > 0.9):
        log.warning('Raster Area covers only {0:.2f}% of the shapefile'.format(area_ratio * 100))
    if (area_ratio <= 0.9):
        log.error('Raster Area covers only {0:.2f}% of the shapefile'.format(area_ratio * 100))
    else:
        log.info('Raster Area covers {0:.2f}% of the shapefile'.format(area_ratio * 100))

    return area_ratio