def main():
    """CLI entry point: download measurement files for a workbench database.

    Exits 0 on success, the exception's returncode for known project errors,
    and 1 for anything else.
    """
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('workbench', help='Workbench database path', type=argparse.FileType('r'))
    parser.add_argument('outputfolder', help='Path to output folder', type=str)
    args = parser.parse_args()

    # Output folder must exist before the log file is created inside it.
    if not os.path.isdir(args.outputfolder):
        os.makedirs(args.outputfolder)

    # Initiate the log file
    logg = Logger("Measurement Downloader")
    logfile = os.path.join(args.outputfolder, "measurement_downloader.log")
    logg.setup(logPath=logfile, verbose=False)

    try:
        metric_downloader(args.workbench.name, args.outputfolder)
    except (DataException, MissingException, NetworkException) as e:
        # Exception class prints the relevant information
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError as e:
        # str(e) instead of the deprecated e.message (removed in Python 3)
        logg.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception as e:
        logg.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """CLI entry point: run the topomover sync process.

    Exits 0 on success, the exception's returncode for known project errors,
    and 1 for anything else.
    """
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('--jsonfile',
                        help='The sync file. Helps speed a process up to figure out which files to work with.',
                        default="topomover.json", type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    logg = Logger("CADExport")
    logfile = os.path.join(os.path.dirname(__file__), "TopoMover.log")
    # Honour the --verbose flag (the original hard-coded verbose=False,
    # silently ignoring the argument it declared).
    logg.setup(logPath=logfile, verbose=args.verbose)

    # boto3 debug logging is very chatty
    logging.getLogger("boto3").setLevel(logging.ERROR)

    try:
        topomover(args.jsonfile)
    except (MissingException, NetworkException, DataException) as e:
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def move_measurements(old_folder, new_folder):
    """Move every measurement file under old_folder into an 'AuxMeasurements'
    subfolder at the mirrored location under new_folder, then delete the
    directories left empty under old_folder.

    :param old_folder: root folder currently holding the measurement files
    :param new_folder: root folder to receive the relocated files
    """
    log = Logger('Move Measurements')
    log.setup(logPath=os.path.join(new_folder, datetime.now().strftime("%Y%m%d-%H%M%S") + 'move_measurements.log'))

    # Collect all file paths first so the tree is not being walked while renaming.
    measurements = []
    for dirpath, dirnames, filenames in os.walk(old_folder):
        for filename in filenames:
            measurements.append(os.path.join(dirpath, filename))
    log.info('{} measurement files to move'.format(len(measurements)))

    for meas in measurements:
        new_path = os.path.join(os.path.dirname(meas.replace(old_folder, new_folder)),
                                'AuxMeasurements', os.path.basename(meas))
        if not os.path.isdir(os.path.dirname(new_path)):
            os.makedirs(os.path.dirname(new_path))
        os.rename(meas, new_path)
        log.info('Moving {} to {}'.format(meas, new_path))

    # Remove empty directories bottom-up, so a parent that becomes empty once
    # its children are removed is cleaned up in the same pass. (The original
    # single top-down pass only caught leaf directories.)
    removed = 0
    for dirpath, dirnames, filenames in os.walk(old_folder, topdown=False):
        if not os.listdir(dirpath):
            os.rmdir(dirpath)
            removed += 1
    print(removed, 'empty folders')

    log.info('Process Complete')
def main():
    """CLI entry point: export hydro model GIS layers for a visit.

    Exits with the result of hydro_gis_export on success, the exception's
    returncode for known project errors, and 1 for anything else.
    """
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('visitID', help='Visit ID', type=int)
    parser.add_argument('outputfolder', help='Path to output folder', type=str)
    parser.add_argument('--hydroprojectxml', '-p', help='(optional) hydro project xml file', type=str)
    parser.add_argument('--topoprojectxml', '-t', help='(optional) topo project xml file', type=str)
    parser.add_argument('--datafolder', help='(optional) Top level folder containing Hydro Model Riverscapes projects', type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    # Create the folders BEFORE the log file is opened inside resultsFolder
    # (the original set the log up first, pointing at a possibly-missing folder).
    resultsFolder = os.path.join(args.outputfolder, "outputs")
    if not os.path.isdir(args.outputfolder):
        os.makedirs(args.outputfolder)
    if not os.path.isdir(resultsFolder):
        os.makedirs(resultsFolder)

    # Initiate the log file once (the original configured the same log twice).
    log = Logger("Program")
    logfile = os.path.join(resultsFolder, "hydro_gis.log")
    log.setup(logPath=logfile, verbose=args.verbose)

    try:
        result = hydro_gis_export(args.hydroprojectxml, args.topoprojectxml, resultsFolder)
        sys.exit(result)
    except (DataException, MissingException, NetworkException) as e:
        # Exception class prints the relevant information
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError as e:
        # str(e) instead of the deprecated e.message (removed in Python 3)
        log.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception as e:
        log.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
def main():
    """CLI entry point: generate site properties for a single site.

    Exits 0 on success, the exception's returncode for known project errors,
    and 1 for anything else.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('siteid', help='the id of the site to use (no spaces)', type=str)
    parser.add_argument('outputfolder', help='Output folder')
    parser.add_argument('--logfile', help='Get more information in your logs.', default="", type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    # Derive the working folders and make sure each exists.
    resultsFolder = os.path.join(args.outputfolder, "outputs")
    topoDataFolder = os.path.join(args.outputfolder, "inputs")
    for required_dir in (args.outputfolder, resultsFolder, topoDataFolder):
        if not os.path.isdir(required_dir):
            os.makedirs(required_dir)

    # Set up logging; default to a log inside the results folder.
    logfile = os.path.join(resultsFolder, "siteproperties.log") if args.logfile == "" else args.logfile
    logg = Logger("SiteProperties")
    logg.setup(logPath=logfile, verbose=args.verbose)

    try:
        sitePropsGenerator(args.siteid, resultsFolder, topoDataFolder, args.verbose)
    except (MissingException, NetworkException, DataException) as e:
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """CLI entry point: export CAD files for a visit's topo Riverscapes project.

    Exits 0 on success, the exception's returncode for known project errors,
    and 1 for anything else.
    """
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('visitID', help='the id of the site to use (no spaces)', type=str)
    parser.add_argument('outputfolder', help='Output folder')
    parser.add_argument('--datafolder', help='(optional) Top level folder containing TopoMetrics Riverscapes projects', type=str)
    parser.add_argument('--logfile', help='output log file.', default="", type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    resultsFolder = os.path.join(args.outputfolder, "outputs")

    # Create the folders BEFORE the log file is opened inside resultsFolder
    # (the original set the log up first, pointing at a possibly-missing folder).
    if not os.path.isdir(args.outputfolder):
        os.makedirs(args.outputfolder)
    if not os.path.isdir(resultsFolder):
        os.makedirs(resultsFolder)

    # Initiate the log file
    if args.logfile == "":
        logfile = os.path.join(resultsFolder, "cad_export.log")
    else:
        logfile = args.logfile
    logg = Logger("CADExport")
    logg.setup(logPath=logfile, verbose=args.verbose)

    try:
        # If we need to go get our own topodata.zip file and unzip it we do this
        if args.datafolder is None:
            topoDataFolder = os.path.join(args.outputfolder, "inputs")
            fileJSON, projectFolder = downloadUnzipTopo(args.visitID, topoDataFolder)
        # otherwise just pass in a path to existing data
        else:
            projectFolder = args.datafolder

        projectxml = os.path.join(projectFolder, "project.rs.xml")
        finalResult = export_cad_files(projectxml, resultsFolder)
    except (MissingException, NetworkException, DataException) as e:
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """CLI entry point: calculate auxiliary metrics for a visit.

    Exits 0 on success, the exception's returncode for known project errors,
    and 1 for anything else.
    """
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('visitID', help='Visit ID', type=int)
    parser.add_argument('outputfolder', help='Path to output folder', type=str)
    parser.add_argument('--datafolder',
                        help='(optional) Top level folder containing TopoMetrics Riverscapes projects',
                        type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    # Create the output folder BEFORE the log file is opened inside it
    # (the original set the log up first, pointing at a possibly-missing folder).
    # makedirs also creates args.outputfolder as an intermediate directory.
    resultsFolder = os.path.join(args.outputfolder, "outputs")
    if not os.path.isdir(resultsFolder):
        os.makedirs(resultsFolder)

    # Initiate the log file once (the original configured the same log twice).
    log = Logger("Program")
    logfile = os.path.join(resultsFolder, "aux_metrics.log")
    xmlfile = os.path.join(resultsFolder, "aux_metrics.xml")
    log.setup(logPath=logfile, verbose=args.verbose)

    try:
        runAuxMetrics(xmlfile, resultsFolder, args.visitID)
    except (DataException, MissingException, NetworkException) as e:
        # Exception class prints the relevant information
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError as e:
        # str(e) instead of the deprecated e.message (removed in Python 3)
        log.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception as e:
        log.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def champ_topo_checker(workbench, folder):
    """Download visit info and all visit/field-folder files for every CHaMP
    visit listed in the workbench database.

    Failures for a single visit are logged and skipped so the whole batch
    keeps running.

    :param workbench: path to the workbench sqlite database
    :param folder: root output folder for the downloaded file hierarchy
    """
    log = Logger('CHaMP Files')
    log.setup(logPath=os.path.join(folder, datetime.now().strftime("%Y%m%d-%H%M%S") + '_champ_files.log'))

    dbCon = sqlite3.connect(workbench)
    dbCurs = dbCon.cursor()
    # ProgramID 1 plus this ProtocolID list selects the visits of interest.
    dbCurs.execute('SELECT WatershedName, VisitYear, SiteName, VisitID' +
                   ' FROM vwVisits WHERE ProgramID = 1 AND ProtocolID IN (2030, 416, 806, 1966, 2020, 1955, 1880, 10036, 9999)' +
                   ' ORDER BY VisitYear, WatershedName')

    for row in dbCurs.fetchall():
        watershed = row[0]
        visit_year = row[1]
        site = row[2]
        visitID = row[3]

        # Local layout: <root>/<year>/<watershed>/<site>/VISIT_<id> (spaces stripped)
        visit_path = os.path.join(folder, str(visit_year), watershed.replace(' ', ''),
                                  site.replace(' ', ''), 'VISIT_{}'.format(visitID))
        log.info('Processing {}'.format(visit_path))

        if not os.path.isdir(visit_path):
            os.makedirs(visit_path)

        try:
            visit_data = APIGet('visits/{}'.format(visitID))

            # Write visit information to json file
            with open(os.path.join(visit_path, 'visit_info.json'), 'w') as json_file:
                json.dump(visit_data, json_file)

            # Loop over the two lists of folders per visit: field folders and visit folders
            for api_key, local_folder in {'fieldFolders': 'Field Folders', 'folders': 'Visit Folders'}.items():
                if api_key in visit_data and isinstance(visit_data[api_key], list):
                    for folder_name in visit_data[api_key]:
                        field_folder_path = os.path.join(visit_path, local_folder, folder_name['name'])
                        # second argument True — presumably an absolute-URL flag; TODO confirm against APIGet
                        field_folder_data = APIGet(folder_name['url'], True)
                        if isinstance(field_folder_data, dict) and 'files' in field_folder_data:
                            [download_file(file_dict, field_folder_path) for file_dict in field_folder_data['files']]

            # Get all the miscellaneous files for the visit
            [download_file(file_dict, os.path.join(visit_path, 'Files')) for file_dict in visit_data['files']]
        except Exception as e:
            # Best-effort: log and move to the next visit rather than aborting the batch.
            log.error('Error for visit {}: {}'.format(visitID, e))

    log.info('Process Complete')
def champ_topo_checker(workbench, folder):
    """Remove empty directories under the CHaMP data folder.

    :param workbench: path to the workbench sqlite database. Unused here; kept
        for interface compatibility (the duplicate-folder reconciliation that
        used it was already disabled in the original).
    :param folder: root of the local CHaMP data folder structure
    """
    log = Logger('CHaMP Files')
    log.setup(logPath=os.path.join(
        folder, datetime.now().strftime("%Y%m%d-%H%M%S") + '_champ_folder_check.log'))

    # Walk bottom-up so a parent that becomes empty after its empty children
    # are removed is cleaned up in the same pass. (The original single
    # top-down pass only removed leaf directories.)
    removed = 0
    for dirpath, dirnames, filenames in os.walk(folder, topdown=False):
        if not os.listdir(dirpath):
            os.rmdir(dirpath)
            removed += 1
    print(removed, 'empty folders')

    log.info('Process Complete')
def BatchRun(workbench, topoData, outputDir):
    """Run topo metrics over every topo project found under topoData, writing
    per-project results under outputDir.

    NOTE: Python 2 only (print statements below).

    :param workbench: path to the workbench sqlite database (opened, but the
        query that used it is commented out)
    :param topoData: folder containing topo Riverscapes projects
    :param outputDir: folder to receive per-project outputs
    """
    dbCon = sqlite3.connect(workbench)
    dbCurs = dbCon.cursor()
    # dbCurs.execute('SELECT VisitID, WatershedName, VisitYear, SiteName FROM vwMainVisitList WHERE (VisitID IN ({0}))'.format(','.join(map(lambda x: str(x), jdAux))))
    # for row in dbCurs.fetchall():

    log = Logger('Topo Metrics')
    log.setup(logPath=os.path.join(outputDir, "topo_metrics.log"), verbose=False)

    projects = getTopoProjects(topoData)
    print len(projects), 'topo projects found in', topoData

    # NOTE(review): 'YankeeFork' is hard-coded, and rootOutput is only printed —
    # the actual output paths below are derived from each project folder instead.
    rootOutput = os.path.join(outputDir, 'YankeeFork')
    print 'Outputing results to', rootOutput

    for project in projects:
        print(project)
        # if project[0] == 9028 or project[0] == 9027 or project[0] == 9023 or project[0] == 9022:
        #     continue

        # project[3] appears to be the project folder path and project[0] the
        # visit ID, based on usage below — TODO confirm against getTopoProjects.
        outputFolder = project[3].replace(topoData, outputDir)

        if not os.path.isdir(outputFolder):
            os.makedirs(outputFolder)

        # Generate a Channel Units JSON file using the ShapeFile as the truth
        jsonFilePath = os.path.join(outputFolder, 'channel_units.json')
        createChannelUnitsJSON(project[3], project[0], jsonFilePath)

        # Calculate topo metrics
        visitTopoMetrics(project[0], os.path.join(outputFolder, 'topo_metrics.xml'), project[3], jsonFilePath, None, dUnitDefs)

    print(projects)
def main():
    """CLI entry point: calculate bankfull metrics for a visit's topo project
    and write them to an XML file.

    Exits 0 on success, the exception's returncode for known project errors,
    and 1 for anything else.
    """
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('visitID', help='Visit ID', type=int)
    parser.add_argument('outputfolder', help='Path to output folder', type=str)
    parser.add_argument('--datafolder',
                        help='(optional) Top level folder containing TopoMetrics Riverscapes projects',
                        type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    resultsFolder = os.path.join(args.outputfolder, "outputs")

    # Create the folders BEFORE the log file is opened inside resultsFolder
    # (the original set the log up first, pointing at a possibly-missing folder).
    if not os.path.isdir(args.outputfolder):
        os.makedirs(args.outputfolder)
    if not os.path.isdir(resultsFolder):
        os.makedirs(resultsFolder)

    # Initiate the log file once (the original configured the same log twice).
    log = Logger("Program")
    logfile = os.path.join(resultsFolder, "bankfull_metrics.log")
    xmlfile = os.path.join(resultsFolder, "bankfull_metrics.xml")
    log.setup(logPath=logfile, verbose=args.verbose)

    try:
        # If we need to go get our own topodata.zip file and unzip it we do this
        if args.datafolder is None:
            topoDataFolder = os.path.join(args.outputfolder, "inputs")
            fileJSON, projectFolder = downloadUnzipTopo(args.visitID, topoDataFolder)
        # otherwise just pass in a path to existing data
        else:
            projectFolder = args.datafolder

        from lib.topoproject import TopoProject
        projectxml = os.path.join(projectFolder, "project.rs.xml")
        topo_project = TopoProject(projectxml)

        # Newer projects store the visit under the 'Visit' meta, older ones
        # under 'VisitID' — prefer the former.
        root = ET.parse(projectxml).getroot()
        visitid = root.findtext("./MetaData/Meta[@name='Visit']")
        if visitid is None:
            visitid = root.findtext("./MetaData/Meta[@name='VisitID']")

        finalResult = bankfull_metrics(topo_project.getpath("DEM"),
                                       topo_project.getpath("DetrendedDEM"),
                                       topo_project.getpath("Topo_Points"))
        write_bfmetrics_xml(finalResult, visitid, xmlfile)
        sys.exit(0)
    except (DataException, MissingException, NetworkException) as e:
        # Exception class prints the relevant information
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError as e:
        # str(e) instead of the deprecated e.message (removed in Python 3)
        log.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception as e:
        log.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
def main():
    """CLI entry point: calculate topo metrics for a visit.

    Exits 0 on success, the exception's returncode for known project errors,
    and 1 for anything else.
    """
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('visitID', help='Visit ID', type=int)
    parser.add_argument('outputfolder', help='Path to output folder', type=str)
    parser.add_argument('--channelunitsjson', help='(optional) json file to load channel units from', type=str)
    parser.add_argument('--workbenchdb', help='(optional) sqlite db to load channel units from', type=str)
    parser.add_argument('--datafolder',
                        help='(optional) Top level folder containing TopoMetrics Riverscapes projects',
                        type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    resultsFolder = os.path.join(args.outputfolder, "outputs")

    # Create the folders BEFORE the log file is opened inside resultsFolder
    # (the original set the log up first, pointing at a possibly-missing folder).
    if not os.path.isdir(args.outputfolder):
        os.makedirs(args.outputfolder)
    if not os.path.isdir(resultsFolder):
        os.makedirs(resultsFolder)

    # Initiate the log file
    logg = Logger("Program")
    logfile = os.path.join(resultsFolder, "topo_metrics.log")
    xmlfile = os.path.join(resultsFolder, "topo_metrics.xml")
    logg.setup(logPath=logfile, verbose=args.verbose)

    try:
        # If we need to go get our own topodata.zip file and unzip it we do this
        if args.datafolder is None:
            topoDataFolder = os.path.join(args.outputfolder, "inputs")
            fileJSON, projectFolder = downloadUnzipTopo(args.visitID, topoDataFolder)
        # otherwise just pass in a path to existing data
        else:
            projectFolder = args.datafolder

        dMetricsObj = visitTopoMetrics(args.visitID, xmlfile, projectFolder,
                                       args.channelunitsjson, args.workbenchdb)
    except (DataException, MissingException, NetworkException) as e:
        # Exception class prints the relevant information
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError as e:
        # str(e) instead of the deprecated e.message (removed in Python 3)
        logg.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception as e:
        logg.error(str(e))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def main():
    """Run one or more models on local CHaMP/AEM visits. Make sure command prompt is open with the appropriate environment for the model(s) to be run."""
    # NOTE(review): this function is Python 2 only ('print filename' below).
    parser = argparse.ArgumentParser(description=main.__doc__)
    parser.add_argument('outputfolder', help='Path to output folder', type=str)
    # One boolean switch per model to run on each matching visit.
    parser.add_argument('-v', '--validation', help="Run Validation", action='store_true', default=False)
    parser.add_argument('-m', '--topometrics', help="Run Topo Metrics", action='store_true', default=False)
    parser.add_argument('-y', '--hydroprep', help="Run Hydro Prep", action='store_true', default=False)
    parser.add_argument('-e', '--hydroexport', help="Run Hydro Model GIS export", action='store_true', default=False)
    parser.add_argument('-p', '--siteprops', help="Run Topo Site Properties", action='store_true', default=False)
    parser.add_argument('-a', '--topoauxmetrics', help="Run Topo + Aux Metrics", action='store_true', default=False)
    parser.add_argument('-c', '--cadexport', help="Run Cad Export", action='store_true', default=False)
    parser.add_argument('-s', '--substrate', help="Run Substrate Raster at D84", action='store_true', default=False)
    parser.add_argument('-b', '--bankfull', help='Run Bankfull Metrics', action='store_true', default=False)
    parser.add_argument('--sourcefolder', help='(optional) Top level folder containing Topo Riverscapes projects', type=str)
    parser.add_argument('--years', help='(Optional) Years. One or comma delimited', type=str)
    parser.add_argument('--watersheds', help='(Optional) Watersheds. One or comma delimited', type=str)
    parser.add_argument('--sites', help='(Optional) Sites. One or comma delimited', type=str)
    parser.add_argument('--visits', help='(Optional) Visits. One or comma delimited', type=str)
    parser.add_argument('--di', help="(Optional) Di values for substrate (default=84). One or comma delimited", type=str)
    parser.add_argument('--hydrofolder', help='(Optional) source folder for hydro model resutls (hydroexport only)', type=str)
    parser.add_argument('--logfile', help='(Optional) output log db for batches', type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    # Optional comma-delimited filters; None means no filtering on that field.
    yearsFilter = args.years.split(",") if args.years is not None else None
    sitesFilter = args.sites.split(",") if args.sites is not None else None
    watershedsFilter = args.watersheds.split(",") if args.watersheds is not None else None
    visitsFilter = args.visits.split(",") if args.visits is not None else None
    # Substrate grain-size percentiles to compute (D84 by default).
    di_values = [int(d) for d in args.di.split(",")] if args.di is not None else [84]

    # Make sure the output folder exists
    if not os.path.isdir(args.outputfolder):
        os.makedirs(args.outputfolder)

    # Set up log table - could be same db, but different table.
    logdb = SqliteLog(os.path.join(args.outputfolder, "export_log.db") if args.logfile is None else args.logfile)
    if args.bankfull:
        logdb.add_bankfull_metrics_table()

    # NOTE(review): hard-coded machine-specific path for the env file.
    setEnvFromFile(r"D:\.env")

    # Walk through folders looking for Riverscapes project files.
    for dirname, dirs, filenames in os.walk(args.sourcefolder):
        for filename in [os.path.join(dirname, name) for name in filenames]:
            if os.path.basename(filename) == "project.rs.xml":
                print filename
                # Get project details. Newer projects use 'Visit'/'Site'/'Year'
                # metadata names, older ones 'VisitID'/'SiteName'/'FieldSeason'.
                tree = ET.parse(filename)
                root = tree.getroot()
                visitid = root.findtext("./MetaData/Meta[@name='Visit']") if root.findtext("./MetaData/Meta[@name='Visit']") is not None else root.findtext("./MetaData/Meta[@name='VisitID']")
                siteid = root.findtext("./MetaData/Meta[@name='Site']") if root.findtext("./MetaData/Meta[@name='Site']") is not None else root.findtext("./MetaData/Meta[@name='SiteName']")
                watershed = root.findtext("./MetaData/Meta[@name='Watershed']")
                year = root.findtext("./MetaData/Meta[@name='Year']") if root.findtext("./MetaData/Meta[@name='Year']") is not None else root.findtext("./MetaData/Meta[@name='FieldSeason']")

                if root.findtext("ProjectType") == "Topo":
                    # Apply the optional command-line filters.
                    if (yearsFilter is None or year in yearsFilter) and \
                            (watershedsFilter is None or watershed in watershedsFilter) and \
                            (sitesFilter is None or siteid in sitesFilter) and \
                            (visitsFilter is None or visitid in visitsFilter):
                        from lib.topoproject import TopoProject
                        topo_project = TopoProject(filename)
                        project_folder = dirname

                        # Make visit level output folder
                        resultsFolder = os.path.join(args.outputfolder, year, watershed, siteid, "VISIT_{}".format(str(visitid)))  # , "models")
                        if not os.path.isdir(resultsFolder):
                            os.makedirs(resultsFolder)

                        # ---- Validation ----
                        if args.validation:
                            try:
                                validationfolder = os.path.join(resultsFolder, "validation")
                                if not os.path.isdir(validationfolder):
                                    os.makedirs(validationfolder)
                                # NOTE(review): the same log is configured twice
                                # (logg then log) throughout this function.
                                logg = Logger("Program")
                                logfile = os.path.join(validationfolder, "validation.log")
                                xmlfile = os.path.join(validationfolder, "validation.xml")
                                logg.setup(logPath=logfile, verbose=args.verbose)
                                # Initiate the log file
                                log = Logger("Program")
                                log.setup(logPath=logfile, verbose=args.verbose)
                                v_result = validation.validate(project_folder, xmlfile, visitid)
                                logdb.write_log(year, watershed, siteid, visitid, "Validation", str(v_result), xmlfile)
                            except Exception as e:
                                logdb.write_log(year, watershed, siteid, visitid, "Validation", "Exception", traceback.format_exc())

                        # ---- Topo metrics (model call currently commented out) ----
                        if args.topometrics:
                            try:
                                topometricsfolder = os.path.join(resultsFolder, "topo_metrics")
                                if not os.path.isdir(topometricsfolder):
                                    os.makedirs(topometricsfolder)
                                logg = Logger("Program")
                                logfile = os.path.join(topometricsfolder, "topo_metrics.log")
                                xmlfile = os.path.join(topometricsfolder, "topo_metrics.xml")
                                logg.setup(logPath=logfile, verbose=args.verbose)
                                # Initiate the log file
                                log = Logger("Program")
                                log.setup(logPath=logfile, verbose=args.verbose)
                                #tm_result = topometrics.visitTopoMetrics(visitid, xmlfile, project_folder)
                                #logdb.write_log(year,watershed, siteid, visitid, "TopoMetrics", str(tm_result), xmlfile)
                            except:
                                logdb.write_log(year, watershed, siteid, visitid, "TopoMetrics", "Exception", traceback.format_exc())

                        # ---- Hydro prep ----
                        if args.hydroprep:
                            try:
                                hydroprepfolder = os.path.join(resultsFolder, "Hydro", "HydroModelInputs", "artifacts")
                                if not os.path.isdir(hydroprepfolder):
                                    os.makedirs(hydroprepfolder)
                                logg = Logger("Program")
                                logfile = os.path.join(hydroprepfolder, "hydroprep.log")
                                xmlfile = os.path.join(hydroprepfolder, "hydroprep.xml")
                                logg.setup(logPath=logfile, verbose=args.verbose)
                                # Initiate the log file
                                log = Logger("Program")
                                log.setup(logPath=logfile, verbose=args.verbose)
                                dem = topo_project.getpath("DEM")
                                wsdem = topo_project.getpath("WaterSurfaceDEM")
                                thalweg = topo_project.getpath("Thalweg")
                                result = hydroPrep(dem, wsdem, thalweg, hydroprepfolder, True)
                                logdb.write_log(year, watershed, siteid, visitid, "HydroPrep", str(result), xmlfile)
                            except:
                                logdb.write_log(year, watershed, siteid, visitid, "HydroPrep", "Exception", traceback.format_exc())

                        # ---- Site properties / topo+aux metrics: not implemented ----
                        if args.siteprops:
                            try:
                                pass
                            except:
                                pass
                        if args.topoauxmetrics:
                            try:
                                pass
                            except:
                                pass

                        # ---- CAD export ----
                        if args.cadexport:
                            try:
                                cadexportfolder = os.path.join(resultsFolder, "CADExport")
                                # NOTE(review): this condition looks inverted — it calls
                                # makedirs only when the folder ALREADY exists (which
                                # raises), and never creates a missing folder.
                                if os.path.isdir(cadexportfolder):
                                    os.makedirs(cadexportfolder)
                                logg = Logger("Program")
                                logfile = os.path.join(cadexportfolder, "cad_export.log")
                                xmlfile = os.path.join(cadexportfolder, "cad_export.xml")
                                logg.setup(logPath=logfile, verbose=args.verbose)
                                # Initiate the log file
                                log = Logger("Program")
                                log.setup(logPath=logfile, verbose=args.verbose)
                                ce_result = export_cad_files(filename, cadexportfolder)
                                logdb.write_log(year, watershed, siteid, visitid, "CadExport", "Success", xmlfile)
                            except:
                                logdb.write_log(year, watershed, siteid, visitid, "CadExport", "Exception", traceback.format_exc())

                        # ---- Substrate raster ----
                        if args.substrate:
                            # NOTE(review): 'path.join' (not os.path.join) — relies on a
                            # 'path' name being imported elsewhere in this module; confirm.
                            channel_units_json = path.join(project_folder, "ChannelUnits.json")
                            if not os.path.isfile(channel_units_json):
                                url = r"/visits/{}/measurements/Substrate%20Cover".format(str(visitid))
                                # NOTE(review): APICall here vs APIGet elsewhere — confirm both exist.
                                dict_occular = APICall(url)  # , channel_units_json)
                            else:
                                dict_occular = json.load(open(channel_units_json, 'rt'))
                            try:
                                substratefolder = os.path.join(resultsFolder, "substrateD")
                                if not os.path.isdir(substratefolder):
                                    os.makedirs(substratefolder)
                                logg = Logger("Program")
                                logfile = os.path.join(substratefolder, "substrate.log")
                                xmlfile = os.path.join(substratefolder, "substrate.xml")
                                logg.setup(logPath=logfile, verbose=args.verbose)
                                result = generate_substrate_raster(project_folder, substratefolder, di_values, dict_occular)
                                # NOTE(review): "SubstrateD".format() is a no-op format call.
                                logdb.write_log(year, watershed, siteid, visitid, "SubstrateD".format(), str(result), xmlfile)
                            except:
                                logdb.write_log(year, watershed, siteid, visitid, "SubstrateD".format(), "Exception", traceback.format_exc())

                        # ---- Hydro GIS export: find the matching Hydro project for this visit ----
                        if args.hydroexport:
                            hydrobasefolder = args.hydrofolder if args.hydrofolder else args.sourcefolder
                            hydrosearchfolder = os.path.join(hydrobasefolder, os.path.dirname(os.path.relpath(dirname, args.sourcefolder)))  # todo: clunky but works. problem with spaces in folder names
                            for dirname2, dirs2, filenames2 in os.walk(hydrosearchfolder):
                                for filename2 in [os.path.join(dirname2, name) for name in filenames2]:
                                    if os.path.basename(filename2) == "project.rs.xml":
                                        tree2 = ET.parse(filename2)
                                        root2 = tree2.getroot()
                                        visitid2 = root2.findtext("./MetaData/Meta[@name='Visit']") if root2.findtext("./MetaData/Meta[@name='Visit']") is not None else root2.findtext("./MetaData/Meta[@name='VisitID']")
                                        if root2.findtext("ProjectType") == "Hydro" and visitid2 == visitid:
                                            try:
                                                flow = root2.findtext("./MetaData/Meta[@name='Flow']")
                                                hydroexportfolder = os.path.join(resultsFolder, "Hydro", "Results", flow, "GIS_Exports")
                                                if not os.path.isdir(hydroexportfolder):
                                                    os.makedirs(hydroexportfolder)
                                                logg = Logger("Program")
                                                logfile = os.path.join(hydroexportfolder, "hydrogisexport.log")
                                                xmlfile = os.path.join(hydroexportfolder, "hydrogisexport.xml")
                                                logg.setup(logPath=logfile, verbose=args.verbose)
                                                # Initiate the log file
                                                log = Logger("Program")
                                                log.setup(logPath=logfile, verbose=args.verbose)
                                                result = export_hydro_model(filename2, filename, hydroexportfolder)
                                                logdb.write_log(year, watershed, siteid, visitid, "HydroGISExport", 'Success for flow {}'.format(str(flow)), xmlfile)
                                            except:
                                                logdb.write_log(year, watershed, siteid, visitid, "HydroGISExport", "Exception", traceback.format_exc())

                        # ---- Bankfull metrics ----
                        if args.bankfull:
                            try:
                                outfolder = os.path.join(resultsFolder, "BankfullMetrics")
                                # NOTE(review): condition looks inverted, same as CADExport above.
                                if os.path.isdir(outfolder):
                                    os.makedirs(outfolder)
                                logg = Logger("Program")
                                logfile = os.path.join(outfolder, "bankfull_metrics.log")
                                xmlfile = os.path.join(outfolder, "bankfull_metrics.xml")
                                logg.setup(logPath=logfile, verbose=args.verbose)
                                # Initiate the log file
                                log = Logger("Program")
                                log.setup(logPath=logfile, verbose=args.verbose)
                                results = bankfull_metrics(topo_project.getpath("DEM"), topo_project.getpath("DetrendedDEM"), topo_project.getpath("Topo_Points"))
                                # todo write xml?
                                logdb.write_bankfull_metrics(year, watershed, siteid, visitid, results)
                                logdb.write_log(year, watershed, siteid, visitid, "BankfullMetrics", "Success", xmlfile)
                            except:
                                logdb.write_log(year, watershed, siteid, visitid, "BankfullMetrics", "Exception", traceback.format_exc())

    sys.exit(0)
def main():
    """CLI entry point: generate substrate rasters for a visit at one or more
    grain-size percentiles (e.g. D50, D84)."""
    # NOTE(review): this function is Python 2 only ('print "ERROR..."' below).
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('visitID', help='the visit id of the site to use (no spaces)', type=str)
    parser.add_argument('outputfolder', help='Output folder', type=str)
    parser.add_argument('substrate_values', nargs='+', help="one or more percentiles of grain size to calculate. 50 for D50, 84 for D84, etc", type=int)
    parser.add_argument('--out_channel_roughness_value', help="i.e. 4000.0", type=float, default=4000.0)
    parser.add_argument('--ocular_estimates', help="(optional) local json file of ocular estimates")
    parser.add_argument('--datafolder', help='(optional) local folder containing TopoMetrics Riverscapes projects', type=str)
    parser.add_argument('--env', "-e", help="(optional) local env file", type=str)
    parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False)
    args = parser.parse_args()

    # NOTE(review): argparse already enforces the positional args, and this
    # check does not exit — execution continues even when it fires.
    if not all([args.visitID, args.outputfolder, args.substrate_values, args.out_channel_roughness_value]):
        print "ERROR: Missing arguments"
        parser.print_help()

    # Optionally load environment variables (e.g. API credentials) from a file.
    if args.env:
        setEnvFromFile(args.env)

    # Make sure the output folder exists
    resultsFolder = os.path.join(args.outputfolder, "outputs")

    # Initiate the log file
    # NOTE(review): the log path is inside resultsFolder, which is only created
    # later inside the try block — confirm Logger.setup tolerates a missing folder.
    logg = Logger("Program")
    logfile = os.path.join(resultsFolder, "substrate_raster.log")
    logg.setup(logPath=logfile, verbose=args.verbose)

    # Fiona debug-level loggers can cause problems
    logging.getLogger("Fiona").setLevel(logging.ERROR)
    logging.getLogger("fiona").setLevel(logging.ERROR)
    logging.getLogger("fiona.collection").setLevel(logging.ERROR)
    logging.getLogger("shapely.geos").setLevel(logging.ERROR)
    logging.getLogger("rasterio").setLevel(logging.ERROR)

    try:
        # Make some folders if we need to:
        if not os.path.isdir(args.outputfolder):
            os.makedirs(args.outputfolder)
        if not os.path.isdir(resultsFolder):
            os.makedirs(resultsFolder)

        # If we need to go get our own topodata.zip file and unzip it we do this
        if args.datafolder is None:
            topoDataFolder = os.path.join(args.outputfolder, "inputs")
            if not os.path.isdir(topoDataFolder):
                os.makedirs(topoDataFolder)
            fileJSON, projectFolder = downloadUnzipTopo(args.visitID, topoDataFolder)
        # otherwise just pass in a path to existing data
        else:
            projectFolder = args.datafolder

        # Fetch ocular substrate estimates (from the API or a local json file)
        # and re-key each estimate by channel unit number using the channel
        # unit measurements.
        # NOTE(review): the re-keying block is duplicated in both branches.
        if args.ocular_estimates is None:
            dict_ocular = APIGet("visits/{}/measurements/Substrate%20Cover".format(str(args.visitID)))
            dict_units = APIGet("visits/{}/measurements/Channel%20Unit".format(str(args.visitID)))
            dict_unitkey = {x['value']['ChannelUnitID']: x['value']['ChannelUnitNumber'] for x in dict_units['values']}
            for i in range(len(dict_ocular['values'])):
                dict_ocular['values'][i]['value']['ChannelUnitNumber'] = dict_unitkey[dict_ocular['values'][i]['value']['ChannelUnitID']]
        else:
            dict_ocular = json.load(open(args.ocular_estimates, 'rt'))
            dict_units = APIGet("visits/{}/measurements/Channel%20Unit".format(str(args.visitID)))
            dict_unitkey = {x['value']['ChannelUnitID']: x['value']['ChannelUnitNumber'] for x in dict_units['values']}
            for i in range(len(dict_ocular['values'])):
                dict_ocular['values'][i]['value']['ChannelUnitNumber'] = dict_unitkey[dict_ocular['values'][i]['value']['ChannelUnitID']]

        generate_substrate_raster(projectFolder, resultsFolder, args.substrate_values, dict_ocular, args.out_channel_roughness_value)
    except (DataException, MissingException, NetworkException) as e:
        # Exception class prints the relevant information
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError as e:
        # NOTE(review): e.message is deprecated in Python 2.6+ and absent in Python 3.
        logg.error(e.message)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception as e:
        logg.error(e.message)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
def champ_topo_checker(workbench, folder):
    """Scan the workbench visit list and download any missing or zero-byte
    TopoData.zip files into the local CHaMP folder hierarchy.

    :param workbench: path to the workbench sqlite database
    :param folder: root of the local CHaMP data folder structure
    """
    log = Logger('Topo Checker')
    log.setup(logPath=os.path.join(folder, datetime.now().strftime("%Y%m%d-%H%M%S") + '_topo_checker.log'))

    dbCon = sqlite3.connect(workbench)
    dbCurs = dbCon.cursor()
    # ProgramID 1 plus this ProtocolID list selects the visits of interest.
    dbCurs.execute('SELECT WatershedName, VisitYear, SiteName, VisitID' +
                   ' FROM vwVisits WHERE ProgramID = 1 AND ProtocolID IN (2030, 416, 806, 1966, 2020, 1955, 1880, 10036, 9999)')

    file_exists = 0     # visits whose TopoData.zip is already on disk
    file_zero = 0       # zero-byte files that need re-downloading
    file_download = []  # paths queued for download
    file_errors = []    # kept for parity with the original (never appended)

    for row in dbCurs.fetchall():
        watershed, visit_year, site, visitID = row

        topo_path = os.path.join(folder, str(visit_year), watershed.replace(' ', ''), site,
                                 'VISIT_{}'.format(visitID), 'Field Folders', 'Topo', 'TopoData.zip')

        download_needed = False
        if os.path.isfile(topo_path):
            file_exists += 1
            if os.stat(topo_path).st_size == 0:
                # BUG FIX: the original had 'file_zero += 0', which never
                # counted zero-byte files.
                file_zero += 1
                download_needed = True
        else:
            download_needed = True

        if not download_needed:
            continue

        file_download.append(topo_path)

        try:
            topoFieldFolders = APIGet('visits/{}/fieldFolders/Topo'.format(visitID))
            # componentTypeID 181 identifies the TopoData.zip entry.
            # (renamed from 'file', which shadowed the builtin)
            zip_entry = next(f for f in topoFieldFolders['files'] if f['componentTypeID'] == 181)
            downloadUrl = zip_entry['downloadUrl']
        except Exception as e:
            # 'except Exception, e' in the original is Python 2-only syntax;
            # 'as e' is valid in both Python 2.6+ and 3.
            log.warning('No topo data for visit information {}: {}'.format(visitID, topo_path))
            continue

        # Download the file into its final location.
        if not os.path.isdir(os.path.dirname(topo_path)):
            os.makedirs(os.path.dirname(topo_path))

        with open(topo_path, 'w+b') as f:
            response = APIGet(downloadUrl, absolute=True)
            f.write(response.content)
            log.info(topo_path)

        log.info('Downloaded {}'.format(topo_path))