def __init__(self, sFolder, visitID):
    """Set up layer-path slots for one survey folder / visit."""
    self.directory = sFolder
    self.visitID = visitID
    # One Channel record each for the wetted and bankfull extents.
    self.Channels = dict(Wetted=Channel(), Bankfull=Channel())
    # Layer paths all start out unset; they are filled in later.
    for layer_attr in ('DEM', 'Depth', 'WaterSurface', 'ChannelUnits', 'Thalweg', 'TopoPoints'):
        setattr(self, layer_attr, "")
    # This object will be empty if there is no project.rs.xml file in the sFolder
    self.riverscapes = TopoProject(sFolder)
def load_topo_project(self, directory, visitID): """ :param directory: full path to directory of survey, i.e. C://Visit//GISLayers :type directory: str :param channelunitsjson: path to channel unit json file :return: None """ # If we have a project.rs.xml file tp gets a value tp = TopoProject(directory) def getPath(layername): """ This is a tricky little method. If there is a project.rs.xml file (maude) and we have a tp.getNamePath method then run it. Otherwise just return the 'directory' variable (harold) """ try: return tp.getpath(layername) except Exception, e: # This is kind of a weird thing to do. There will be nothing in this path but this will let things # fail gracefully return os.path.join(directory, "FILENOTFOUND.TIF")
def processZipFile(invVisit, APIVisit): dirpath = tempfile.mkdtemp() try: doupload = False # Copy some useful things from the API: for k in ['status', 'lastMeasurementChange', 'lastUpdated', 'name']: invVisit[k] = APIVisit[k] file, projpath = downloadUnzipTopo(APIVisit['id'], dirpath) topo = TopoProject(projpath) invVisit["topozip"] = True latestRealizationdate = latestRealizationDate(topo) if "latestRealization" not in invVisit or APIstrtodate(invVisit["latestRealization"]) >= latestRealizationdate: invVisit["latestRealization"] = APIdatetostr(latestRealizationdate) invVisit["size"] = file['size'] uploadProjectToRiverscapes(dirpath, invVisit) except MissingException, e: invVisit["topozip"] = False invVisit["error"] = e.message
def downloadExtractParseVisits(visits, outputFolder): log = Logger('Downloading') log.info("Downloading all visits from the API") projects = [] for visit in visits: try: extractpath = os.path.join(outputFolder, 'VISIT_{}'.format(visit)) projpath = os.path.join(extractpath, 'project.rs.xml') downloadUnzipTopo(visit, extractpath) proj = TopoProject(extractpath) if proj.isrsproject: projects.append({"project": proj, "visit": visit}) else: log.error("File not found: {}".format(projpath)) raise DataException("Missing Project File") # Just move on if something fails except Exception, e: pass
def main():
    """Download (or locate) a visit's topo project and compute bankfull metrics."""
    # parse command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('visitID', help='Visit ID', type=int)
    parser.add_argument('outputfolder', help='Path to output folder', type=str)
    parser.add_argument(
        '--datafolder',
        help='(optional) Top level folder containing TopoMetrics Riverscapes projects',
        type=str)
    parser.add_argument('--verbose',
                        help='Get more information in your logs.',
                        action='store_true',
                        default=False)
    args = parser.parse_args()

    # Make sure the output folders exist BEFORE pointing the log file at
    # them — the original called Logger.setup() with a logfile inside a
    # directory that was only created later, inside the try block.
    resultsFolder = os.path.join(args.outputfolder, "outputs")
    if not os.path.isdir(args.outputfolder):
        os.makedirs(args.outputfolder)
    if not os.path.isdir(resultsFolder):
        os.makedirs(resultsFolder)

    # Initiate the log file (one logger; the original set up two identical
    # "Program" loggers back to back).
    logfile = os.path.join(resultsFolder, "bankfull_metrics.log")
    xmlfile = os.path.join(resultsFolder, "bankfull_metrics.xml")
    log = Logger("Program")
    log.setup(logPath=logfile, verbose=args.verbose)

    try:
        # If we need to go get our own topodata.zip file and unzip it we do this
        if args.datafolder is None:
            topoDataFolder = os.path.join(args.outputfolder, "inputs")
            fileJSON, projectFolder = downloadUnzipTopo(args.visitID, topoDataFolder)
        # otherwise just pass in a path to existing data
        else:
            projectFolder = args.datafolder

        from lib.topoproject import TopoProject
        projectXML = os.path.join(projectFolder, "project.rs.xml")
        topo_project = TopoProject(projectXML)

        # Pull the visit ID out of the project metadata: 'Visit' with a
        # 'VisitID' fallback for older project files.
        root = ET.parse(projectXML).getroot()
        visitid = root.findtext("./MetaData/Meta[@name='Visit']")
        if visitid is None:
            visitid = root.findtext("./MetaData/Meta[@name='VisitID']")

        finalResult = bankfull_metrics(topo_project.getpath("DEM"),
                                       topo_project.getpath("DetrendedDEM"),
                                       topo_project.getpath("Topo_Points"))
        write_bfmetrics_xml(finalResult, visitid, xmlfile)
        sys.exit(0)
    except (DataException, MissingException, NetworkException) as e:
        # Exception class prints the relevant information
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError as e:
        log.error(e.message)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception as e:
        log.error(e.message)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
def hydro_gis_export(hydro_project_xml, topo_project_xml, outfolder):
    """
    Export hydro model results to GIS formats: one GeoTiff per result column
    of dem_grid_results.csv, a scratch water-extent polygon layer for the
    Depth column, and a HydroResults point ShapeFile.

    :param hydro_project_xml: path to the hydro model project.rs.xml
    :param topo_project_xml: path to the topo project.rs.xml
    :param outfolder: folder receiving the exported rasters/shapefiles
    :return: 0 on success
    :raises MissingException: Visit ID or results csv is missing
    :raises DataException: the generated VRT cannot be opened by OGR
    """
    log = Logger("Hydro GIS Export")

    rs_hydro = Project(hydro_project_xml)
    rs_topo = TopoProject(topo_project_xml)
    hydro_results_folder = os.path.dirname(hydro_project_xml)

    if "Visit" not in rs_hydro.ProjectMetadata:
        raise MissingException("Cannot Find Visit ID")
    visit_id = rs_hydro.ProjectMetadata['Visit']

    # The DEM drives the size and georeferencing of every output raster.
    dem = gdal.Open(rs_topo.getpath("DEM"))
    dem_srs = dem.GetProjection()
    dem_x_size = dem.RasterXSize
    dem_y_size = dem.RasterYSize
    dem_band = dem.GetRasterBand(1)
    dem_ndv = dem_band.GetNoDataValue()
    dem_geotransform = dem.GetGeoTransform()

    # Get data columns in csv file. (The original also opened a
    # "dem_grid_results_clean_header.csv" for writing but never wrote to it,
    # leaving an empty junk file; that is dropped here.)
    csvfile = os.path.join(hydro_results_folder, "dem_grid_results.csv")
    if not os.path.isfile(csvfile):
        raise MissingException("Required file {} does not exist.".format(csvfile))
    with open(csvfile, "rb") as f_in:
        reader = csv.reader(f_in)
        cols = [col for col in reader.next() if col not in ["Y", "X"]]
    log.info("Loaded fields from csv file.")

    # Write a VRT wrapper so OGR can read the csv as a point layer.
    vrt = os.path.join(hydro_results_folder, '{}.vrt'.format("dem_grid_results"))
    with open(vrt, 'wt') as f:
        f.write('<OGRVRTDataSource>\n')
        f.write('\t<OGRVRTLayer name="{}">\n'.format("dem_grid_results"))
        f.write('\t\t<SrcDataSource>{}</SrcDataSource>\n'.format(csvfile))
        f.write('\t\t<SrcLayer>{}</SrcLayer>\n'.format("dem_grid_results"))
        f.write('\t\t<GeometryType>wkbPoint25D</GeometryType>\n')
        f.write('\t\t<LayerSRS>{}</LayerSRS>\n'.format(dem_srs))
        f.write('\t\t<GeometryField encoding="PointFromColumns" x="X" y="Y" />\n')
        for field in cols:
            f.write('\t\t<Field name="{}" type="Real" subtype="Float32" />\n'.format(field))
        f.write('\t</OGRVRTLayer>\n')
        f.write('</OGRVRTDataSource>\n')
    log.info("Generated vrt file {}".format(vrt))

    # Open csv as OGR
    ogr_vrt = ogr.Open(vrt, 1)
    if ogr_vrt is None:
        raise DataException("unable to open {}".format(vrt))
    layer = ogr_vrt.GetLayer()

    # Generate geotiff for each column in the CSV file
    driver = gdal.GetDriverByName("GTiff")
    for col in cols:
        out_tif = os.path.join(outfolder, '{}.tif'.format(col))
        out_raster = driver.Create(out_tif, dem_x_size, dem_y_size, 1, gdalconst.GDT_Float32)
        out_raster.SetGeoTransform(dem_geotransform)
        out_raster.SetProjection(dem_srs)
        band = out_raster.GetRasterBand(1)
        band.SetNoDataValue(dem_ndv)
        band.FlushCache()
        gdal.RasterizeLayer(out_raster, [1], layer, options=["ATTRIBUTE={}".format(col)])
        band.GetStatistics(0, 1)
        band.FlushCache()
        out_raster.FlushCache()
        log.info("Generated {} for attribute {}".format(out_tif, col))

        if col == "Depth":
            # Build a boolean "wet" raster (Depth > 0, nodata masked) and
            # polygonize it into a scratch extent ShapeFile.
            raw = numpy.array(band.ReadAsArray())
            masked = numpy.ma.masked_array(raw, raw == dem_ndv)
            bool_raster = numpy.array(masked, "bool")
            numpy.greater(masked, 0, bool_raster)
            raster_mem = gdal.GetDriverByName("GTIFF").Create(
                os.path.join(outfolder, "Temp.tif"), dem_x_size, dem_y_size, 1, gdalconst.GDT_Int16)
            raster_mem.SetGeoTransform(dem_geotransform)
            raster_mem.SetProjection(dem_srs)
            band_mem = raster_mem.GetRasterBand(1)
            band_mem.WriteArray(bool_raster, 0, 0)
            band_mem.SetNoDataValue(dem_ndv)
            band_mem.FlushCache()
            temp = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource(
                os.path.join(outfolder, "TempExtent.shp"))
            temp_layer = temp.CreateLayer("RawExtent", osr.SpatialReference(wkt=dem_srs), ogr.wkbPolygon)
            temp_layer.CreateField(ogr.FieldDefn("Value", ogr.OFTInteger))
            temp_layer.CreateField(ogr.FieldDefn("Area", ogr.OFTReal))
            gdal.Polygonize(band_mem, None, temp_layer, 0)
            del raster_mem
            # Close the scratch datasource so the shapefile is flushed.
            temp = None
        # Close the output raster before moving to the next attribute.
        del out_raster

    # Copy the csv point layer out as the results ShapeFile.
    shp_hydroresults = os.path.join(outfolder, "HydroResults.shp")
    ogr.GetDriverByName("ESRI Shapefile").CopyDataSource(ogr_vrt, shp_hydroresults)
    log.info("Generated Hydro Results Shapefile {}".format(shp_hydroresults))
    ogr_vrt = None
    return 0
def main():
    """Command-line entry point for hydro prep: resolve the DEM, water
    surface DEM and thalweg either manually or from a Riverscapes project,
    then run hydroPrep()."""
    # parse command line options
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(
        dest='mode', help='For help type `hydroprep.py manual -h`')

    # The manual subparser is for when we know explicit paths
    manual = subparsers.add_parser('manual', help='manual help')
    manual.add_argument('dem', help='DEM raster path', type=argparse.FileType('r'))
    manual.add_argument('wsdem', help='Water surface raster path', type=argparse.FileType('r'))
    manual.add_argument('thalweg', help='Thalweg ShapeFile path', type=argparse.FileType('r'))
    manual.add_argument('outputfolder', help='Output folder')
    manual.add_argument('--verbose', help='Get more information in your logs.',
                        action='store_true', default=False)

    # The project subparser is when we want to pass in a project.rs.xml file
    project = subparsers.add_parser('project', help='project help')
    project.add_argument('visitID', help='Visit ID', type=int)
    project.add_argument('outputfolder', help='Path to output folder', type=str)
    project.add_argument(
        '--datafolder',
        help='(optional) Top level folder containing TopoMetrics Riverscapes projects',
        type=str)
    project.add_argument('--verbose', help='Get more information in your logs.',
                         action='store_true', default=False)
    args = parser.parse_args()

    try:
        if args.mode == "project":
            resultsFolder = os.path.join(args.outputfolder, "outputs")
            if not os.path.isdir(args.outputfolder):
                os.makedirs(args.outputfolder)
            if not os.path.isdir(resultsFolder):
                os.makedirs(resultsFolder)
            # If we need to go get our own topodata.zip file and unzip it we do this
            if args.datafolder is None:
                topoDataFolder = os.path.join(args.outputfolder, "inputs")
                fileJSON, projectFolder = downloadUnzipTopo(
                    args.visitID, topoDataFolder)
            # otherwise just pass in a path to existing data
            else:
                projectFolder = args.datafolder
            tp = TopoProject(projectFolder)
            # Resolve the three input layers from the riverscapes project.
            funcargs = (tp.getpath("DEM"), tp.getpath("WaterSurfaceDEM"),
                        tp.getpath("Thalweg"), resultsFolder, args.verbose)
        else:
            # manual mode: argparse's FileType('r') already verified the
            # input files are readable; we only need their paths.
            funcargs = (args.dem.name, args.wsdem.name, args.thalweg.name,
                        args.outputfolder, args.verbose)
        hydroPrep(*funcargs)
    except (DataException, MissingException, NetworkException) as e:
        # Exception class prints the relevant information
        traceback.print_exc(file=sys.stdout)
        sys.exit(e.returncode)
    except AssertionError as e:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
    sys.exit(0)
def channelUnitScraper(outputDir, watersheds): visitData = {} for watershed, apiName in watersheds.iteritems(): visitData[watershed] = {} loadVisitData(watershed, apiName, visitData[watershed]) for watershed, visits in visitData.iteritems(): outPath = os.path.join(outputDir, watershed.replace(' ', '') + ".shp") outShape = None featureID = 0 for visitID, visit in visits.iteritems(): if len(visit['ChannelUnits']) < 1: continue try: dirpath = tempfile.mkdtemp() # Open the visit channel unit shapefile. # Need the spatial reference from one of the visits to create the output watershed shapefile try: fileJSON, projPath = downloadUnzipTopo(visitID, dirpath) topo = TopoProject(projPath) cuPath = topo.getpath('ChannelUnits') except (DataException, MissingException), e: print "Error retrieving channel units ShapeFile for visit {0}".format( visitID) continue try: shpCU = Shapefile(cuPath) except Exception, e: print "Error OGR opening channel unit ShapeFile for visit {0}".format( visitID) continue if not outShape: # Create new ShapeFile for this watershed outShape = Shapefile() outShape.create(outPath, shpCU.spatialRef, geoType=ogr.wkbPolygon) outShape.createField("ID", ogr.OFTInteger) outShape.createField("Watershed", ogr.OFTString) outShape.createField("Site", ogr.OFTString) outShape.createField("VisitID", ogr.OFTInteger) outShape.createField("SampleYear", ogr.OFTInteger) outShape.createField("Org", ogr.OFTString) outShape.createField("UnitNumber", ogr.OFTInteger) outShape.createField("UnitArea", ogr.OFTReal) outShape.createField("Tier1", ogr.OFTString) outShape.createField("Tier2", ogr.OFTString) outShape.createField("AvgSiteWid", ogr.OFTReal) outShape.createField("ReachLen", ogr.OFTReal) # Loop over all channel unit polygons for this visit feats = shpCU.featuresToShapely() for aFeat in feats: featureID += 1 cuNumber = aFeat['fields']['UnitNumber'] featureDefn = outShape.layer.GetLayerDefn() outFeature = ogr.Feature(featureDefn) outFeature.SetField('ID', featureID) 
outFeature.SetField('Watershed', visit['Watershed']) outFeature.SetField('Site', visit['Site']) outFeature.SetField('VisitID', visitID) outFeature.SetField('SampleYear', visit['SampleYear']) outFeature.SetField('Org', visit['Organization']) outFeature.SetField('AvgSiteWid', visit['AverageSiteWidth']) outFeature.SetField('ReachLen', visit['TotalReachLength']) outFeature.SetField('UnitNumber', cuNumber) outFeature.SetField( 'Tier1', visit['ChannelUnits'][cuNumber]['Tier1']) outFeature.SetField( 'Tier2', visit['ChannelUnits'][cuNumber]['Tier2']) outFeature.SetField('UnitArea', aFeat['geometry'].area) outFeature.SetGeometry( ogr.CreateGeometryFromJson( json.dumps(mapping(aFeat['geometry'])))) outShape.layer.CreateFeature(outFeature) finally:
def export_hydro_model(hydro_rs_xml, topo_rs_xml, out_path):
    """
    Export hydro model results (dem_grid_results.csv) to GIS outputs: one
    raster per result column, a water-extent polygon layer for Depth, and
    island polygons spatially joined to the topo project's wetted islands.

    :param hydro_rs_xml: path to the hydro model project.rs.xml
    :param topo_rs_xml: path to the topo project.rs.xml
    :param out_path: folder receiving the exported rasters/shapefiles
    :return: 0 on success
    :raises MissingException: when the Visit ID metadata is missing
    """
    log = Logger("Hydro GIS Export")

    # 1 todo Read project.rs.xml
    rs_hydro = Project(hydro_rs_xml)
    rs_topo = TopoProject(topo_rs_xml)
    hydro_results_folder = os.path.dirname(hydro_rs_xml)
    csvfile_hydro = os.path.join(
        hydro_results_folder,
        "dem_grid_results.csv")  # todo get this from hydro project xml
    if not rs_hydro.ProjectMetadata.has_key("Visit"):
        raise MissingException("Cannot Find Visit ID")
    visit_id = rs_hydro.ProjectMetadata['Visit']

    df_csv = pandas.read_csv(csvfile_hydro)
    log.info("Read hydro results csv as data frame")

    # Get DEM Props
    with rasterio.open(rs_topo.getpath("DEM")) as rio_dem:
        dem_crs = rio_dem.crs
        dem_bounds = rio_dem.bounds
        dem_nodata = rio_dem.nodata
    # Output grid is at 0.1 map-unit resolution anchored to the DEM origin;
    # pads align the csv's XY extent with the DEM bounds.
    out_transform = rasterio.transform.from_origin(dem_bounds.left,
                                                   dem_bounds.top, 0.1, 0.1)
    pad_top = int((dem_bounds.top - max(df_csv['Y'])) / 0.1)
    pad_bottom = int((min(df_csv['Y']) - dem_bounds.bottom) / 0.1)
    pad_right = int((dem_bounds.right - max(df_csv['X'])) / 0.1)
    pad_left = int((min(df_csv['X']) - dem_bounds.left) / 0.1)
    log.info("Read DEM properties")

    # generate shp
    geom = [Point(xy) for xy in zip(df_csv.X, df_csv.Y)]
    df_output = df_csv.drop(
        ["X", "Y", "Depth.Error", "WSE", "BedLevel"],
        axis="columns")  #, inplace=True) # save a bit of space
    gdf_hydro = geopandas.GeoDataFrame(df_output, geometry=geom)
    gdf_hydro.crs = dem_crs
    # Shapefile field names cannot contain '.'
    gdf_hydro.columns = gdf_hydro.columns.str.replace(".", "_")
    # Convert velocity vector to compass bearing (0-360, north = 0).
    gdf_hydro["VelDir"] = numpy.subtract(
        90,
        numpy.degrees(
            numpy.arctan2(gdf_hydro["Y_Velocity"], gdf_hydro["X_Velocity"])))
    gdf_hydro["VelBearing"] = numpy.where(gdf_hydro['VelDir'] < 0,
                                          360 + gdf_hydro["VelDir"],
                                          gdf_hydro["VelDir"])
    gdf_hydro.drop("VelDir", axis="columns", inplace=True)
    # NOTE(review): the actual write is commented out, yet success is logged
    # below — confirm whether HydroResults.shp should be produced here.
    #gdf_hydro.to_file(os.path.join(out_path, "HydroResults.shp"))
    del df_output, gdf_hydro
    log.info("Generated HydroResults.shp")

    # One raster per result column (velocity components excluded).
    for col in [
            col for col in df_csv.columns
            if col not in ["X", "Y", "X.Velocity", "Y.Velocity"]
    ]:
        # Pivot points back into a grid; iloc[::-1] flips rows so north is up.
        df_pivot = df_csv.pivot(index="Y", columns="X", values=col)
        np_raw = df_pivot.iloc[::-1].as_matrix()
        np_output = numpy.pad(np_raw, ((pad_top, pad_bottom),
                                       (pad_left, pad_right)),
                              mode="constant",
                              constant_values=numpy.nan)
        with rasterio.open(os.path.join(out_path, "{}.tif".format(col)),
                           'w',
                           driver='GTiff',
                           height=np_output.shape[0],
                           width=np_output.shape[1],
                           count=1,
                           dtype=np_output.dtype,
                           crs=dem_crs,
                           transform=out_transform,
                           nodata=dem_nodata) as rio_output:
            rio_output.write(np_output, 1)
        log.info("Generated output Raster for {}".format(col))

        if col == "Depth":
            # Generate water extent polygon
            np_extent = numpy.greater(np_output, 0)
            mask = numpy.isfinite(np_output)
            shapes = features.shapes(np_extent.astype('float32'),
                                     mask,
                                     transform=out_transform)
            gdf_extent_raw = geopandas.GeoDataFrame.from_features(
                geopandas.GeoSeries([asShape(s) for s, v in shapes]))
            gdf_extent = geopandas.GeoDataFrame.from_features(
                gdf_extent_raw.geometry.simplify(0.5))
            gdf_extent.crs = dem_crs
            gdf_extent['Area'] = gdf_extent.geometry.area
            gdf_extent['Extent'] = ""
            gdf_extent.set_value(
                gdf_extent.index[gdf_extent['Area'].idxmax()], "Extent",
                "Channel")  # Set largest Polygon as Main Channel
            gdf_extent.to_file(os.path.join(out_path, "StageExtent.shp"))
            log.info("Generated Water Extent Polygons")

            # Generate islands and spatial join existing islands attributes.
            # Islands = (filled exterior ring) minus (actual wet polygon).
            gdf_exterior = geopandas.GeoDataFrame.from_features(
                geopandas.GeoSeries([
                    Polygon(shape) for shape in gdf_extent.geometry.exterior
                ]))
            gs_diff = gdf_exterior.geometry.difference(gdf_extent.geometry)
            if not all(g.is_empty for g in gs_diff):
                gdf_islands_raw = geopandas.GeoDataFrame.from_features(
                    geopandas.GeoSeries(
                        [shape for shape in gs_diff if not shape.is_empty]))
                gdf_islands_explode = geopandas.GeoDataFrame.from_features(
                    gdf_islands_raw.geometry.explode())
                # buffer(0) repairs any self-intersections from the explode.
                gdf_islands_clean = geopandas.GeoDataFrame.from_features(
                    gdf_islands_explode.buffer(0))
                gdf_islands_clean.crs = dem_crs
                # Exception when creating gdf if topo islands shapefile is an
                # empty feature class, hence the length guard.
                if fiona.open(rs_topo.getpath("WettedIslands")).__len__() > 0:
                    gdf_topo_islands = geopandas.GeoDataFrame.from_file(
                        rs_topo.getpath("WettedIslands"))
                    gdf_islands_sj = geopandas.sjoin(gdf_islands_clean,
                                                     gdf_topo_islands,
                                                     how="left",
                                                     op="intersects")
                    gdf_islands_sj.drop(["index_right", "OBJECTID"],
                                        axis="columns",
                                        inplace=True)
                    gdf_islands_sj.crs = dem_crs
                    gdf_islands_sj.to_file(
                        os.path.join(out_path, "StageIslands.shp"))

    #todo: Generate Lyr file and copy
    #todo: Generate readme
    return 0
def main(): """Run one or more models on local CHaMP/AEM visits. Make sure command prompt is open with the appropriate environment for the model(s) to be run.""" parser = argparse.ArgumentParser(description=main.__doc__) parser.add_argument('outputfolder', help='Path to output folder', type=str) parser.add_argument('-v', '--validation', help="Run Validation", action='store_true', default=False) parser.add_argument('-m', '--topometrics', help="Run Topo Metrics", action='store_true', default=False) parser.add_argument('-y', '--hydroprep', help="Run Hydro Prep", action='store_true', default=False) parser.add_argument('-e', '--hydroexport', help="Run Hydro Model GIS export", action='store_true', default=False) parser.add_argument('-p', '--siteprops', help="Run Topo Site Properties", action='store_true', default=False) parser.add_argument('-a', '--topoauxmetrics', help="Run Topo + Aux Metrics", action='store_true', default=False) parser.add_argument('-c', '--cadexport', help="Run Cad Export", action='store_true', default=False) parser.add_argument('-s', '--substrate', help="Run Substrate Raster at D84", action='store_true', default=False) parser.add_argument('-b', '--bankfull', help='Run Bankfull Metrics', action='store_true', default=False) parser.add_argument( '--sourcefolder', help='(optional) Top level folder containing Topo Riverscapes projects', type=str) parser.add_argument('--years', help='(Optional) Years. One or comma delimited', type=str) parser.add_argument('--watersheds', help='(Optional) Watersheds. One or comma delimited', type=str) parser.add_argument('--sites', help='(Optional) Sites. One or comma delimited', type=str) parser.add_argument('--visits', help='(Optional) Visits. One or comma delimited', type=str) parser.add_argument( '--di', help= "(Optional) Di values for substrate (default=84). 
One or comma delimited", type=str) parser.add_argument( '--hydrofolder', help= '(Optional) source folder for hydro model resutls (hydroexport only)', type=str) parser.add_argument('--logfile', help='(Optional) output log db for batches', type=str) parser.add_argument('--verbose', help='Get more information in your logs.', action='store_true', default=False) args = parser.parse_args() yearsFilter = args.years.split(",") if args.years is not None else None sitesFilter = args.sites.split(",") if args.sites is not None else None watershedsFilter = args.watersheds.split( ",") if args.watersheds is not None else None visitsFilter = args.visits.split(",") if args.visits is not None else None di_values = [int(d) for d in args.di.split(",")] if args.di is not None else [84] # Make sure the output folder exists if not os.path.isdir(args.outputfolder): os.makedirs(args.outputfolder) # Set up log table - could be same db, but different table. logdb = SqliteLog( os.path.join(args.outputfolder, "export_log.db" ) if args.logfile is None else args.logfile) if args.bankfull: logdb.add_bankfull_metrics_table() setEnvFromFile(r"D:\.env") # Walk through folders for dirname, dirs, filenames in os.walk(args.sourcefolder): for filename in [os.path.join(dirname, name) for name in filenames]: if os.path.basename(filename) == "project.rs.xml": print filename # Get project details tree = ET.parse(filename) root = tree.getroot() visitid = root.findtext( "./MetaData/Meta[@name='Visit']") if root.findtext( "./MetaData/Meta[@name='Visit']" ) is not None else root.findtext( "./MetaData/Meta[@name='VisitID']") siteid = root.findtext( "./MetaData/Meta[@name='Site']") if root.findtext( "./MetaData/Meta[@name='Site']" ) is not None else root.findtext( "./MetaData/Meta[@name='SiteName']") watershed = root.findtext("./MetaData/Meta[@name='Watershed']") year = root.findtext( "./MetaData/Meta[@name='Year']") if root.findtext( "./MetaData/Meta[@name='Year']" ) is not None else root.findtext( 
"./MetaData/Meta[@name='FieldSeason']") if root.findtext("ProjectType") == "Topo": if (yearsFilter is None or year in yearsFilter) and \ (watershedsFilter is None or watershed in watershedsFilter) and \ (sitesFilter is None or siteid in sitesFilter) and \ (visitsFilter is None or visitid in visitsFilter): from lib.topoproject import TopoProject topo_project = TopoProject(filename) project_folder = dirname # Make visit level output folder resultsFolder = os.path.join( args.outputfolder, year, watershed, siteid, "VISIT_{}".format(str(visitid))) #, "models") if not os.path.isdir(resultsFolder): os.makedirs(resultsFolder) if args.validation: try: validationfolder = os.path.join( resultsFolder, "validation") if not os.path.isdir(validationfolder): os.makedirs(validationfolder) logg = Logger("Program") logfile = os.path.join(validationfolder, "validation.log") xmlfile = os.path.join(validationfolder, "validation.xml") logg.setup(logPath=logfile, verbose=args.verbose) # Initiate the log file log = Logger("Program") log.setup(logPath=logfile, verbose=args.verbose) v_result = validation.validate( project_folder, xmlfile, visitid) logdb.write_log(year, watershed, siteid, visitid, "Validation", str(v_result), xmlfile) except Exception as e: logdb.write_log(year, watershed, siteid, visitid, "Validation", "Exception", traceback.format_exc()) if args.topometrics: try: topometricsfolder = os.path.join( resultsFolder, "topo_metrics") if not os.path.isdir(topometricsfolder): os.makedirs(topometricsfolder) logg = Logger("Program") logfile = os.path.join(topometricsfolder, "topo_metrics.log") xmlfile = os.path.join(topometricsfolder, "topo_metrics.xml") logg.setup(logPath=logfile, verbose=args.verbose) # Initiate the log file log = Logger("Program") log.setup(logPath=logfile, verbose=args.verbose) #tm_result = topometrics.visitTopoMetrics(visitid, xmlfile, project_folder) #logdb.write_log(year,watershed, siteid, visitid, "TopoMetrics", str(tm_result), xmlfile) except: 
logdb.write_log(year, watershed, siteid, visitid, "TopoMetrics", "Exception", traceback.format_exc()) if args.hydroprep: try: hydroprepfolder = os.path.join( resultsFolder, "Hydro", "HydroModelInputs", "artifacts") if not os.path.isdir(hydroprepfolder): os.makedirs(hydroprepfolder) logg = Logger("Program") logfile = os.path.join(hydroprepfolder, "hydroprep.log") xmlfile = os.path.join(hydroprepfolder, "hydroprep.xml") logg.setup(logPath=logfile, verbose=args.verbose) # Initiate the log file log = Logger("Program") log.setup(logPath=logfile, verbose=args.verbose) dem = topo_project.getpath("DEM") wsdem = topo_project.getpath("WaterSurfaceDEM") thalweg = topo_project.getpath("Thalweg") result = hydroPrep(dem, wsdem, thalweg, hydroprepfolder, True) logdb.write_log(year, watershed, siteid, visitid, "HydroPrep", str(result), xmlfile) except: logdb.write_log(year, watershed, siteid, visitid, "HydroPrep", "Exception", traceback.format_exc()) if args.siteprops: try: pass except: pass if args.topoauxmetrics: try: pass except: pass if args.cadexport: try: cadexportfolder = os.path.join( resultsFolder, "CADExport") if os.path.isdir(cadexportfolder): os.makedirs(cadexportfolder) logg = Logger("Program") logfile = os.path.join(cadexportfolder, "cad_export.log") xmlfile = os.path.join(cadexportfolder, "cad_export.xml") logg.setup(logPath=logfile, verbose=args.verbose) # Initiate the log file log = Logger("Program") log.setup(logPath=logfile, verbose=args.verbose) ce_result = export_cad_files( filename, cadexportfolder) logdb.write_log(year, watershed, siteid, visitid, "CadExport", "Success", xmlfile) except: logdb.write_log(year, watershed, siteid, visitid, "CadExport", "Exception", traceback.format_exc()) if args.substrate: channel_units_json = path.join( project_folder, "ChannelUnits.json") if not os.path.isfile(channel_units_json): url = r"/visits/{}/measurements/Substrate%20Cover".format( str(visitid)) dict_occular = APICall( url) #, channel_units_json) else: dict_occular = 
json.load( open(channel_units_json, 'rt')) try: substratefolder = os.path.join( resultsFolder, "substrateD") if not os.path.isdir(substratefolder): os.makedirs(substratefolder) logg = Logger("Program") logfile = os.path.join(substratefolder, "substrate.log") xmlfile = os.path.join(substratefolder, "substrate.xml") logg.setup(logPath=logfile, verbose=args.verbose) result = generate_substrate_raster( project_folder, substratefolder, di_values, dict_occular) logdb.write_log(year, watershed, siteid, visitid, "SubstrateD".format(), str(result), xmlfile) except: logdb.write_log(year, watershed, siteid, visitid, "SubstrateD".format(), "Exception", traceback.format_exc()) if args.hydroexport: hydrobasefolder = args.hydrofolder if args.hydrofolder else args.sourcefolder hydrosearchfolder = os.path.join( hydrobasefolder, os.path.dirname( os.path.relpath(dirname, args.sourcefolder)) ) # todo: clunky but works. problem with spaces in folder names for dirname2, dirs2, filenames2 in os.walk( hydrosearchfolder): for filename2 in [ os.path.join(dirname2, name) for name in filenames2 ]: if os.path.basename( filename2) == "project.rs.xml": tree2 = ET.parse(filename2) root2 = tree2.getroot() visitid2 = root2.findtext( "./MetaData/Meta[@name='Visit']" ) if root2.findtext( "./MetaData/Meta[@name='Visit']" ) is not None else root2.findtext( "./MetaData/Meta[@name='VisitID']") if root2.findtext( "ProjectType" ) == "Hydro" and visitid2 == visitid: try: flow = root2.findtext( "./MetaData/Meta[@name='Flow']" ) hydroexportfolder = os.path.join( resultsFolder, "Hydro", "Results", flow, "GIS_Exports") if not os.path.isdir( hydroexportfolder): os.makedirs( hydroexportfolder) logg = Logger("Program") logfile = os.path.join( hydroexportfolder, "hydrogisexport.log") xmlfile = os.path.join( hydroexportfolder, "hydrogisexport.xml") logg.setup( logPath=logfile, verbose=args.verbose) # Initiate the log file log = Logger("Program") log.setup(logPath=logfile, verbose=args.verbose) result = 
export_hydro_model( filename2, filename, hydroexportfolder) logdb.write_log( year, watershed, siteid, visitid, "HydroGISExport", 'Success for flow {}'. format(str(flow)), xmlfile) except: logdb.write_log( year, watershed, siteid, visitid, "HydroGISExport", "Exception", traceback.format_exc()) if args.bankfull: try: outfolder = os.path.join( resultsFolder, "BankfullMetrics") if os.path.isdir(outfolder): os.makedirs(outfolder) logg = Logger("Program") logfile = os.path.join(outfolder, "bankfull_metrics.log") xmlfile = os.path.join(outfolder, "bankfull_metrics.xml") logg.setup(logPath=logfile, verbose=args.verbose) # Initiate the log file log = Logger("Program") log.setup(logPath=logfile, verbose=args.verbose) results = bankfull_metrics( topo_project.getpath("DEM"), topo_project.getpath("DetrendedDEM"), topo_project.getpath("Topo_Points")) # todo write xml? logdb.write_bankfull_metrics( year, watershed, siteid, visitid, results) logdb.write_log(year, watershed, siteid, visitid, "BankfullMetrics", "Success", xmlfile) except: logdb.write_log(year, watershed, siteid, visitid, "BankfullMetrics", "Exception", traceback.format_exc()) sys.exit(0)
def loadlayersproj(self):
    """Populate the wetted channel entry from the riverscapes project."""
    # NOTE(review): the original called TopoProject() with no arguments and
    # indexed it with the misspelled key 'Weted'. Everywhere else in this
    # file TopoProject takes the survey folder and layers are resolved via
    # getpath(), so both are corrected here — confirm against TopoProject's
    # actual interface.
    tp = TopoProject(self.directory)
    self.Channels['Wetted'] = tp.getpath('Wetted')