def _calc(undercutVals, visitTopoVals):
    """
    Calculate undercut metrics
    :param undercutVals: list of undercut values from the API
    :param visitTopoVals: dictionary of visit topo metrics
    :return: metrics dictionary
    """
    # Initialize all metrics as zero
    dMetrics = {
        'Length': 0.0,
        'LengthPercent': 0.0,
        'Area': 0.0,
        'AreaPercent': 0.0
    }

    if len(undercutVals) > 0:
        # Calculate the total undercut length and area
        for undercut in undercutVals:
            dMetrics['Length'] += undercut['EstimatedLength']
            try:
                # Undercut area is its length times the mean of the three width measurements
                dMetrics['Area'] += undercut['EstimatedLength'] * (undercut['Width25Percent'] + undercut['Width50Percent'] + undercut['Width75Percent']) / 3.0
            except TypeError, e:
                raise DataException("Undercut: Unhandled 'None' values during area calculation")

        # Calculate the percent length and area of the site that is undercut
        if visitTopoVals['Lgth_Wet'] is None:
            raise DataException("Lgth_Wet cannot be null")
        if visitTopoVals['Area_Wet'] is None:
            raise DataException("Area_Wet cannot be null")

        # Both banks can be undercut, so normalize by twice the wetted length
        dMetrics['LengthPercent'] = dMetrics['Length'] / (visitTopoVals['Lgth_Wet'] * 2) * 100
        dMetrics['AreaPercent'] = dMetrics['Area'] / (visitTopoVals['Area_Wet'] + dMetrics['Area']) * 100

    return dMetrics
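# A worked example of the undercut arithmetic above, using hypothetical values
# (not from any real visit): each undercut contributes its length times the
# mean of its three width measurements.
#
#   undercut = {'EstimatedLength': 2.0, 'Width25Percent': 0.3,
#               'Width50Percent': 0.4, 'Width75Percent': 0.5}
#   area = 2.0 * (0.3 + 0.4 + 0.5) / 3.0          # 0.8
#   # with Lgth_Wet = 100.0 and Area_Wet = 250.0:
#   lengthPercent = 2.0 / (100.0 * 2) * 100       # 1.0 (both banks count)
#   areaPercent = 0.8 / (250.0 + 0.8) * 100       # ~0.32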
def _CenterlineSummaryMetrics(self, dMetrics):
    """
    Summarize the per-part centerline metrics into site-level metrics
    :param dMetrics: dictionary of centerline part metrics, keyed by part index
    :return: None; populates self.metrics
    """
    lMainParts = [x for x in dMetrics.itervalues() if x['Type'] == 'Main']
    lSideParts = [x for x in dMetrics.itervalues() if x['Type'] != 'Main']

    if len(lMainParts) < 1:
        raise DataException("Zero number of mainstem channel parts.")

    self.metrics['ChannelCount'] = len(dMetrics)
    self.metrics['MainstemCount'] = len(lMainParts)
    self.metrics['SideChannelCount'] = len(lSideParts)

    self.metrics['MainstemLength'] = sum([fLen['Length'] for fLen in lMainParts])

    # _CenterlinePartMetrics raises on zero-length parts, so this denominator is never zero
    self.metrics['MainstemSinuosity'] = self.metrics['MainstemLength'] / sum([fLen['StraightLength'] for fLen in lMainParts])
    self.metrics['SideChannelLength'] = sum([fLen['Length'] for fLen in lSideParts])
    self.metrics['TotalChannelLength'] = self.metrics['MainstemLength'] + self.metrics['SideChannelLength']

    # Let's avoid some divisions by zero
    if self.metrics['ChannelCount'] > 0:
        self.metrics['AverageSideChannelLength'] = self.metrics['TotalChannelLength'] / self.metrics['ChannelCount']

    if self.metrics['MainstemLength'] > 0:
        self.metrics['Braidedness'] = (self.metrics['MainstemLength'] + self.metrics['SideChannelLength']) / self.metrics['MainstemLength']

    if len(dMetrics) > 1:
        self.metrics['ChannelType'] = 'complex'
    else:
        self.metrics['ChannelType'] = 'simple'
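# A minimal sketch (hypothetical part metrics, not real data) of how the
# summary rolls up: sinuosity is curved over straight length, braidedness is
# total over mainstem length.
#
#   dMetrics = {1: {'Type': 'Main', 'Length': 120.0, 'StraightLength': 100.0},
#               2: {'Type': 'Side', 'Length': 30.0, 'StraightLength': 25.0}}
#   MainstemSinuosity = 120.0 / 100.0         # 1.2
#   Braidedness = (120.0 + 30.0) / 120.0      # 1.25
#   ChannelType = 'complex'                   # more than one part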
def calc(self, a, b, c):
    # Validate before storing so self.metrics is never left partially populated
    if a <= 0 or b <= 0 or c <= 0:
        raise DataException("Metric inputs 'a', 'b' and 'c' must all be positive")

    self.metrics['a'] = a
    self.metrics['b'] = b
    self.metrics['c'] = c
def _CenterlinePartMetrics(self, centerline):
    """
    Calculate metrics for each centerline part
    :param centerline: path to the centerline ShapeFile
    :return: dictionary of metrics for each centerline part, keyed by part index
    """
    self.log.info("Loading centerline shapefile: {}".format(centerline))
    clShp = Shapefile(centerline)
    clList = clShp.featuresToShapely()

    dMetrics = {}
    lineIndex = 1
    for aLine in clList:
        if type(aLine['geometry']) is MultiLineString:
            raise DataException('Multipart features in centerline')

        curvedLength = aLine['geometry'].length
        firstPoint = Point(aLine['geometry'].coords[0])
        lastPoint = Point(aLine['geometry'].coords[-1])
        straightLength = firstPoint.distance(lastPoint)

        if straightLength == 0:
            raise DataException("Zero length centerline feature encountered")

        if 'Channel' not in aLine['fields']:
            raise DataException("Centerline 'Channel' field missing")
        if aLine['fields']['Channel'] is None:
            raise DataException("Centerline 'Channel' field contains no data")

        dMetrics[lineIndex] = {}
        dMetrics[lineIndex]['Type'] = aLine['fields']['Channel']
        dMetrics[lineIndex]['Length'] = curvedLength
        dMetrics[lineIndex]['Sinuosity'] = curvedLength / straightLength
        dMetrics[lineIndex]['StraightLength'] = straightLength
        lineIndex += 1

    return dMetrics
def visitFishCountMetrics(visitMetrics, visitobj):
    snorkelFish = visitobj['snorkelFish']
    snorkelFishBinned = visitobj['snorkelFishBinned']
    snorkelFishSteelheadBinned = visitobj['snorkelFishSteelheadBinned']

    for mItem in metricMapping:
        try:
            visitFishCountMetricsForSpecies(visitMetrics, snorkelFish, snorkelFishBinned, snorkelFishSteelheadBinned, mItem[0], mItem[1])
        except AttributeError, e:
            raise DataException("visitFishCountMetricsForSpecies: Missing attribute for item: {}, {}".format(str(mItem[0]), str(mItem[1])))
def tier1FishCountMetrics(tier1Metrics, visitobj):
    channelUnits = visitobj['channelUnits']
    snorkelFish = visitobj['snorkelFish']
    snorkelFishBinned = visitobj['snorkelFishBinned']
    snorkelFishSteelheadBinned = visitobj['snorkelFishSteelheadBinned']

    for mItem in metricMapping:
        try:
            tier1FishCountMetricsForSpecies(tier1Metrics, channelUnits, snorkelFish, snorkelFishBinned, snorkelFishSteelheadBinned, mItem[0], mItem[1])
        except AttributeError, e:
            raise DataException("tier1FishCountMetrics: Missing attribute for item: {}, {}".format(str(mItem[0]), str(mItem[1])))
def interpolatePointsAlongLine(line, fStationInterval):
    """
    Given a line (LineString) and a station spacing, return regularly spaced points along that line
    :param line: shapely LineString
    :param fStationInterval: distance between stations
    :return: list of shapely Points
    """
    try:
        points = [line.interpolate(currDist) for currDist in np.arange(0, line.length, fStationInterval)]
    except TypeError, e:
        raise DataException("Error interpolating thalweg in channel unit. Only linear types support this operation. Type of 'line' is '{}'".format(line.type))

    return points
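# Usage sketch (assumes shapely's LineString and numpy as already imported in
# this module):
#
#   line = LineString([(0, 0), (10, 0)])
#   pts = interpolatePointsAlongLine(line, 2.5)
#   # points at distances 0, 2.5, 5.0 and 7.5; np.arange stops short of
#   # line.length, so the end of the line itself is not included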
def dryWidth(xs, rivershapeWithDonuts):
    """
    :param xs: shapely cross section object
    :param rivershapeWithDonuts: Polygon with non-qualifying donuts retained
    :return: combined length of the cross section that intersects the wetted polygon
    """
    log = Logger("dryWidth")

    # Get all intersects of this cross section with the rivershape
    try:
        # buffer(0) clears up invalid geometries (KMW)
        intersects = xs.intersection(rivershapeWithDonuts.buffer(0))
    except TopologicalError as e:
        log.error(e.message)
        raise DataException("Could not perform intersection on `rivershapeWithDonuts`. Look for small, invalid islands as a possible cause.")

    # The intersect may be one object (LineString) or many. We have to handle both cases
    if intersects.type == "LineString":
        intersects = MultiLineString([intersects])
    elif intersects.type == "Point":
        return 0

    return sum([intersect.length for intersect in intersects])
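# A hedged sketch of the geometry involved (hypothetical shapes): a cross
# section over a wetted polygon with a retained island (donut) intersects as a
# MultiLineString, and the segment lengths sum to the wetted width.
#
#   river = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)],
#                   [[(4, 4), (6, 4), (6, 6), (4, 6)]])   # 2 m square island
#   xs = LineString([(0, 5), (10, 5)])
#   xs.intersection(river)   # two 4 m segments -> dryWidth returns 8.0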
def calc(self, apiData):
    """
    Calculate large wood metrics
    :param apiData: dictionary of API data. Key is API call name. Value is API data
    :return: metrics dictionary
    """
    self.log.info("Running Large Wood Metrics")

    # Retrieve the site wetted length from the latest topo metrics
    metricInstance = latestMetricInstance(apiData['TopoVisitMetrics'])
    if metricInstance is None:
        raise MissingException('Missing topo visit metric instance')
    siteWettedLength = metricInstance['Lgth_Wet']

    if apiData['VisitDetails']['sampleYear'] < 2014:
        woodData = [val['value'] for val in apiData['LargeWoodyDebris']['values']]

        # Only 2011 and 2012 have separate wood jam data
        jamData = None
        if 'WoodyDebrisJam' in apiData:
            jamData = [val['value'] for val in apiData['WoodyDebrisJam']['values']]

        metrics = LargeWoodMetrics._calcFrequency2011to2013(woodData, jamData, siteWettedLength)
    else:
        if 'LargeWoodyPiece' not in apiData:
            raise DataException("LargeWoodyPiece needed and not found.")
        woodData = [val['value'] for val in apiData['LargeWoodyPiece']['values']]
        metrics = LargeWoodMetrics._calcFrequency2014On(woodData, siteWettedLength)

    self.metrics = {'VisitMetrics': {'Frequency': metrics}}
def hydro_gis_export(hydro_project_xml, topo_project_xml, outfolder):
    """
    Export hydro model results to GIS datasets
    :param hydro_project_xml: path to the hydro project.rs.xml
    :param topo_project_xml: path to the topo project.rs.xml
    :param outfolder: output folder for the exported datasets
    :return: 0 for success
    """
    # gdal.UseExceptions()
    log = Logger("Hydro GIS Export")

    # 1. Read the project.rs.xml files
    rs_hydro = Project(hydro_project_xml)
    rs_topo = TopoProject(topo_project_xml)
    hydro_results_folder = os.path.dirname(hydro_project_xml)

    if "Visit" not in rs_hydro.ProjectMetadata:
        raise MissingException("Cannot Find Visit ID")
    visit_id = rs_hydro.ProjectMetadata['Visit']

    # 2. Load the DEM properties so the output rasters match them
    dem = gdal.Open(rs_topo.getpath("DEM"))
    dem_srs = dem.GetProjection()
    dem_x_size = dem.RasterXSize
    dem_y_size = dem.RasterYSize
    dem_band = dem.GetRasterBand(1)
    dem_ndv = dem_band.GetNoDataValue()
    dem_geotransform = dem.GetGeoTransform()

    # 3. Get data columns in csv file
    csvfile = os.path.join(hydro_results_folder, "dem_grid_results.csv")
    csvfile_clean = os.path.join(hydro_results_folder, "dem_grid_results_clean_header.csv")
    if not os.path.isfile(csvfile):
        raise MissingException("Required file {} does not exist.".format(csvfile))

    with open(csvfile, "rb") as f_in, open(csvfile_clean, "wb") as f_out:
        reader = csv.reader(f_in)
        # writer = csv.writer(f_out)
        cols = [col for col in reader.next() if col not in ["Y", "X"]]
        # cols = [col.replace(".", "_") for col in reader.next() if col not in ["Y", "X"]]
        log.info("Loaded fields from csv file.")
        # writer.writerow(['X', 'Y'] + cols)
        # for row in reader:
        #     writer.writerow(row)
        # log.info("Saved csv file with sanitized headers.")

    # Write VRT file
    vrt = os.path.join(hydro_results_folder, '{}.vrt'.format("dem_grid_results"))
    with open(vrt, 'wt') as f:
        f.write('<OGRVRTDataSource>\n')
        f.write('\t<OGRVRTLayer name="{}">\n'.format("dem_grid_results"))
        f.write('\t\t<SrcDataSource>{}</SrcDataSource>\n'.format(csvfile))
        f.write('\t\t<SrcLayer>{}</SrcLayer>\n'.format("dem_grid_results"))
        f.write('\t\t<GeometryType>wkbPoint25D</GeometryType>\n')
        f.write('\t\t<LayerSRS>{}</LayerSRS>\n'.format(dem_srs))
        f.write('\t\t<GeometryField encoding="PointFromColumns" x="X" y="Y" />\n')
        for field in cols:
            f.write('\t\t<Field name="{}" type="Real" subtype="Float32" />\n'.format(field))
        f.write('\t</OGRVRTLayer>\n')
        f.write('</OGRVRTDataSource>\n')
    log.info("Generated vrt file {}".format(vrt))

    # Open csv as OGR
    ogr_vrt = ogr.Open(vrt, 1)
    if ogr_vrt is None:
        raise DataException("Unable to open {}".format(vrt))
    layer = ogr_vrt.GetLayer()

    # 4. Generate a geotiff for each column in the CSV file
    driver = gdal.GetDriverByName("GTiff")
    for col in cols:
        out_tif = os.path.join(outfolder, '{}.tif'.format(col))
        out_raster = driver.Create(out_tif, dem_x_size, dem_y_size, 1, gdalconst.GDT_Float32)
        out_raster.SetGeoTransform(dem_geotransform)
        out_raster.SetProjection(dem_srs)
        band = out_raster.GetRasterBand(1)
        band.SetNoDataValue(dem_ndv)
        band.FlushCache()
        gdal.RasterizeLayer(out_raster, [1], layer, options=["ATTRIBUTE={}".format(col)])
        band.GetStatistics(0, 1)
        band.FlushCache()
        out_raster.FlushCache()
        log.info("Generated {} for attribute {}".format(out_tif, col))

        if col == "Depth":
            # Build a boolean raster of wetted cells (Depth > 0) and polygonize it
            raw = numpy.array(band.ReadAsArray())
            masked = numpy.ma.masked_array(raw, raw == dem_ndv)
            bool_raster = numpy.array(masked, "bool")
            numpy.greater(masked, 0, bool_raster)
            raster_mem = gdal.GetDriverByName("GTiff").Create(os.path.join(outfolder, "Temp.tif"), dem_x_size, dem_y_size, 1, gdalconst.GDT_Int16)
            raster_mem.SetGeoTransform(dem_geotransform)
            raster_mem.SetProjection(dem_srs)
            band_mem = raster_mem.GetRasterBand(1)
            band_mem.WriteArray(bool_raster, 0, 0)
            band_mem.SetNoDataValue(dem_ndv)
            band_mem.FlushCache()

            temp = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource(os.path.join(outfolder, "TempExtent.shp"))
            temp_layer = temp.CreateLayer("RawExtent", osr.SpatialReference(wkt=dem_srs), ogr.wkbPolygon)
            temp_layer.CreateField(ogr.FieldDefn("Value", ogr.OFTInteger))
            temp_layer.CreateField(ogr.FieldDefn("Area", ogr.OFTReal))
            gdal.Polygonize(band_mem, None, temp_layer, 0)
            del raster_mem

            # for feature in temp_layer:
            #     feature.SetField("Area", feature.GetGeometryRef().GetArea())
            #     temp_layer.SetFeature(feature)

            # Stage Extent
            # temp_layer.SetAttributeFilter("Value=1")
            # shp_extent = os.path.join(outfolder, "StageExtent.shp")
            # driver_extent = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource(shp_extent)
            # driver_extent.CopyLayer(temp_layer, "StageExtent")
            # driver_extent = None
            # ogr_extent = ogr.Open(shp_extent, 1)
            # layer_extent = ogr_extent.GetLayer("StageExtent")
            # field_extent = ogr.FieldDefn("ExtentType", ogr.OFTString)
            # layer_extent.CreateField(field_extent)
            # area_current = 0.0
            # fid_current = None
            # for feature in layer_extent:
            #     area_feat = feature.GetGeometryRef().GetArea()
            #     if area_feat > area_current:
            #         area_current = area_feat
            #         fid_current = feature.GetFID()
            #
            # edit_feat = layer_extent.GetFeature(fid_current)
            # edit_feat.SetField("ExtentType", "Channel")
            # layer_extent.SetFeature(edit_feat)
            #
            # layer_extent.DeleteField(layer_extent.FindFieldIndex("Value", True))
            # ogr_extent.Destroy()
            # log.info("Generated Stage Extent Shapefile {}".format(shp_extent))

            # Stage Islands
            # import time
            # time.sleep(5)
            # temp_layer.ResetReading()
            # temp_layer.SetAttributeFilter("Value=0")
            # shp_islands = os.path.join(outfolder, "StageIslands.shp")
            # driver_islands = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource(shp_islands)
            # driver_islands.CopyLayer(temp_layer, "StageIslands")
            # driver_islands = None
            # ogr_islands = ogr.Open(shp_islands, 1)
            # layer_islands = ogr_islands.GetLayer("StageIslands")
            #
            # field_qual = ogr.FieldDefn("Qualifying", ogr.OFTInteger)
            # field_qual.SetDefault("0")
            # field_valid = ogr.FieldDefn("IsValid", ogr.OFTInteger)
            # field_valid.SetDefault("0")
            # layer_islands.CreateField(field_qual)
            # layer_islands.CreateField(field_valid)
            # layer_islands.SyncToDisk()
            #
            # area_current = 0.0
            # fid_current = None
            # for feature in layer_islands:
            #     if feature is not None:
            #         g = feature.GetGeometryRef()
            #         area_feat = g.GetArea()
            #         # todo identify qualifying islands here?
            #         if area_feat > area_current:
            #             area_current = area_feat
            #             fid_current = feature.GetFID()
            #
            # feat_del = layer_islands.GetFeature(fid_current)
            # layer_islands.DeleteFeature(fid_current)
            #
            # layer_islands.DeleteField(layer_islands.FindFieldIndex("Value", True))
            # ogr_islands = None
            # ogr_extent = None
            # log.info("Generated Stage Islands Shapefile {}".format(shp_islands))

            temp = None

        del out_raster

    shp_hydroresults = os.path.join(outfolder, "HydroResults.shp")
    ogr.GetDriverByName("ESRI Shapefile").CopyDataSource(ogr_vrt, shp_hydroresults)
    # out_shp = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource()
    # ogr_shp = ogr.Open(shp_hydroresults, 1)
    # lyr = ogr_shp.GetLayer()
    # lyr_defn = lyr.GetLayerDefn()
    # for i in range(lyr_defn.GetFieldCount()):
    #     fielddefn = lyr_defn.GetFieldDefn(i)
    #     fielddefn.SetName(fielddefn.GetName().replace(".", "_"))
    #     lyr.AlterFieldDefn(i, fielddefn, ogr.ALTER_NAME_FLAG)
    #
    # new_field = ogr.FieldDefn('V_Bearing', ogr.OFTReal)
    # lyr.CreateField(new_field)
    #
    # Calculate Velocity Bearing
    # for feat in lyr:
    #     vel_x = feat.GetField("X_Velocity")
    #     vel_y = feat.GetField("Y_Velocity")
    #     dir = 90 - math.degrees(math.atan2(float(vel_y), float(vel_x)))
    #     bearing = 360 + dir if dir < 0 else dir
    #     feat.SetField('V_Bearing', float(bearing))
    #     lyr.SetFeature(feat)

    log.info("Generated Hydro Results Shapefile {}".format(shp_hydroresults))
    ogr_vrt = None
    ogr_shp = None

    return 0
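# The disabled block above converts x/y velocity components to a compass
# bearing (0-360, clockwise from north). A standalone sketch of that
# conversion (names are illustrative, not part of the module):
#
#   import math
#
#   def velocity_bearing(vel_x, vel_y):
#       direction = 90 - math.degrees(math.atan2(float(vel_y), float(vel_x)))
#       return 360 + direction if direction < 0 else direction
#
#   velocity_bearing(0, 1)    # 0.0   due north
#   velocity_bearing(1, 0)    # 90.0  due east
#   velocity_bearing(-1, 0)   # 270.0 due west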
def generate_substrate_raster(topo_project_folder, out_path, di_values, dict_ocular_values, out_channel_value=4000.0):
    """Generate substrate rasters from channel units and ocular substrate estimates for each di value provided

    :param str topo_project_folder: folder source of the topo project
    :param str out_path: path for outputs
    :param list di_values: list of int percentile values for roughness calculation
    :param dict dict_ocular_values: dictionary of ocular estimates of grain size values
    :param float out_channel_value: roughness value to use for out of channel areas, default = 4000
    :return: 0 for success
    """
    # Load Topo Project
    log = Logger("SubstrateRaster")
    log.info("topo_project_folder: {}".format(str(topo_project_folder)))
    log.info("outputPath: {}".format(str(out_path)))
    log.info("D Values: {}".format(str(di_values)))
    project = topoproject.TopoProject(topo_project_folder)
    topo_rs_project = riverscapes.Project(os.path.join(topo_project_folder, "project.rs.xml"))
    log.info("Topo project loaded")

    # Initialize Riverscapes Project
    rsproject = riverscapes.Project()
    rsproject.create("Substrate", "Substrate", __version__)
    for tagname, tags in {"Site": ["Site", "SiteName"],
                          "Visit": ["Visit", "VisitID"],
                          "Year": ["Year", "FieldSeason"],
                          "Watershed": ["Watershed", "Watershed"]}.iteritems():
        if tags[0] in topo_rs_project.ProjectMetadata or tags[1] in topo_rs_project.ProjectMetadata:
            rsproject.addProjectMetadata(tagname, topo_rs_project.ProjectMetadata[tags[0]]
                                         if tags[0] in topo_rs_project.ProjectMetadata
                                         else topo_rs_project.ProjectMetadata[tags[1]])
        else:
            raise DataException("Missing project metadata")

    # 1. Calculate a roughness value for each channel unit for each di
    dict_di_roughness_values = {}
    list_keep_units = []
    for di in di_values:
        dict_units = dict_ocular_by_unit(dict_ocular_values)
        dict_roughness_values = {}
        for unitid, dict_unit in dict_units.iteritems():
            if all(dict_unit[key] is not None for key in ["Bedrock", "Boulders", "Cobbles", "CourseGravel", "FineGravel", "Fines", "Sand"]):
                dict_roughness_values[int(unitid)] = calculate_grain_size(dict_unit, di)
                if unitid not in list_keep_units:
                    list_keep_units.append(unitid)
            else:
                log.warning("Missing Channel Unit Substrate Values for Unit {}.".format(str(unitid)))

        # Out of Channel "UnitNumber" == 0
        dict_roughness_values[0] = float(out_channel_value)
        dict_di_roughness_values[di] = pandas.DataFrame(list(dict_roughness_values.iteritems()),
                                                        index=dict_roughness_values.keys(),
                                                        columns=["UnitNumber", "Roughness"])
        log.info("Calculated Roughness Values for D{}".format(str(di)))

    # 2. Spread the channel unit areas out to the bankfull extent
    gdf_expanded_channel_units = expand_polygons(project.getpath("ChannelUnits"),
                                                 project.getpath("BankfullExtent"),
                                                 keep_units=list_keep_units)
    log.info("Channel Units expanded to Bankfull Area")

    # 3. Add DEM area
    gdf_demextent = geopandas.GeoDataFrame.from_features(geopandas.GeoSeries(get_data_polygon(project.getpath("DEM"))))
    if not all(gdf_demextent.geometry.is_valid):
        gdf_demextent.geometry = gdf_demextent.geometry.buffer(0)
        log.info("Fixed invalid geometries for DEM Extent")
    gdf_demextent["UnitNumber"] = 0
    gdf_in_channel_union = geopandas.GeoDataFrame.from_features(geopandas.GeoSeries(gdf_expanded_channel_units.unary_union.buffer(0)))
    gdf_out_of_channel = geopandas.overlay(gdf_demextent, gdf_in_channel_union, "difference")
    gdf_full_polygons = gdf_expanded_channel_units.append(gdf_out_of_channel)
    log.info("Out of Channel Area generated")

    for di, df_roughness_values in dict_di_roughness_values.iteritems():
        # 4. Join the roughness values to the channel unit polygons
        gdf_full_polygons_merged = gdf_full_polygons.merge(df_roughness_values, on="UnitNumber")
        gdf_final_polys = gdf_full_polygons_merged.rename(columns={"Roughness_y": "Roughness"})
        gdf_final_polys.drop([col for col in gdf_final_polys.columns
                              if col not in ["UnitNumber", "Roughness", "geometry"]],
                             axis=1, inplace=True)
        log.info("Roughness Values added to Channel Units for D{}".format(str(di)))

        # 5. Rasterize the polygons
        raster_substrate = path.join(out_path, "substrate_D{}.tif".format(str(di)))
        shp_substrate = path.join(out_path, "substrate_D{}.shp".format(str(di)))
        gdf_final_polys.to_file(shp_substrate)
        log.info("Saved Substrate Shapefile: {}".format(shp_substrate))
        rasterize_polygons(shp_substrate, project.getpath("DEM"), raster_substrate, "Roughness")
        log.info("Created Substrate Raster: {}".format(raster_substrate))

        # Add Realization to Riverscapes
        realization = riverscapes.Realization("Substrate")
        realization.name = "Substrate_D{}".format(str(di))
        realization.productVersion = __version__
        ds_shapefile = riverscapes.Dataset().create("Substrate_Shapefile", "substrate_D{}.shp".format(str(di)))
        ds_raster = riverscapes.Dataset().create("Substrate_Raster", "substrate_D{}.tif".format(str(di)))
        ds_shapefile.metadata["D_Value"] = str(di)
        ds_raster.metadata["D_Value"] = str(di)
        ds_shapefile.id = "substrate_shapefile_d{}".format(str(di))
        ds_raster.id = "substrate_raster_d{}".format(str(di))
        realization.outputs[ds_shapefile.name] = ds_shapefile
        realization.outputs[ds_raster.name] = ds_raster
        rsproject.addRealization(realization)

    # Write Riverscapes Project
    rsprojectxml = os.path.join(out_path, "project.rs.xml")
    rsproject.writeProjectXML(rsprojectxml)
    log.info("Riverscapes Project file saved: {}".format(rsprojectxml))

    return 0
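# The per-di lookup table built in step 1 pairs each channel unit number with
# a roughness value; a minimal sketch with hypothetical values (unit 0 is the
# out-of-channel placeholder):
#
#   dict_roughness_values = {0: 4000.0, 1: 62.5, 2: 91.0}
#   pandas.DataFrame(list(dict_roughness_values.iteritems()),
#                    index=dict_roughness_values.keys(),
#                    columns=["UnitNumber", "Roughness"])
#   # step 4 then merges this onto the channel unit polygons on "UnitNumber"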
def calc(self, shpCUPath, shpThalweg, rasDepth, visitMetrics, dUnits, unitDefs):
    if not os.path.isfile(shpCUPath):
        raise MissingException("Channel units file not found")
    if not os.path.isfile(shpThalweg):
        raise MissingException("Thalweg shape file not found")
    if not os.path.isfile(rasDepth):
        raise MissingException("Depth raster file not found")

    siteLength = visitMetrics['Wetted']['Centerline']['MainstemLength']
    if siteLength is None:
        raise DataException("No valid site length found in visit metrics")

    # Give us a fresh template with 0's in the value positions
    self.metrics = self._templateMaker(0, unitDefs)
    dResultsChannelSummary = self.metrics['ResultsChannelSummary']
    dResultsTier1 = self.metrics['ResultsTier1']
    dResultsTier2 = self.metrics['ResultsTier2']
    resultsCU = self.metrics['resultsCU']

    # Load the Thalweg feature
    thalweg = Shapefile(shpThalweg).featuresToShapely()
    thalwegLine = thalweg[0]['geometry']

    # Load the depth raster
    depthRaster = Raster(rasDepth)

    # Load the channel unit polygons and calculate the total area.
    # The channel units should be clipped to the wetted extent and so this
    # can be used as the site area
    shpCU = Shapefile(shpCUPath)
    arrCU = depthRaster.rasterMaskLayer(shpCUPath, "UnitNumber")

    feats = shpCU.featuresToShapely()
    for aFeat in feats:
        dResultsChannelSummary['Main']['Area'] += aFeat['geometry'].area

    # Loop over each channel unit and calculate topometrics
    for aFeat in feats:
        nCUNumber = int(aFeat['fields']['UnitNumber'])
        if nCUNumber not in dUnits:
            self.log.error("Channel Unit: '{0}' not present in the aux data.".format(nCUNumber))
            # Keep it general for the exception so we can aggregate them
            raise DataException("The Channel Unit ShapeFile contains a unit number that is not present in the aux data.")

        tier1Name = dUnits[nCUNumber][0]
        tier2Name = dUnits[nCUNumber][1]
        nSegment = dUnits[nCUNumber][2]
        # print "Channel Unit Number {0}, Segment {1}, Tier 1 - {2}, Tier 2 - {3}".format(nCUNumber, nSegment, tier1Name, tier2Name)
        unitMetrics = {}
        resultsCU.append(unitMetrics)
        unitMetrics['ChannelUnitNumber'] = nCUNumber
        unitMetrics['Area'] = aFeat['geometry'].area
        unitMetrics['Tier1'] = tier1Name
        unitMetrics['Tier2'] = tier2Name
        unitMetrics['Length'] = None
        unitMetrics['ResidualDepth'] = None
        unitMetrics['DepthAtThalwegExit'] = None
        unitMetrics['ThalwegIntersect'] = 0

        # Get the depth raster for this unit as a variable so we can check
        # whether it is entirely masked below.
        depArr = depthRaster.array[arrCU == nCUNumber]
        if depArr.count() == 0:
            unitMetrics['MaxDepth'] = 0
            unitMetrics['Volume'] = 0
        else:
            unitMetrics['MaxDepth'] = np.max(depArr)
            unitMetrics['Volume'] = np.sum(depthRaster.array[arrCU == nCUNumber]) * (depthRaster.cellWidth ** 2)

        if nSegment != 1:
            dSideChannelSummary = dResultsChannelSummary['SideChannelSummary']
            dMain = dResultsChannelSummary['Main']

            # Side channel summary captures both small and large side channels
            dSideChannelSummary['Area'] += aFeat['geometry'].area
            dSideChannelSummary['Count'] += 1
            dSideChannelSummary['Percent'] = 100 * dSideChannelSummary['Area'] / dMain['Area']
            dSideChannelSummary['Volume'] += unitMetrics['Volume']

            if 'side' in tier1Name.lower():
                dSmallSideChannel = dResultsChannelSummary['SmallSideChannel']
                dSmallSideChannel['Area'] += aFeat['geometry'].area
                dSmallSideChannel['Count'] += 1
                dSmallSideChannel['Percent'] = 100 * dSmallSideChannel['Area'] / dMain['Area']
                dSmallSideChannel['Volume'] += unitMetrics['Volume']
            else:
                dLargeSideChannel = dResultsChannelSummary['LargeSideChannel']
                dLargeSideChannel['Area'] += aFeat['geometry'].area
                dLargeSideChannel['Count'] += 1
                dLargeSideChannel['Percent'] = 100 * dLargeSideChannel['Area'] / dMain['Area']
                dLargeSideChannel['Volume'] += unitMetrics['Volume']

        if tier1Name is None:
            raise DataException("tier1Name cannot be 'None'")

        if 'side' in tier1Name.lower():
            dResultsChannelSummary['ChannelUnitBreakdown']['SmallSideChannel'] += 1
        else:
            dResultsChannelSummary['ChannelUnitBreakdown']['Other'] += 1

        if thalwegLine.intersects(aFeat['geometry']):
            cuThalwegLine = thalwegLine.intersection(aFeat['geometry'])

            exitPoint = None
            if cuThalwegLine.type == 'LineString':
                exitPoint = cuThalwegLine.coords[0]
            else:
                exitPoint = cuThalwegLine[0].coords[0]

            # Retrieve a list of points along the Thalweg in the channel unit
            thalwegPoints = ChannelUnitMetrics.interpolatePointsAlongLine(cuThalwegLine, 0.13)
            thalwegDepths = ChannelUnitMetrics.lookupRasterValuesAtPoints(thalwegPoints, depthRaster)
            unitMetrics['MaxDepth'] = np.nanmax(thalwegDepths['values'])
            unitMetrics['DepthAtThalwegExit'] = depthRaster.getPixelVal(exitPoint)
            unitMetrics['ResidualDepth'] = unitMetrics['MaxDepth'] - unitMetrics['DepthAtThalwegExit']
            unitMetrics['Length'] = cuThalwegLine.length
            unitMetrics['ThalwegIntersect'] = 1

        # Tier 1 and tier 2 topometrics. Note that metric dictionary keys are
        # used for XML tags and require cleaning
        tier1NameClean = getCleanTierName(tier1Name)
        self._calcTierLevelMetrics(dResultsTier1[tier1NameClean], tier1Name, unitMetrics, siteLength, dResultsChannelSummary['Main']['Area'])

        tier2NameClean = getCleanTierName(tier2Name)
        self._calcTierLevelMetrics(dResultsTier2[tier2NameClean], tier2Name, unitMetrics, siteLength, dResultsChannelSummary['Main']['Area'])

    # Calculate the average of the channel unit max depths for each tier 1 and tier 2 type
    for tierKey, tierMetrics in {'Tier1': dResultsTier1, 'Tier2': dResultsTier2}.iteritems():
        for tierName, metricDict in tierMetrics.iteritems():
            maxDepthList = [aResult['MaxDepth'] for aResult in resultsCU if getCleanTierName(aResult[tierKey]) == tierName]
            if len(maxDepthList) > 0:
                metricDict['AvgMaxDepth'] = np.average(maxDepthList)

    # Convert the sum of residual depth and depth at thalweg exit
    # to an average for each tier 1 and tier 2 type
    for tierMetricDict in [dResultsTier1, dResultsTier2]:
        for tierName, tierMetrics in tierMetricDict.iteritems():
            # Channel unit types that don't occur should retain the value None
            # for ResidualDepth and DepthAtThalwegExit
            if tierMetrics['Count'] > 0 and tierMetrics['ThalwegIntersectCount'] > 0:
                for metricName in ['ResidualDepth', 'DepthAtThalwegExit']:
                    if tierMetrics[metricName] is not None and tierMetrics[metricName] != 0:
                        tierMetrics[metricName] = tierMetrics[metricName] / tierMetrics['ThalwegIntersectCount']
                    else:
                        tierMetrics[metricName] = 0
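# Residual depth in the loop above is the drop from the deepest thalweg sample
# within a unit to the depth where the thalweg exits it; with hypothetical
# values:
#
#   MaxDepth = 1.2             # deepest thalweg sample in the unit (m)
#   DepthAtThalwegExit = 0.5   # depth at the exit point (m)
#   ResidualDepth = 1.2 - 0.5  # 0.7 m of pool depth retained at low flow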
def getCleanTierName(sTierName):
    try:
        return sTierName.replace(' ', '').replace('/', '').replace('-', '')
    except Exception, e:
        raise DataException("Invalid or null tier name passed to 'getCleanTierName()'")
def getfield(feat, listfields):
    for field in listfields:
        if field in feat['fields']:
            return field
    raise DataException('Could not find any of the fields {} in shapefile'.format(str(listfields)))
def export_cad_files(project_xml, out_path):
    """Exports dxf files containing TIN components of the topo TIN and the topographic survey points, lines and survey extent"""

    log = Logger("CADExport")

    # Load Topo project
    log.info("Load Topo project")
    project = topoproject.TopoProject(project_xml)

    # TIN stuff
    log.info("Beginning TIN Work")
    tin = TIN(project.getpath("TopoTin"))
    dict_tinlayers = {}
    dict_tinlayers["tin_points"] = {"layer_type": "POINT", "Features": [feat for feat in tin.nodes.values()]}
    dict_tinlayers["tin_lines"] = {"layer_type": "POLYLINE", "Features": [feat['geometry'] for feat in tin.breaklines.values()]}  # , "linetype_field":"LineType"}
    dict_tinlayers["tin_area"] = {"layer_type": "POLYGON", "Features": [feat for feat in tin.hull_polygons.values()]}

    out_tin_dxf = export_as_dxf(dict_tinlayers, os.path.join(out_path, "TopoTin.dxf"))

    # Topo Stuff
    log.info("Beginning Topo Work")
    shpTopo = Shapefile(project.getpath("Topo_Points"))
    shpEOW = Shapefile(project.getpath("EdgeofWater_Points"))
    shpCP = Shapefile(project.getpath("Control_Points"))
    shpBL = Shapefile(project.getpath("Breaklines")) if project.layer_exists("Breaklines") else None
    shpExtent = Shapefile(project.getpath("Survey_Extent"))

    dict_topolayers = {}
    dict_topolayers["Topo_Points"] = {"layer_type": "POINT", "Features": [feat['geometry'] for feat in shpTopo.featuresToShapely()]}
    dict_topolayers["EdgeofWater_Points"] = {"layer_type": "POINT", "Features": [feat['geometry'] for feat in shpEOW.featuresToShapely()]}
    dict_topolayers["Control_Points"] = {"layer_type": "POINT", "Features": [feat['geometry'] for feat in shpCP.featuresToShapely()]}
    dict_topolayers["Breaklines"] = {"layer_type": "POLYLINE", "Features": [feat['geometry'] for feat in shpBL.featuresToShapely()]} if shpBL else None
    dict_topolayers["Survey_Extent"] = {"layer_type": "POLYGON", "Features": [feat['geometry'] for feat in shpExtent.featuresToShapely()]}

    out_topo_dxf = export_as_dxf(dict_topolayers, os.path.join(out_path, "SurveyTopography.dxf"))
    out_topo_csv = exportAsCSV(shpTopo.featuresToShapely() + shpEOW.featuresToShapely(), os.path.join(out_path, "SurveyTopographyPoints.csv"))
    out_control_csv = exportAsCSV(shpCP.featuresToShapely(), os.path.join(out_path, "ControlNetworkPoints.csv"))

    topo_rs_project = riverscapes.Project(project_xml)
    out_project = riverscapes.Project()
    out_project.create("CHaMP_Survey_CAD_Export", "CAD_Export", __version__)
    out_project.addProjectMetadata("Watershed", topo_rs_project.ProjectMetadata["Watershed"])

    # Find previous meta tags
    for tagname, tags in {"Site": ["Site", "SiteName"],
                          "Visit": ["Visit", "VisitID"],
                          "Year": ["Year", "FieldSeason"],
                          "Watershed": ["Watershed", "Watershed"]}.iteritems():
        if tags[0] in topo_rs_project.ProjectMetadata or tags[1] in topo_rs_project.ProjectMetadata:
            out_project.addProjectMetadata(tagname, topo_rs_project.ProjectMetadata[tags[0]]
                                           if tags[0] in topo_rs_project.ProjectMetadata
                                           else topo_rs_project.ProjectMetadata[tags[1]])
        else:
            raise DataException("Missing project metadata")

    out_realization = riverscapes.Realization("CAD_Export")
    out_realization.name = "CHaMP Survey CAD Export"
    out_realization.productVersion = out_project.projectVersion

    ds = []
    ds.append(out_project.addInputDataset("TopoTin", "tin", None, None, "TIN", project.get_guid("TopoTin")))
    ds.append(out_project.addInputDataset("Topo_Points", "topo_points", None, guid=project.get_guid("Topo_Points")))
    ds.append(out_project.addInputDataset("EdgeofWater_Points", "eow_points", None, guid=project.get_guid("EdgeofWater_Points")))
    ds.append(out_project.addInputDataset("Control_Points", "control_points", None, guid=project.get_guid("Control_Points")))
    if shpBL:
        ds.append(out_project.addInputDataset("Breaklines", "breaklines", None, guid=project.get_guid("Breaklines")))
    ds.append(out_project.addInputDataset("Survey_Extent", "survey_extent", None, guid=project.get_guid("Survey_Extent")))

    for inputds in ds:
        out_realization.inputs[inputds.name] = inputds.id

    ds_tin_dxf = riverscapes.Dataset()
    ds_tin_dxf.create("TIN_DXF", "TopoTin.dxf")
    ds_tin_dxf.id = 'tin_dxf'
    ds_topo_dxf = riverscapes.Dataset()
    ds_topo_dxf.create("Topo_DXF", "SurveyTopography.dxf")
    ds_topo_dxf.id = 'topo_dxf'
    ds_topo_csv = riverscapes.Dataset()
    ds_topo_csv.create("Topo_CSV", "SurveyTopographyPoints.csv", "CSV")
    ds_topo_csv.id = 'topo_csv'
    ds_con_csv = riverscapes.Dataset()
    ds_con_csv.create("Control_CSV", "ControlNetworkPoints.csv", "CSV")
    ds_con_csv.id = 'control_csv'

    out_realization.outputs.update({"TIN_DXF": ds_tin_dxf,
                                    "Topo_DXF": ds_topo_dxf,
                                    "Topo_CSV": ds_topo_csv,
                                    "Control_CSV": ds_con_csv})

    out_project.addRealization(out_realization)
    out_project.writeProjectXML(os.path.join(out_path, "project.rs.xml"))

    return 0
def calc(self, sThalwegshp, sDepthRaster, sWaterSurfaceRaster, fDist, visitMetrics):
    if not path.isfile(sThalwegshp):
        raise MissingException("Thalweg shapefile missing")
    if not path.isfile(sDepthRaster):
        raise MissingException("Depth raster missing")
    if not path.isfile(sWaterSurfaceRaster):
        raise MissingException("Water surface raster missing")

    wettedMainstemLength = visitMetrics['Wetted']['Centerline']['MainstemLength']
    if wettedMainstemLength is None:
        raise MissingException("No wetted mainstem length found in visit metrics")

    sfile = Shapefile(sThalwegshp).featuresToShapely()
    if len(sfile) < 1:
        raise DataException("Thalweg shapefile has no features")

    thalweg = sfile[0]['geometry']
    depthRaster = Raster(sDepthRaster)
    waterSurfaceRaster = Raster(sWaterSurfaceRaster)
    samplepts = ThalwegMetrics.interpolateRasterAlongLine(thalweg, fDist)
    results = ThalwegMetrics.lookupRasterValues(samplepts, depthRaster)['values']

    # Get the water surface elevations at the first (downstream) and last (upstream) points on the Thalweg
    dsElev = waterSurfaceRaster.getPixelVal(thalweg.coords[0])
    usElev = waterSurfaceRaster.getPixelVal(thalweg.coords[-1])

    if np.isnan(dsElev):
        raise DataException('NoData detected in the water surface raster for the downstream point on the thalweg')
    elif np.isnan(usElev):
        raise DataException('NoData detected in the water surface raster for the upstream point on the thalweg')

    waterSurfaceGradientRatio = (usElev - dsElev) / thalweg.length
    waterSurfaceGradientPC = waterSurfaceGradientRatio * 100.0

    # Thalweg straight length and sinuosity
    firstPoint = Point(thalweg.coords[0])
    lastPoint = Point(thalweg.coords[-1])
    straightLength = firstPoint.distance(lastPoint)
    sinuosity = thalweg.length / straightLength

    self.metrics = {
        'Min': np.nanmin(results),
        'Max': np.nanmax(results),
        'Mean': np.mean(results),
        'StDev': np.std(results),
        'Count': np.count_nonzero(results),
        'Length': thalweg.length,
        'WSGradientRatio': waterSurfaceGradientRatio,
        'WSGradientPC': waterSurfaceGradientPC,
        'Sinuosity': sinuosity,
        'CV': 0.0,
        'ThalwegToCenterlineRatio': thalweg.length / wettedMainstemLength
        # , 'Values': results.data
    }
    if self.metrics['StDev'] != 0 and self.metrics['Mean'] != 0:
        self.metrics['CV'] = self.metrics['StDev'] / self.metrics['Mean']
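# A worked example of the gradient and sinuosity arithmetic (hypothetical
# elevations and lengths):
#
#   dsElev, usElev = 101.2, 103.7               # water surface elevations (m)
#   thalweg.length = 250.0                      # curved length (m)
#   straightLength = 200.0                      # endpoint-to-endpoint (m)
#   WSGradientRatio = (103.7 - 101.2) / 250.0   # 0.01
#   WSGradientPC = 0.01 * 100.0                 # 1.0 %
#   Sinuosity = 250.0 / 200.0                   # 1.25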
def calc(self, crosssections, waterExtent, demPath, stationInterval):
    # Save space by only loading the desired fields from the ShapeFile.
    # We also need the 'Channel' and 'IsValid' fields if they exist.
    desiredFields = CrossSectionMetrics.dMetricTypes.keys()
    desiredFields.append('IsValid')

    # Open the cross section ShapeFile and build a list of all features with a dictionary of the desired fields
    clShp = Shapefile(crosssections)
    lChannels = ['Main']

    if not clShp.loaded:
        return

    if "Channel" in clShp.fields:
        desiredFields.append('Channel')
        lChannels.append('Side')

    # Older cross section layers don't have the width and depth fields calculated.
    # So if all the necessary metric fields are present then continue to load
    # ShapeFile features. Otherwise we need to calculate the topometrics from scratch
    bMetricCalculationNeeded = False
    for aMetric in desiredFields:
        if not aMetric in clShp.fields:
            bMetricCalculationNeeded = True
            break

    allFeatures = []
    if bMetricCalculationNeeded:
        # Retrieve the water extent polygon exterior
        rivershp = Shapefile(waterExtent)
        polyRiverShapeFeats = rivershp.featuresToShapely()

        # Try and find a channel shape. There's a lot of variance here.
        if len(polyRiverShapeFeats) == 0 or 'geometry' not in polyRiverShapeFeats[0]:
            raise DataException("No features in water extent shape file")
        # If there's only one shape then just use it
        elif len(polyRiverShapeFeats) == 1:
            polyRiverShape = polyRiverShapeFeats[0]['geometry']
        # If there's more than one shape then use the one flagged as the channel
        else:
            channelShapes = [feat['geometry'] for feat in polyRiverShapeFeats if feat['fields']['ExtentType'] == 'Channel']
            if len(channelShapes) == 0:
                raise DataException("No channel features in water extent shape file")
            polyRiverShape = channelShapes[0]

        # Calculate the topometrics from scratch for each cross section
        shpXS = Shapefile(crosssections)
        demRaster = Raster(demPath)
        if shpXS.loaded:
            for aFeat in shpXS.featuresToShapely():
                # Calculate the topometrics for this cross section. They will be stored on the aFeat dict under key 'topometrics'
                calcXSMetrics(aFeat, polyRiverShape, demRaster, stationInterval)

                # Build the all-features dictionary that would be expected had the topometrics already
                # existed in the XS ShapeFile and simply got loaded. This is a combination of the new
                # topometrics and also the existing fields on the XS ShapeFile.
                singleXSMetrics = copy.deepcopy(aFeat['topometrics'])
                singleXSMetrics.update(aFeat['fields'])
                allFeatures.append(singleXSMetrics)

        # Destroying the raster object appears to prevent warning messages on Windows
        demRaster = None
    else:
        allFeatures = clShp.attributesToList(desiredFields)

    # For simple ShapeFiles, make every feature part of the main channel, and
    # set every feature as valid. This helps keep the code below generic
    for x in allFeatures:
        if 'Channel' not in x:
            x['Channel'] = 'Main'
        if 'IsValid' not in x:
            x['IsValid'] = 1

    for channelName in lChannels:
        # Filter the list of features to just those in this channel.
        # PGB - 24 Apr 2017 - observed NULL values in 'Channel' ShapeFile field in Harold results.
        # Cast field contents to string to avoid crash here.
        channelFeatures = [x for x in allFeatures if str(x['Channel']).lower() == channelName.lower()]

        # Filter the list of features to just those that the crew considered valid
        validFeatures = [x for x in channelFeatures if x['IsValid'] != 0]

        # Filter the features to just those with a wetted width within 4 standard deviations of the mean wetted width
        channelStatistics = getStatistics(channelFeatures, 'WetWidth')

        autoFeatures = None
        if channelStatistics['StdDev'] is not None:
            wetWidthThreshold = channelStatistics['StdDev'] * 4
            autoFeatures = [x for x in channelFeatures if abs(x['WetWidth'] - channelStatistics['Mean']) < wetWidthThreshold]

        # Loop over each desired metric and calculate the statistics for each filtering type
        for metricName, bestFiltering in CrossSectionMetrics.dMetricTypes.iteritems():
            populateChannelStatistics(self.metrics[channelName], 'None', metricName, channelFeatures)
            populateChannelStatistics(self.metrics[channelName], 'Crew', metricName, validFeatures)
            if channelStatistics['StdDev'] is not None:
                populateChannelStatistics(self.metrics[channelName], 'Auto', metricName, autoFeatures)
            self.metrics[channelName]['Best'] = self.metrics[channelName][bestFiltering]

    # The topometrics for the whole channel are always the results for 'Main'.
    # For complex ShapeFiles this will be just the results for the main channel.
    # For simple, single-threaded ShapeFiles this will be all cross sections.
    self.metrics['Channel'] = self.metrics['Main']
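# A sketch of the 'Auto' four-standard-deviation filter (hypothetical widths):
# a gross outlier inflates the mean and standard deviation but still falls
# outside the 4-sigma band and is excluded.
#
#   widths = [5.0] * 20 + [100.0]
#   mean, std = np.mean(widths), np.std(widths)   # ~9.52, ~20.2
#   auto = [w for w in widths if abs(w - mean) < std * 4]
#   # 100.0 is ~90.5 from the mean, beyond 4 * std (~80.9), so it is dropped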