def main():
    """
    Build gravity reservoirs in GSFLOW: combines MODFLOW grid and HRU
    sub-basins. These define the PRMS soil zone that connects to MODFLOW
    cells.

    Reads the HRU and grid vector maps from the parsed module options,
    overlays them, and writes a gravity-reservoir vector map whose table
    holds HRU/cell IDs, areas, and percent-overlap columns.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # I/O  (removed unused leftover: a second read of options['output']
    # into a never-used local "segments")
    HRUs = options['hru_input']
    grid = options['grid_input']
    gravity_reservoirs = options['output']

    ############
    # ANALYSIS #
    ############

    # Create gravity reservoirs -- overlay cells=grid and HRUs
    v.overlay(ainput=HRUs, binput=grid, atype='area', btype='area',
              operator='and', output=gravity_reservoirs,
              overwrite=gscript.overwrite())
    v.db_dropcolumn(map=gravity_reservoirs, columns='a_cat,a_label,b_cat',
                    quiet=True)
    # Cell and HRU ID's
    v.db_renamecolumn(map=gravity_reservoirs, column=('a_id', 'gvr_hru_id'),
                      quiet=True)
    v.db_renamecolumn(map=gravity_reservoirs, column=('b_id', 'gvr_cell_id'),
                      quiet=True)
    # Percent areas
    v.db_renamecolumn(map=gravity_reservoirs,
                      column=('a_hru_area_m2', 'hru_area_m2'), quiet=True)
    v.db_renamecolumn(map=gravity_reservoirs,
                      column=('b_area_m2', 'cell_area_m2'), quiet=True)
    v.db_addcolumn(map=gravity_reservoirs,
                   columns='area_m2 double precision', quiet=True)
    v.to_db(map=gravity_reservoirs, option='area', units='meters',
            columns='area_m2', quiet=True)
    v.db_addcolumn(map=gravity_reservoirs,
                   columns='gvr_cell_pct double precision, '
                           'gvr_hru_pct double precision',
                   quiet=True)
    v.db_update(map=gravity_reservoirs, column='gvr_cell_pct',
                query_column='100*area_m2/cell_area_m2', quiet=True)
    v.db_update(map=gravity_reservoirs, column='gvr_hru_pct',
                query_column='100*area_m2/hru_area_m2', quiet=True)
    # Discard sliver polygons that occupy a negligible fraction of a cell,
    # then rename the filtered map back over the output
    v.extract(input=gravity_reservoirs, output='tmp_',
              where="gvr_cell_pct > 0.001", overwrite=True, quiet=True)
    g.rename(vector=('tmp_', gravity_reservoirs), overwrite=True, quiet=True)
def raster_to_vector(raster, vector, type):
    """Convert a raster map to a vector (area) map and set up its table.

    Parameters
    ----------
    raster :
        Name of the input raster map
    vector :
        Name for the output vector map
    type :
        Type for the output vector map (currently unused: the output is
        always vectorised as 'area')

    Notes
    -----
    Relies on module-level names ``flow_column_name`` (name of the flow
    column to create) and ``category`` (aggregation-raster category id).

    BUGFIX: the body previously ignored its parameters and used an
    undefined global ``flow_in_category`` as both input and output (and as
    its own color source); it now uses ``raster`` and ``vector`` as
    documented, matching the expanded raster_to_vector() implementation
    elsewhere in this project.  The flow update also targets
    ``flow_column_name`` instead of a hard-coded "flow" column.
    """
    r.to_vect(input=raster, output=vector, type="area", quiet=True)

    # Value is the ecosystem type
    v.db_renamecolumn(map=vector, column=("value", "ecosystem"))

    # New column for flow values
    addcolumn_string = flow_column_name + " double"
    v.db_addcolumn(map=vector, columns=addcolumn_string)

    # The raster category 'label' is the 'flow'
    v.db_update(map=vector, column=flow_column_name, query_column="label")
    v.db_dropcolumn(map=vector, columns="label")

    # Update the aggregation raster categories
    v.db_addcolumn(map=vector, columns="aggregation_id int")
    v.db_update(map=vector, column="aggregation_id", value=category)

    # Color the vector like its source raster
    v.colors(map=vector, raster=raster, quiet=True)
def main():
    """
    Import any raster or vector data set and add its attribute to a GSFLOW
    data object.

    Exactly one of the "vector_area", "vector_points", or "raster" options
    is expected to be set; its values are transferred into "column" of the
    attribute table of "map".

    Fixes: string identity comparisons (``is``/``is not`` with string
    literals — a SyntaxWarning since Python 3.8 and unreliable) replaced
    with ``==``/``!=``; bare ``except:`` narrowed to ``except Exception``.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # Parsing: translate the user-facing type keyword into a SQL column type
    if options["attrtype"] == "int":
        attrtype = "integer"
    elif options["attrtype"] == "float":
        attrtype = "double precision"
    elif options["attrtype"] == "string":
        attrtype = "varchar"
    else:
        attrtype = ""

    ########################################
    # PROCESS AND UPLOAD TO DATABASE TABLE #
    ########################################

    if options["vector_area"] != "":
        # Rasterize the source polygons on a temporary region matched to
        # the target map, then average that raster over each target feature
        gscript.use_temp_region()
        g.region(vector=options["map"], res=options["dxy"])
        v.to_rast(
            input=options["vector_area"],
            output="tmp___tmp",
            use="attr",
            attribute_column=options["from_column"],
            quiet=True,
            overwrite=True,
        )
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"], columns=options["column"],
                            quiet=True)
        except Exception:
            # Column did not exist yet; nothing to drop
            pass
        if attrtype == "double precision":
            try:
                gscript.message("Checking for existing column to overwrite")
                v.db_dropcolumn(map=options["map"], columns="tmp_average",
                                quiet=True)
            except Exception:
                pass
            v.rast_stats(
                map=options["map"],
                raster="tmp___tmp",
                column_prefix="tmp",
                method="average",
                flags="c",
                quiet=True,
            )
            g.remove(type="raster", name="tmp___tmp", flags="f", quiet=True)
            v.db_renamecolumn(
                map=options["map"],
                column=["tmp_average", options["column"]],
                quiet=True,
            )
        else:
            # Non-numeric attribute: take the value of the nearest area
            try:
                v.db_addcolumn(
                    map=options["map"],
                    columns=options["column"] + " " + attrtype,
                    quiet=True,
                )
            except Exception:
                pass
            gscript.run_command(
                "v.distance",
                from_=options["map"],
                to=options["vector_area"],
                upload="to_attr",
                to_column=options["from_column"],
                column=options["column"],
                quiet=True,
            )
    elif options["vector_points"] != "":
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"], columns=options["column"],
                            quiet=True)
            v.db_addcolumn(
                map=options["map"],
                columns=options["column"] + " " + attrtype,
                quiet=True,
            )
        except Exception:
            pass
        gscript.run_command(
            "v.distance",
            from_=options["map"],
            to=options["vector_points"],
            upload="to_attr",
            to_column=options["from_column"],
            column=options["column"],
            quiet=True,
        )
    elif options["raster"] != "":
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"], columns=options["column"],
                            quiet=True)
        except Exception:
            pass
        v.rast_stats(
            map=options["map"],
            raster=options["raster"],
            column_prefix="tmp",
            method="average",
            flags="c",
            quiet=True,
        )
        v.db_renamecolumn(
            map=options["map"],
            column=["tmp_average", options["column"]],
            quiet=True,
        )

    gscript.message("Done.")
def main():
    """
    Adds GSFLOW parameters to a set of HRU sub-basins.

    Copies the input sub-basin vector map to the output HRU map, adds the
    attribute columns expected by GSFLOW/PRMS, and populates them from the
    supplied slope, aspect, and elevation rasters plus scalar-or-raster
    land-cover and soil inputs.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()
    basins = options['input']
    HRU = options['output']
    slope = options['slope']
    aspect = options['aspect']
    elevation = options['elevation']
    land_cover = options['cov_type']
    soil = options['soil_type']

    ################################
    # CREATE HRUs FROM SUB-BASINS  #
    ################################

    g.copy(vector=(basins, HRU), overwrite=gscript.overwrite())

    ############################################
    # ATTRIBUTE COLUMNS (IN ORDER FROM MANUAL) #
    ############################################

    # HRU
    hru_columns = []
    # Self ID
    hru_columns.append('id integer')  # nhru
    # Basic Physical Attributes (Geometry)
    hru_columns.append('hru_area double precision')  # acres (!!!!)
    hru_columns.append('hru_area_m2 double precision')  # [not for GSFLOW: for me!]
    hru_columns.append('hru_aspect double precision')  # Mean aspect [degrees]
    hru_columns.append('hru_elev double precision')  # Mean elevation
    hru_columns.append('hru_lat double precision')  # Latitude of centroid
    # (unnecessary but why not?)
    hru_columns.append('hru_lon double precision')  # Longitude of centroid
    hru_columns.append('hru_slope double precision')  # Mean slope [percent]
    # Basic Physical Attributes (Other)
    #hru_columns.append('hru_type integer')  # 0=inactive; 1=land; 2=lake; 3=swale; almost all will be 1
    #hru_columns.append('elev_units integer')  # 0=feet; 1=meters. 0=default. I think I will set this to 1 by default.
    # Measured input
    # Index of streamflow station at basin outlet:
    # station number if it has one, 0 if not
    hru_columns.append('outlet_sta integer')
    # Note that the below specify projections and note lat/lon; they really seem
    # to work for any projected coordinates, with _x, _y, in meters, and _xlong,
    # _ylat, in feet (i.e. they are just northing and easting). The meters and feet
    # are not just simple conversions, but actually are required for different
    # modules in the code, and are hence redundant but intentional.
    hru_columns.append('hru_x double precision')  # Easting [m]
    hru_columns.append('hru_xlong double precision')  # Easting [feet]
    hru_columns.append('hru_y double precision')  # Northing [m]
    hru_columns.append('hru_ylat double precision')  # Northing [feet]
    # Streamflow and lake routing
    # Travel time of flood wave to next downstream segment; this is the
    # Muskingum storage coefficient; 1.0 for reservoirs, diversions, and
    # segments flowing out of the basin
    hru_columns.append('K_coef double precision')
    # Amount of attenuation of flow wave; this is the Muskingum routing
    # weighting factor; range: 0.0--0.5; default 0.2;
    # 0 for all segments flowing out of the basin
    hru_columns.append('x_coef double precision')
    # ID of stream segment to which flow will be routed; this is for
    # non-cascade routing (flow goes directly from HRU to stream segment)
    hru_columns.append('hru_segment integer')
    # Index of measured streamflow station that replaces inflow to a segment
    hru_columns.append('obsin_segment integer')
    hru_columns.append('cov_type integer')  # 0=bare soil;1=grasses; 2=shrubs; 3=trees; 4=coniferous
    hru_columns.append('soil_type integer')  # 1=sand; 2=loam; 3=clay

    # Create strings
    hru_columns = ",".join(hru_columns)

    # Add columns to tables
    v.db_addcolumn(map=HRU, columns=hru_columns, quiet=True)

    ###########################
    # UPDATE DATABASE ENTRIES #
    ###########################

    colNames = np.array(gscript.vector_db_select(HRU, layer=1)['columns'])
    # NOTE(review): passing dict.values() straight to np.array() is a
    # Python 2 idiom; under Python 3 this would need list(...) -- confirm
    # the targeted interpreter.
    colValues = np.array(gscript.vector_db_select(HRU, layer=1)['values'].values())
    number_of_hrus = colValues.shape[0]
    cats = colValues[:, colNames == 'cat'].astype(int).squeeze()
    # NOTE(review): rnums is computed but never used below
    rnums = colValues[:, colNames == 'rnum'].astype(int).squeeze()
    nhru = np.arange(1, number_of_hrus + 1)
    # Pair each new sequential HRU id with its existing category value for
    # the parameterized UPDATE below
    nhrut = []
    for i in range(len(nhru)):
        nhrut.append((nhru[i], cats[i]))
    # Access the HRUs
    hru = VectorTopo(HRU)
    # Open the map with topology:
    hru.open('rw')
    # Create a cursor
    cur = hru.table.conn.cursor()
    # Use it to loop across the table
    cur.executemany("update " + HRU + " set id=? where cat=?", nhrut)
    # Commit changes to the table
    hru.table.conn.commit()
    # Close the table
    hru.close()

    """
    # Do the same for basins <-------------- DO THIS OR SIMPLY HAVE HRUs OVERLAIN WITH GRID CELLS? IN THIS CASE, RMV AREA ADDITION TO GRAVRES
    v.db_addcolumn(map=basins, columns='id int', quiet=True)
    basins = VectorTopo(basins)
    basins.open('rw')
    cur = basins.table.conn.cursor()
    cur.executemany("update basins set id=? where cat=?", nhrut)
    basins.table.conn.commit()
    basins.close()
    """

    # if you want to append to table
    # cur.executemany("update HRU(id) values(?)", nhrut)  # "insert into" will add rows

    #hru_columns.append('hru_area double precision')
    # Acres b/c USGS
    v.to_db(map=HRU, option='area', columns='hru_area', units='acres', quiet=True)
    v.to_db(map=HRU, option='area', columns='hru_area_m2', units='meters', quiet=True)

    # GET MEAN VALUES FOR THESE NEXT ONES, ACROSS THE BASIN

    # SLOPE (and aspect)
    #####################
    v.rast_stats(map=HRU, raster=slope, method='average', column_prefix='tmp', flags='c', quiet=True)
    v.db_update(map=HRU, column='hru_slope', query_column='tmp_average', quiet=True)

    # ASPECT
    #########
    v.db_dropcolumn(map=HRU, columns='tmp_average', quiet=True)
    # Dealing with conversion from degrees (no good average) to something I can
    # average -- x- and y-vectors
    # Geographic coordinates, so sin=x, cos=y.... not that it matters so long
    # as I am consistent in how I return to degrees
    r.mapcalc('aspect_x = sin(' + aspect + ')', overwrite=gscript.overwrite(), quiet=True)
    r.mapcalc('aspect_y = cos(' + aspect + ')', overwrite=gscript.overwrite(), quiet=True)
    #grass.run_command('v.db.addcolumn', map=HRU, columns='aspect_x_sum double precision, aspect_y_sum double precision, ncells_in_hru integer')
    v.rast_stats(map=HRU, raster='aspect_x', method='sum', column_prefix='aspect_x', flags='c', quiet=True)
    v.rast_stats(map=HRU, raster='aspect_y', method='sum', column_prefix='aspect_y', flags='c', quiet=True)
    hru = VectorTopo(HRU)
    hru.open('rw')
    cur = hru.table.conn.cursor()
    cur.execute("SELECT cat,aspect_x_sum,aspect_y_sum FROM %s" % hru.name)
    _arr = np.array(cur.fetchall()).astype(float)
    _cat = _arr[:, 0]
    _aspect_x_sum = _arr[:, 1]
    _aspect_y_sum = _arr[:, 2]
    # Mean aspect of each HRU = angle of the summed unit vectors
    aspect_angle = np.arctan2(_aspect_y_sum, _aspect_x_sum) * 180. / np.pi
    aspect_angle[aspect_angle < 0] += 360  # all positive
    aspect_angle_cat = np.vstack((aspect_angle, _cat)).transpose()
    cur.executemany("update " + HRU + " set hru_aspect=? where cat=?", aspect_angle_cat)
    hru.table.conn.commit()
    hru.close()

    # ELEVATION
    ############
    v.rast_stats(map=HRU, raster=elevation, method='average', column_prefix='tmp', flags='c', quiet=True)
    v.db_update(map=HRU, column='hru_elev', query_column='tmp_average', quiet=True)
    v.db_dropcolumn(map=HRU, columns='tmp_average', quiet=True)

    # CENTROIDS
    ############
    # get x,y of centroid -- but have areas not in database table, that do have
    # centroids, and having a hard time finding a good way to get rid of them!
    # They have duplicate category values!
    # Perhaps these are little dangles on the edges of the vectorization where
    # the raster value was the same but pinched out into 1-a few cells?
    # From looking at map, lots of extra centroids on area boundaries, and removing
    # small areas (though threshold hard to guess) gets rid of these
    hru = VectorTopo(HRU)
    hru.open('rw')
    hru_cats = []
    hru_coords = []
    # Collect every centroid's category and coordinates
    for hru_i in hru:
        if type(hru_i) is vector.geometry.Centroid:
            hru_cats.append(hru_i.cat)
            hru_coords.append(hru_i.coords())
    hru_cats = np.array(hru_cats)
    hru_coords = np.array(hru_coords)
    hru.rewind()
    # Map each centroid onto the area object that contains it
    hru_area_ids = []
    for coor in hru_coords:
        _area = hru.find_by_point.area(Point(coor[0], coor[1]))
        hru_area_ids.append(_area)
    hru_area_ids = np.array(hru_area_ids)
    hru.rewind()
    hru_areas = []
    for _area_id in hru_area_ids:
        hru_areas.append(_area_id.area())
    hru_areas = np.array(hru_areas)
    hru.rewind()
    allcats = sorted(list(set(list(hru_cats))))
    # Now create weighted mean
    hru_centroid_locations = []
    for cat in allcats:
        hrus_with_cat = hru_cats[hru_cats == cat]
        if len(hrus_with_cat) == 1:
            hru_centroid_locations.append((hru_coords[hru_cats == cat]).squeeze())
        else:
            # Multiple centroids share this category: use the area-weighted
            # mean of their positions
            _centroids = hru_coords[hru_cats == cat]
            #print _centroids
            _areas = hru_areas[hru_cats == cat]
            #print _areas
            _x = np.average(_centroids[:, 0], weights=_areas)
            _y = np.average(_centroids[:, 1], weights=_areas)
            #print _x, _y
            hru_centroid_locations.append(np.array([_x, _y]))
    # Now upload weighted mean to database table
    # allcats and hru_centroid_locations are co-indexed
    # NOTE(review): create_iterator() is defined elsewhere in this project;
    # index__cats is only used by the commented-out block below
    index__cats = create_iterator(HRU)
    cur = hru.table.conn.cursor()
    for i in range(len(allcats)):
        # meters
        cur.execute('update ' + HRU + ' set hru_x=' + str(hru_centroid_locations[i][0]) + ' where cat=' + str(allcats[i]))
        cur.execute('update ' + HRU + ' set hru_y=' + str(hru_centroid_locations[i][1]) + ' where cat=' + str(allcats[i]))
        # feet
        cur.execute('update ' + HRU + ' set hru_xlong=' + str(hru_centroid_locations[i][0] * 3.28084) + ' where cat=' + str(allcats[i]))
        cur.execute('update ' + HRU + ' set hru_ylat=' + str(hru_centroid_locations[i][1] * 3.28084) + ' where cat=' + str(allcats[i]))
        # (un)Project to lat/lon
        # NOTE(review): .keys()[0] is a Python 2 idiom; Python 3 would need
        # list(...)[0] -- confirm the targeted interpreter
        _centroid_ll = gscript.parse_command('m.proj', coordinates=list(hru_centroid_locations[i]), flags='od').keys()[0]
        _lon, _lat, _z = _centroid_ll.split('|')
        cur.execute('update ' + HRU + ' set hru_lon=' + _lon + ' where cat=' + str(allcats[i]))
        cur.execute('update ' + HRU + ' set hru_lat=' + _lat + ' where cat=' + str(allcats[i]))

    # feet -- not working.
    # Probably an issue with index__cats -- maybe fix later, if needed
    # But currently not a major speed issue
    """
    cur.executemany("update "+HRU+" set hru_xlong=?*3.28084 where hru_x=?", index__cats)
    cur.executemany("update "+HRU+" set hru_ylat=?*3.28084 where hru_y=?", index__cats)
    """
    cur.close()
    hru.table.conn.commit()
    hru.close()

    # ID NUMBER
    ############
    #cur.executemany("update "+HRU+" set hru_segment=? where id=?", index__cats)
    # Segment number = HRU ID number
    v.db_update(map=HRU, column='hru_segment', query_column='id', quiet=True)

    # LAND USE/COVER
    ############
    try:
        land_cover = int(land_cover)
    except:
        # Non-numeric input: treat it as a raster map name below
        pass
    if type(land_cover) is int:
        if land_cover <= 3:
            # Single integer code applied to every HRU
            # NOTE(review): this accepts negatives and rejects 4=coniferous
            # even though the column comment lists codes 0--4 -- confirm
            v.db_update(map=HRU, column='cov_type', value=land_cover, quiet=True)
        else:
            sys.exit("WARNING: INVALID LAND COVER TYPE. CHECK INTEGER VALUES.\n"
                     "EXITING TO ALLOW USER TO CHANGE BEFORE RUNNING GSFLOW")
    else:
        # NEED TO UPDATE THIS TO MODAL VALUE!!!!
        gscript.message("Warning: values taken from HRU centroids. Code should be updated to")
        gscript.message("acquire modal values")
        v.what_rast(map=HRU, type='centroid', raster=land_cover, column='cov_type', quiet=True)
    #v.rast_stats(map=HRU, raster=land_cover, method='average', column_prefix='tmp', flags='c', quiet=True)
    #v.db_update(map=HRU, column='cov_type', query_column='tmp_average', quiet=True)
    #v.db_dropcolumn(map=HRU, columns='tmp_average', quiet=True)

    # SOIL
    ############
    try:
        soil = int(soil)
    except:
        # Non-numeric input: treat it as a raster map name below
        pass
    if type(soil) is int:
        if (soil > 0) and (soil <= 3):
            v.db_update(map=HRU, column='soil_type', value=soil, quiet=True)
        else:
            sys.exit("WARNING: INVALID SOIL TYPE. CHECK INTEGER VALUES.\n"
                     "EXITING TO ALLOW USER TO CHANGE BEFORE RUNNING GSFLOW")
    else:
        # NEED TO UPDATE THIS TO MODAL VALUE!!!!
        gscript.message("Warning: values taken from HRU centroids. Code should be updated to")
        gscript.message("acquire modal values")
        v.what_rast(map=HRU, type='centroid', raster=soil, column='soil_type', quiet=True)
# Not really. Drainage from one catchment goes to multiple basins r.watershed(elevation=elevation, flow='cellArea_km2', accumulation='drainageArea_km2', drainage='drainageDirection', stream='streams_tmp', threshold=thresh, flags='s', overwrite=True) # Remove areas of negative (offmap) accumulation r.mapcalc('drainageArea_km2 = drainageArea_km2 * (drainageArea_km2 > 0)', overwrite=True) r.null(map='drainageArea_km2', setnull=0) #r.mapcalc("streams_tmp = streams_tmp * (drainageArea_km2 > 0)", overwrite=True) # Streams from new thresholding r.mapcalc("streams_tmp = drainageArea_km2 > "+str(thresh), overwrite=True) r.null(map="streams_tmp", setnull=0) """ # Prepare the stream lines and the points version of the same r.thin(input='streams_tmp', output='streams', overwrite=True) r.to_vect(input='streams', output='streams', type='line', overwrite=True) v.db_dropcolumn(map='streams', columns='label') v.db_renamecolumn(map='streams', column=('value', 'river_number')) r.to_vect(input='streams', output='streams_points', type='point', overwrite=True) v.db_dropcolumn(map='streams_points', columns='label') v.db_renamecolumn(map='streams_points', column=('value', 'river_number')) # Get slope and area v.db_addcolumn(map='streams_points', columns=('slope double precision, area_km2 double precision')) v.what_rast(map='streams_points', type='point', raster='slope', column='slope') v.what_rast(map='streams_points', type='point', raster='drainageArea_km2',
def main():
    """
    Import any raster or vector data set and add its attribute to a GSFLOW
    data object.

    Exactly one of the 'vector_area', 'vector_points', or 'raster' options
    is expected to be set; its values are transferred into 'column' of the
    attribute table of 'map'.

    Fixes: string identity comparisons (``is``/``is not`` with string
    literals -- a SyntaxWarning since Python 3.8 and unreliable) replaced
    with ``==``/``!=``; bare ``except:`` narrowed to ``except Exception``.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # Parsing: translate the user-facing type keyword into a SQL column type
    if options['attrtype'] == 'int':
        attrtype = 'integer'
    elif options['attrtype'] == 'float':
        attrtype = 'double precision'
    elif options['attrtype'] == 'string':
        attrtype = 'varchar'
    else:
        attrtype = ''

    ########################################
    # PROCESS AND UPLOAD TO DATABASE TABLE #
    ########################################

    if options['vector_area'] != '':
        # Rasterize the polygons on a temporary region matched to the
        # target map, then average that raster over each target feature
        gscript.use_temp_region()
        g.region(vector=options['map'], res=options['dxy'])
        v.to_rast(input=options['vector_area'], output='tmp___tmp',
                  use='attr', attribute_column=options['from_column'],
                  quiet=True, overwrite=True)
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'], columns=options['column'],
                            quiet=True)
        except Exception:
            # Column did not exist yet; nothing to drop
            pass
        if attrtype == 'double precision':
            try:
                gscript.message("Checking for existing column to overwrite")
                v.db_dropcolumn(map=options['map'], columns='tmp_average',
                                quiet=True)
            except Exception:
                pass
            v.rast_stats(map=options['map'], raster='tmp___tmp',
                         column_prefix='tmp', method='average', flags='c',
                         quiet=True)
            g.remove(type='raster', name='tmp___tmp', flags='f', quiet=True)
            v.db_renamecolumn(map=options['map'],
                              column=['tmp_average', options['column']],
                              quiet=True)
        else:
            # Non-numeric attribute: take the value of the nearest area
            try:
                v.db_addcolumn(map=options['map'],
                               columns=options['column'] + ' ' + attrtype,
                               quiet=True)
            except Exception:
                pass
            gscript.run_command('v.distance', from_=options['map'],
                                to=options['vector_area'], upload='to_attr',
                                to_column=options['from_column'],
                                column=options['column'], quiet=True)
    elif options['vector_points'] != '':
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'], columns=options['column'],
                            quiet=True)
            v.db_addcolumn(map=options['map'],
                           columns=options['column'] + ' ' + attrtype,
                           quiet=True)
        except Exception:
            pass
        gscript.run_command('v.distance', from_=options['map'],
                            to=options['vector_points'], upload='to_attr',
                            to_column=options['from_column'],
                            column=options['column'], quiet=True)
    elif options['raster'] != '':
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'], columns=options['column'],
                            quiet=True)
        except Exception:
            pass
        v.rast_stats(map=options['map'], raster=options['raster'],
                     column_prefix='tmp', method='average', flags='c',
                     quiet=True)
        v.db_renamecolumn(map=options['map'],
                          column=['tmp_average', options['column']],
                          quiet=True)

    gscript.message("Done.")
def raster_to_vector(raster_category_flow, vector_category_flow,
                     flow_column_name, category, type):
    """Convert a raster to a vector map and prepare its attribute table.

    Parameters
    ----------
    raster_category_flow :
        Name of the input raster map 'flow in category'
    vector_category_flow :
        Name for the output vector map 'flow in category'
    flow_column_name :
        Name of the double-precision column that will hold the flow values
    category :
        Aggregation-raster category id stored in 'aggregation_id'
    type :
        Type for the output vector map (currently unused: the output is
        always vectorised as 'area')

    BUGFIX: the flow update previously targeted a hard-coded "flow"
    column, which fails whenever ``flow_column_name`` differs from "flow";
    it now updates the column that was actually created.
    """
    msg = " * Vectorising raster map '{r}'"
    grass.verbose(
        _(
            msg.format(
                c=category,
                r=raster_category_flow,
                v=vector_category_flow,
            )))
    r.to_vect(
        input=raster_category_flow,
        output=vector_category_flow,
        type="area",
        quiet=True,
    )

    msg = " * Updating the attribute table"
    grass.verbose(_(msg))

    # Value is the ecosystem type
    v.db_renamecolumn(
        map=vector_category_flow,
        column=("value", "ecosystem"),
        quiet=True,
    )

    # New column for flow values
    addcolumn_string = flow_column_name + " double"
    v.db_addcolumn(
        map=vector_category_flow,
        columns=addcolumn_string,
        quiet=True,
    )

    # The raster category 'label' is the 'flow'
    v.db_update(
        map=vector_category_flow,
        column=flow_column_name,
        query_column="label",
        quiet=True,
    )
    v.db_dropcolumn(
        map=vector_category_flow,
        columns="label",
        quiet=True,
    )

    # Update the aggregation raster categories
    v.db_addcolumn(
        map=vector_category_flow,
        columns="aggregation_id int",
        quiet=True,
    )
    v.db_update(
        map=vector_category_flow,
        column="aggregation_id",
        value=category,
        quiet=True,
    )
    v.colors(
        map=vector_category_flow,
        raster=raster_category_flow,
        quiet=True,
    )
def main():
    """
    Builds river segments for input to the USGS hydrologic models
    PRMS and GSFLOW.

    Copies the input stream vector to the output segments map, adds the
    SFR/segment attribute columns, numbers the segments, and fills the
    hydraulic-geometry and roughness columns according to ICALC.

    Fixes: string identity comparisons (``is not ''``) replaced with
    ``!=``; ``dict.values()`` wrapped in ``list()`` for Python 3 (a bare
    view passed to np.array() yields a useless 0-d object array).
    """
    ##################
    # OPTION PARSING #
    ##################
    options, flags = gscript.parser()

    # I/O
    streams = options["input"]
    segments = options["output"]

    # Hydraulic geometry
    ICALC = int(options["icalc"])

    # ICALC=0: Constant depth
    WIDTH1 = options["width1"]
    WIDTH2 = options["width2"]

    # ICALC=1,2: Manning (in channel and overbank): below

    # ICALC=3: Power-law relationships (following Leopold and others)
    # The at-a-station default exponents are from Rhodes (1977)
    CDPTH = str(float(options["cdpth"]) / 35.3146667)  # cfs to m^3/s
    FDPTH = options["fdpth"]
    AWDTH = str(float(options["awdth"]) / 35.3146667)  # cfs to m^3/s
    BWDTH = options["bwdth"]

    ##################################################
    # CHECKING DEPENDENCIES WITH OPTIONAL PARAMETERS #
    ##################################################

    if ICALC == 3:
        # All four power-law coefficients must be supplied together
        if not (CDPTH and FDPTH and AWDTH and BWDTH):
            gscript.fatal("Missing CDPTH, FDPTH, AWDTH, and/or BWDTH. "
                          "These are required when ICALC = 3.")

    ###########
    # RUNNING #
    ###########

    # New Columns for Segments
    segment_columns = []
    # Self ID
    segment_columns.append("id integer")  # segment number
    segment_columns.append("ISEG integer")  # segment number
    segment_columns.append("NSEG integer")  # segment number
    # for GSFLOW
    segment_columns.append("ICALC integer")  # 1: channel, 2: channel+fp, 3: power function
    segment_columns.append("OUTSEG integer")  # downstream segment -- tostream, renumbered
    segment_columns.append("ROUGHCH double precision")  # overbank roughness
    segment_columns.append("ROUGHBK double precision")  # in-channel roughness
    segment_columns.append("WIDTH1 double precision")  # overbank roughness
    segment_columns.append("WIDTH2 double precision")  # in-channel roughness
    segment_columns.append("CDPTH double precision")  # depth coeff
    segment_columns.append("FDPTH double precision")  # depth exp
    segment_columns.append("AWDTH double precision")  # width coeff
    segment_columns.append("BWDTH double precision")  # width exp
    segment_columns.append("floodplain_width double precision")  # floodplain width (8-pt approx channel + flat fp)
    # The below will be all 0
    segment_columns.append("IUPSEG varchar")  # upstream segment ID number, for diversions
    segment_columns.append("FLOW varchar")
    segment_columns.append("RUNOFF varchar")
    segment_columns.append("ETSW varchar")
    segment_columns.append("PPTSW varchar")

    segment_columns = ",".join(segment_columns)

    # CONSIDER THE EFFECT OF OVERWRITING COLUMNS -- WARN FOR THIS
    # IF MAP EXISTS ALREADY?

    # Create a map to work with
    g.copy(vector=(streams, segments), overwrite=gscript.overwrite())
    # and add its columns
    v.db_addcolumn(map=segments, columns=segment_columns)

    # Produce the data table entries
    ##################################
    colNames = np.array(gscript.vector_db_select(segments, layer=1)["columns"])
    colValues = np.array(
        list(gscript.vector_db_select(segments, layer=1)["values"].values()))
    cats = colValues[:, colNames == "cat"].astype(int).squeeze()

    # Renumber the segments sequentially, keyed to their category values
    nseg = np.arange(1, len(cats) + 1)
    nseg_cats = []
    for i in range(len(cats)):
        nseg_cats.append((nseg[i], cats[i]))

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open("rw")
    cur = segmentsTopo.table.conn.cursor()

    # id = cat (as does ISEG and NSEG)
    cur.executemany("update " + segments + " set id=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set ISEG=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set NSEG=? where cat=?", nseg_cats)

    # outseg = tostream: default is 0 if "tostream" is off-map
    cur.execute("update " + segments + " set OUTSEG=0")
    cur.executemany("update " + segments + " set OUTSEG=? where tostream=?",
                    nseg_cats)

    # Hydraulic geometry selection
    cur.execute("update " + segments + " set ICALC=" + str(ICALC))
    segmentsTopo.table.conn.commit()
    segmentsTopo.close()
    if ICALC == 0:
        gscript.message("")
        gscript.message("ICALC=0 (constant) not supported")
        gscript.message("Continuing nonetheless.")
        gscript.message("")
    if ICALC == 1:
        if options["width_points"] != "":
            # Can add machinery here for separate upstream and downstream
            # widths. But really should not vary all that much.
            # v.to_db(map=segments, option='start', columns='xr1,yr1')
            # v.to_db(map=segments, option='end', columns='xr2,yr2')
            gscript.run_command(
                "v.distance",
                from_=segments,
                to=options["width_points"],
                upload="to_attr",
                to_column=options["width_points_col"],
                column="WIDTH1",
            )
            v.db_update(map=segments, column="WIDTH2", query_column="WIDTH1")
        else:
            segmentsTopo = VectorTopo(segments)
            segmentsTopo.open("rw")
            cur = segmentsTopo.table.conn.cursor()
            cur.execute("update " + segments + " set WIDTH1=" + str(WIDTH1))
            cur.execute("update " + segments + " set WIDTH2=" + str(WIDTH2))
            segmentsTopo.table.conn.commit()
            segmentsTopo.close()
    if ICALC == 2:
        # REMOVE THIS MESSAGE ONCE THIS IS INCLUDED IN INPUT-FILE BUILDER
        gscript.message("")
        gscript.message("ICALC=2 (8-point channel + floodplain) not supported")
        gscript.message("Continuing nonetheless.")
        gscript.message("")
        if options["fp_width_pts"] != "":
            gscript.run_command(
                "v.distance",
                from_=segments,
                to=options["fp_width_pts"],
                upload="to_attr",
                to_column=options["fp_width_pts_col"],
                column="floodplain_width",
            )
        else:
            segmentsTopo = VectorTopo(segments)
            segmentsTopo.open("rw")
            cur = segmentsTopo.table.conn.cursor()
            cur.execute("update " + segments + " set floodplain_width=" +
                        str(options["fp_width_value"]))
            segmentsTopo.table.conn.commit()
            segmentsTopo.close()
    if ICALC == 3:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        cur.execute("update " + segments + " set CDPTH=" + str(CDPTH))
        cur.execute("update " + segments + " set FDPTH=" + str(FDPTH))
        cur.execute("update " + segments + " set AWDTH=" + str(AWDTH))
        cur.execute("update " + segments + " set BWDTH=" + str(BWDTH))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()

    # values that are 0
    gscript.message("")
    gscript.message("NOTICE: not currently used:")
    gscript.message("IUPSEG, FLOW, RUNOFF, ETSW, and PPTSW.")
    gscript.message("All set to 0.")
    gscript.message("")

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open("rw")
    cur = segmentsTopo.table.conn.cursor()
    cur.execute("update " + segments + " set IUPSEG=" + str(0))
    cur.execute("update " + segments + " set FLOW=" + str(0))
    cur.execute("update " + segments + " set RUNOFF=" + str(0))
    cur.execute("update " + segments + " set ETSW=" + str(0))
    cur.execute("update " + segments + " set PPTSW=" + str(0))
    segmentsTopo.table.conn.commit()
    segmentsTopo.close()

    # Roughness
    # ICALC=1,2: Manning (in channel)
    if (options["roughch_raster"] != "") and (options["roughch_points"] != ""):
        gscript.fatal(
            "Choose either a raster or vector or a value as Manning's n input."
        )
    if options["roughch_raster"] != "":
        ROUGHCH = options["roughch_raster"]
        v.rast_stats(
            raster=ROUGHCH,
            method="average",
            column_prefix="tmp",
            map=segments,
            flags="c",
        )
        # v.db_renamecolumn(map=segments, column='tmp_average,ROUGHCH', quiet=True)
        v.db_update(map=segments, column="ROUGHCH",
                    query_column="tmp_average", quiet=True)
        v.db_dropcolumn(map=segments, columns="tmp_average", quiet=True)
    elif options["roughch_points"] != "":
        ROUGHCH = options["roughch_points"]
        gscript.run_command(
            "v.distance",
            from_=segments,
            to=ROUGHCH,
            upload="to_attr",
            to_column=options["roughch_pt_col"],
            column="ROUGHCH",
        )
    else:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        ROUGHCH = options["roughch_value"]
        cur.execute("update " + segments + " set ROUGHCH=" + str(ROUGHCH))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()

    # ICALC=2: Manning (overbank)
    if (options["roughbk_raster"] != "") and (options["roughbk_points"] != ""):
        gscript.fatal(
            "Choose either a raster or vector or a value as Manning's n input."
        )
    if options["roughbk_raster"] != "":
        ROUGHBK = options["roughbk_raster"]
        v.rast_stats(
            raster=ROUGHBK,
            method="average",
            column_prefix="tmp",
            map=segments,
            flags="c",
        )
        v.db_renamecolumn(map=segments, column="tmp_average,ROUGHBK",
                          quiet=True)
    elif options["roughbk_points"] != "":
        ROUGHBK = options["roughbk_points"]
        gscript.run_command(
            "v.distance",
            from_=segments,
            to=ROUGHBK,
            upload="to_attr",
            to_column=options["roughbk_pt_col"],
            column="ROUGHBK",
        )
    else:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        ROUGHBK = options["roughbk_value"]
        cur.execute("update " + segments + " set ROUGHBK=" + str(ROUGHBK))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()
# Rename columns inherited from the overlay: a_* came from the segments
# map, b_* from the MODFLOW grid; give them their SFR-package names.
v.db_renamecolumn(map='reaches', column=('a_x1', 'x1'))
v.db_renamecolumn(map='reaches', column=('a_x2', 'x2'))
v.db_renamecolumn(map='reaches', column=('a_y1', 'y1'))
v.db_renamecolumn(map='reaches', column=('a_y2', 'y2'))
v.db_renamecolumn(map='reaches', column=('a_stream_type', 'stream_type'))
v.db_renamecolumn(map='reaches', column=('a_type_code', 'type_code'))
v.db_renamecolumn(map='reaches', column=('a_cat', 'rnum_cat'))
v.db_renamecolumn(map='reaches', column=('a_tostream', 'tostream'))
v.db_renamecolumn(map='reaches', column=('a_id', 'segment_id'))
v.db_renamecolumn(map='reaches', column=('a_OUTSEG', 'OUTSEG'))
v.db_renamecolumn(map='reaches', column=('b_row', 'row'))
v.db_renamecolumn(map='reaches', column=('b_col', 'col'))
v.db_renamecolumn(map='reaches', column=('b_id', 'cell_id'))

# Drop some unnecessary columns
v.db_dropcolumn(map='reaches', columns='b_area_m2')

# Update some columns that can be done now
v.db_update(map='reaches', column='KRCH', value=1)  # all reaches in layer 1
# BUGFIX: v.db.update's "value" parameter assigns a literal; copying from
# another column requires "query_column".  value='row' would have written
# the string 'row' rather than the row number.
v.db_update(map='reaches', column='IRCH', query_column='row')
v.db_update(map='reaches', column='JRCH', query_column='col')
v.db_update(map='reaches', column='ISEG', query_column='segment_id')
v.db_update(map='reaches', column='NSEG', query_column='segment_id')
v.to_db(map='reaches', columns='RCHLEN', option='length')
v.db_update(map='reaches', column='STRTHICK', value=0.1)  # 10 cm, prescribed

# Still to go after these:
# STRTOP (added with slope)
# IREACH (whole next section dedicated to this)
# SLOPE (need z_start and z_end)
def Generate(Qa):
    """
    Perform one Hardy-Cross balancing iteration on the pipe network.

    Computes the head-loss correction (Del) for each loop in the global
    ``Network``, labels and renders the current flows to a map, and
    recurses with corrected flows until every correction is within the
    convergence threshold ``con``.

    Parameters
    ----------
    Qa : dict
        Assumed flow for each two-letter arc (node pair).

    Relies on module-level state: Network, k, Dir, n, con, O_Del,
    Iterations, Join, Module, and the GRASS wrappers (v, ps.map).
    """
    Dels = []
    global Iterations
    Iterations += 1

    # One correction term per loop of the network
    for net in Network:
        Arcs = [net[i:i + 2] for i in range(3)]  # consecutive node pairs
        Hl, Hl_Qa = {}, {}  # Head Loss and Head Loss / Qa
        for arc in Arcs:
            Hl[arc] = (Qa[arc] ** 2) * k[arc] * Dir[arc]
            Hl_Qa[arc] = abs(Hl[arc] / Qa[arc])
        Del = -sum(Hl.values()) / (n * sum(Hl_Qa.values()))
        Dels.append(Del)

    # Append the Overall Dels to O_Del
    O_Del.append(Dels)

    # Adding attribute columns with the values (for line labels)
    v.db_addcolumn(map='myNet', columns='value double precision')
    for net in Network:
        Arcs = [net[i:i + 2] for i in range(3)]
        for arc in Arcs:
            whr = "name like '%s'" % arc.lower()
            v.db_update(map='myNet', column='value', where=whr,
                        value=round(abs(Qa[arc]), 3))
    v.label(map='myNet', type='line', column='value', labels='Line',
            size='5', opaque='no', color='red')
    v.label(map='TankPoints', type='point', column='name',
            labels='TankPoints', size='5', opaque='no', color='red')

    # Prepare Map
    ps = Module('ps.map')
    # NOTE(review): `O` is not defined in this function -- presumably a
    # module-level flags constant (or it should be the string 'o'); confirm.
    ps(input=Join('Test1', '#ps_scripts', 'GenNetwork.psmap'),
       output='Network-Iteration-%s.ps' % Iterations, flags=O)
    v.db_dropcolumn(map='myNet', columns='value')

    if not all(i >= con for i in Dels):
        New_Qa = {}
        # loop_idx replaces the original name "id", which shadowed a builtin
        for loop_idx, net in enumerate(Network):
            Arcs = [net[i:i + 2] for i in range(3)]
            for arc in Arcs:
                if arc in ('BD', 'DB'):
                    # Shared arc belongs to both loops, so it receives both
                    # corrections.  (Was two duplicated if/elif branches
                    # with identical bodies.)  NOTE(review): int(not idx)
                    # assumes exactly two loops -- confirm.
                    New_Qa[arc] = (Qa[arc] * Dir[arc] + Dels[loop_idx]
                                   - Dels[int(not loop_idx)])
                else:
                    New_Qa[arc] = Qa[arc] * Dir[arc] + Dels[loop_idx]
        Generate(New_Qa)
    else:
        print("\n\n")
        print("Number of Iterations: %s\n" % Iterations)
        pprint(Qa)