def raster_to_vector(raster, vector, type, flow_column_name="flow", category=None):
    """Convert a raster map to a vector map and attach flow attributes.

    Parameters
    ----------
    raster :
        Name of the input raster map
    vector :
        Name for the output vector map
    type :
        Type for the output vector map (currently unused: r.to_vect below
        is always called with type="area")
    flow_column_name :
        Name of the double-precision attribute column created to hold the
        flow values (the update below writes into the 'flow' column, so the
        default should normally be kept)
    category :
        Aggregation category id written to the 'aggregation_id' column of
        every feature

    Returns
    -------
    None
    """
    # FIX: the original body referenced the undefined names
    # 'flow_in_category', 'flow_column_name' and 'category' instead of this
    # function's parameters, which raised NameError as soon as it was called.
    r.to_vect(input=raster, output=vector, type="area", quiet=True)

    # Value is the ecosystem type
    v.db_renamecolumn(map=vector, column=("value", "ecosystem"))

    # New column for flow values
    addcolumn_string = flow_column_name + " double"
    v.db_addcolumn(map=vector, columns=addcolumn_string)

    # The raster category 'label' is the 'flow'
    v.db_update(map=vector, column="flow", query_column="label")
    v.db_dropcolumn(map=vector, columns="label")

    # Update the aggregation raster categories
    v.db_addcolumn(map=vector, columns="aggregation_id int")
    v.db_update(map=vector, column="aggregation_id", value=category)

    # Transfer the raster colour table to the new vector map
    v.colors(map=vector, raster=raster, quiet=True)
def main():
    """
    Build gravity reservoirs in GSFLOW: combines MODFLOW grid and HRU
    sub-basins.  These define the PRMS soil zone that connects to
    MODFLOW cells.
    """
    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # I/O
    HRUs = options['hru_input']
    grid = options['grid_input']
    gravity_reservoirs = options['output']
    # FIX: removed the unused local 'segments', which merely duplicated
    # options['output'], and the commented-out basin-area block.

    ############
    # ANALYSIS #
    ############

    # Create gravity reservoirs -- overlay cells=grid and HRUs
    v.overlay(ainput=HRUs, binput=grid, atype='area', btype='area',
              operator='and', output=gravity_reservoirs,
              overwrite=gscript.overwrite())
    # Drop v.overlay bookkeeping columns that are not needed downstream
    v.db_dropcolumn(map=gravity_reservoirs, columns='a_cat,a_label,b_cat',
                    quiet=True)
    # Cell and HRU ID's (v.overlay prefixes source attributes with a_/b_)
    v.db_renamecolumn(map=gravity_reservoirs, column=('a_id', 'gvr_hru_id'),
                      quiet=True)
    v.db_renamecolumn(map=gravity_reservoirs, column=('b_id', 'gvr_cell_id'),
                      quiet=True)
    # Percent areas
    v.db_renamecolumn(map=gravity_reservoirs,
                      column=('a_hru_area_m2', 'hru_area_m2'), quiet=True)
    v.db_renamecolumn(map=gravity_reservoirs,
                      column=('b_area_m2', 'cell_area_m2'), quiet=True)
    v.db_addcolumn(map=gravity_reservoirs,
                   columns='area_m2 double precision', quiet=True)
    v.to_db(map=gravity_reservoirs, option='area', units='meters',
            columns='area_m2', quiet=True)
    v.db_addcolumn(map=gravity_reservoirs,
                   columns='gvr_cell_pct double precision, '
                           'gvr_hru_pct double precision',
                   quiet=True)
    v.db_update(map=gravity_reservoirs, column='gvr_cell_pct',
                query_column='100*area_m2/cell_area_m2', quiet=True)
    v.db_update(map=gravity_reservoirs, column='gvr_hru_pct',
                query_column='100*area_m2/hru_area_m2', quiet=True)
    # Discard sliver polygons (< 0.001 % of their cell's area)
    v.extract(input=gravity_reservoirs, output='tmp_',
              where="gvr_cell_pct > 0.001", overwrite=True, quiet=True)
    g.rename(vector=('tmp_', gravity_reservoirs), overwrite=True, quiet=True)
def main():
    """
    Import any raster or vector data set and add its attribute to a GSFLOW
    data object.
    """
    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # Parsing: map the user-facing type keyword onto an SQL column type.
    if options["attrtype"] == "int":
        attrtype = "integer"
    elif options["attrtype"] == "float":
        attrtype = "double precision"
    elif options["attrtype"] == "string":
        attrtype = "varchar"
    else:
        attrtype = ""

    ########################################
    # PROCESS AND UPLOAD TO DATABASE TABLE #
    ########################################

    # FIX: the original used "is"/"is not" to compare strings. That tests
    # object identity, not equality, and is not guaranteed to work for
    # literals (SyntaxWarning on modern CPython). Use ==/!= instead.
    # Bare "except:" clauses were also narrowed to "except Exception:" so
    # KeyboardInterrupt/SystemExit are not swallowed.
    if options["vector_area"] != "":
        gscript.use_temp_region()
        g.region(vector=options["map"], res=options["dxy"])
        v.to_rast(
            input=options["vector_area"],
            output="tmp___tmp",
            use="attr",
            attribute_column=options["from_column"],
            quiet=True,
            overwrite=True,
        )
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"], columns=options["column"], quiet=True)
        except Exception:
            # Best effort: the column may simply not exist yet.
            pass
        if attrtype == "double precision":
            try:
                gscript.message("Checking for existing column to overwrite")
                v.db_dropcolumn(map=options["map"], columns="tmp_average", quiet=True)
            except Exception:
                pass
            v.rast_stats(
                map=options["map"],
                raster="tmp___tmp",
                column_prefix="tmp",
                method="average",
                flags="c",
                quiet=True,
            )
            g.remove(type="raster", name="tmp___tmp", flags="f", quiet=True)
            v.db_renamecolumn(
                map=options["map"],
                column=["tmp_average", options["column"]],
                quiet=True,
            )
        else:
            try:
                v.db_addcolumn(
                    map=options["map"],
                    columns=options["column"] + " " + attrtype,
                    quiet=True,
                )
            except Exception:
                pass
            gscript.run_command(
                "v.distance",
                from_=options["map"],
                to=options["vector_area"],
                upload="to_attr",
                to_column=options["from_column"],
                column=options["column"],
                quiet=True,
            )
    elif options["vector_points"] != "":
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"], columns=options["column"], quiet=True)
            v.db_addcolumn(
                map=options["map"],
                columns=options["column"] + " " + attrtype,
                quiet=True,
            )
        except Exception:
            pass
        gscript.run_command(
            "v.distance",
            from_=options["map"],
            to=options["vector_points"],
            upload="to_attr",
            to_column=options["from_column"],
            column=options["column"],
            quiet=True,
        )
    elif options["raster"] != "":
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"], columns=options["column"], quiet=True)
        except Exception:
            pass
        v.rast_stats(
            map=options["map"],
            raster=options["raster"],
            column_prefix="tmp",
            method="average",
            flags="c",
            quiet=True,
        )
        v.db_renamecolumn(
            map=options["map"],
            column=["tmp_average", options["column"]],
            quiet=True,
        )

    gscript.message("Done.")
r.watershed(elevation=elevation, flow='cellArea_km2', accumulation='drainageArea_km2', drainage='drainageDirection', stream='streams_tmp', threshold=thresh, flags='s', overwrite=True) # Remove areas of negative (offmap) accumulation r.mapcalc('drainageArea_km2 = drainageArea_km2 * (drainageArea_km2 > 0)', overwrite=True) r.null(map='drainageArea_km2', setnull=0) #r.mapcalc("streams_tmp = streams_tmp * (drainageArea_km2 > 0)", overwrite=True) # Streams from new thresholding r.mapcalc("streams_tmp = drainageArea_km2 > "+str(thresh), overwrite=True) r.null(map="streams_tmp", setnull=0) """ # Prepare the stream lines and the points version of the same r.thin(input='streams_tmp', output='streams', overwrite=True) r.to_vect(input='streams', output='streams', type='line', overwrite=True) v.db_dropcolumn(map='streams', columns='label') v.db_renamecolumn(map='streams', column=('value', 'river_number')) r.to_vect(input='streams', output='streams_points', type='point', overwrite=True) v.db_dropcolumn(map='streams_points', columns='label') v.db_renamecolumn(map='streams_points', column=('value', 'river_number')) # Get slope and area v.db_addcolumn(map='streams_points', columns=('slope double precision, area_km2 double precision')) v.what_rast(map='streams_points', type='point', raster='slope', column='slope') v.what_rast(map='streams_points', type='point', raster='drainageArea_km2', column='area_km2')
def main():
    """
    Import any raster or vector data set and add its attribute to a GSFLOW
    data object.
    """
    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # Parsing: map the user-facing type keyword onto an SQL column type.
    if options['attrtype'] == 'int':
        attrtype = 'integer'
    elif options['attrtype'] == 'float':
        attrtype = 'double precision'
    elif options['attrtype'] == 'string':
        attrtype = 'varchar'
    else:
        attrtype = ''

    ########################################
    # PROCESS AND UPLOAD TO DATABASE TABLE #
    ########################################

    # FIX: the original used "is"/"is not" to compare strings. That tests
    # object identity, not equality, and is not guaranteed to work for
    # literals (SyntaxWarning on modern CPython). Use ==/!= instead.
    # Bare "except:" clauses were also narrowed to "except Exception:".
    if options['vector_area'] != '':
        gscript.use_temp_region()
        g.region(vector=options['map'], res=options['dxy'])
        v.to_rast(input=options['vector_area'], output='tmp___tmp',
                  use='attr', attribute_column=options['from_column'],
                  quiet=True, overwrite=True)
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'], columns=options['column'],
                            quiet=True)
        except Exception:
            # Best effort: the column may simply not exist yet.
            pass
        if attrtype == 'double precision':
            try:
                gscript.message("Checking for existing column to overwrite")
                v.db_dropcolumn(map=options['map'], columns='tmp_average',
                                quiet=True)
            except Exception:
                pass
            v.rast_stats(map=options['map'], raster='tmp___tmp',
                         column_prefix='tmp', method='average', flags='c',
                         quiet=True)
            g.remove(type='raster', name='tmp___tmp', flags='f', quiet=True)
            v.db_renamecolumn(map=options['map'],
                              column=['tmp_average', options['column']],
                              quiet=True)
        else:
            try:
                v.db_addcolumn(map=options['map'],
                               columns=options['column'] + ' ' + attrtype,
                               quiet=True)
            except Exception:
                pass
            gscript.run_command('v.distance', from_=options['map'],
                                to=options['vector_area'],
                                upload='to_attr',
                                to_column=options['from_column'],
                                column=options['column'], quiet=True)
    elif options['vector_points'] != '':
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'], columns=options['column'],
                            quiet=True)
            v.db_addcolumn(map=options['map'],
                           columns=options['column'] + ' ' + attrtype,
                           quiet=True)
        except Exception:
            pass
        gscript.run_command('v.distance', from_=options['map'],
                            to=options['vector_points'],
                            upload='to_attr',
                            to_column=options['from_column'],
                            column=options['column'], quiet=True)
    elif options['raster'] != '':
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'], columns=options['column'],
                            quiet=True)
        except Exception:
            pass
        v.rast_stats(map=options['map'], raster=options['raster'],
                     column_prefix='tmp', method='average', flags='c',
                     quiet=True)
        v.db_renamecolumn(map=options['map'],
                          column=['tmp_average', options['column']],
                          quiet=True)

    gscript.message("Done.")
def raster_to_vector(raster_category_flow, vector_category_flow, flow_column_name, category, type):
    """Convert a raster map to a vector map and attach flow attributes.

    Parameters
    ----------
    raster_category_flow :
        Name of the input raster map 'flow in category'
    vector_category_flow :
        Name for the output vector map 'flow in category'
    flow_column_name :
        Name of the double-precision attribute column created to hold the
        flow values (note: the update below writes into the hard-coded
        'flow' column)
    category :
        Aggregation category id written to the 'aggregation_id' column of
        every feature
    type :
        Type for the output vector map (currently unused: r.to_vect below
        is always called with type="area")

    Returns
    -------
    None
    """
    msg = " * Vectorising raster map '{r}'"
    # The extra c=/v= keywords are unused by this template but are accepted
    # (and ignored) by str.format.
    grass.verbose(
        _(
            msg.format(
                c=category,
                r=raster_category_flow,
                v=vector_category_flow,
            )))
    r.to_vect(
        input=raster_category_flow,
        output=vector_category_flow,
        type="area",
        quiet=True,
    )

    msg = " * Updating the attribute table"
    grass.verbose(_(msg))

    # Value is the ecosystem type
    v.db_renamecolumn(
        map=vector_category_flow,
        column=("value", "ecosystem"),
        quiet=True,
    )

    # New column for flow values
    addcolumn_string = flow_column_name + " double"
    v.db_addcolumn(
        map=vector_category_flow,
        columns=addcolumn_string,
        quiet=True,
    )

    # The raster category 'label' is the 'flow'
    v.db_update(
        map=vector_category_flow,
        column="flow",
        query_column="label",
        quiet=True,
    )
    v.db_dropcolumn(
        map=vector_category_flow,
        columns="label",
        quiet=True,
    )

    # Update the aggregation raster categories
    v.db_addcolumn(
        map=vector_category_flow,
        columns="aggregation_id int",
        quiet=True,
    )
    v.db_update(
        map=vector_category_flow,
        column="aggregation_id",
        value=category,
        quiet=True,
    )

    # Transfer the raster colour table to the new vector map
    v.colors(
        map=vector_category_flow,
        raster=raster_category_flow,
        quiet=True,
    )
def main():
    """
    Builds river segments for input to the USGS hydrologic models
    PRMS and GSFLOW.
    """
    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # I/O
    streams = options["input"]
    segments = options["output"]

    # Hydraulic geometry
    ICALC = int(options["icalc"])

    # ICALC=0: Constant depth
    WIDTH1 = options["width1"]
    WIDTH2 = options["width2"]

    # ICALC=1,2: Manning (in channel and overbank): below

    # ICALC=3: Power-law relationships (following Leopold and others)
    # The at-a-station default exponents are from Rhodes (1977)
    CDPTH = str(float(options["cdpth"]) / 35.3146667)  # cfs to m^3/s
    FDPTH = options["fdpth"]
    AWDTH = str(float(options["awdth"]) / 35.3146667)  # cfs to m^3/s
    BWDTH = options["bwdth"]

    ##################################################
    # CHECKING DEPENDENCIES WITH OPTIONAL PARAMETERS #
    ##################################################

    if ICALC == 3:
        if not (CDPTH and FDPTH and AWDTH and BWDTH):
            gscript.fatal(
                "Missing CDPTH, FDPTH, AWDTH, and/or BWDTH. "
                "These are required when ICALC = 3."
            )

    ###########
    # RUNNING #
    ###########

    # New Columns for Segments
    segment_columns = []
    # Self ID
    segment_columns.append("id integer")  # segment number
    segment_columns.append("ISEG integer")  # segment number
    segment_columns.append("NSEG integer")  # segment number
    # for GSFLOW
    segment_columns.append(
        "ICALC integer"
    )  # 1 for channel, 2 for channel+fp, 3 for power function
    segment_columns.append(
        "OUTSEG integer"
    )  # downstream segment -- tostream, renumbered
    # FIX: comments below were swapped/copy-pasted in the original; the
    # assignment code further down fills ROUGHCH under "Manning (in
    # channel)" and ROUGHBK under "Manning (overbank)".
    segment_columns.append("ROUGHCH double precision")  # in-channel roughness
    segment_columns.append("ROUGHBK double precision")  # overbank roughness
    segment_columns.append("WIDTH1 double precision")  # channel width (upstream)
    segment_columns.append("WIDTH2 double precision")  # channel width (downstream)
    segment_columns.append("CDPTH double precision")  # depth coeff
    segment_columns.append("FDPTH double precision")  # depth exp
    segment_columns.append("AWDTH double precision")  # width coeff
    segment_columns.append("BWDTH double precision")  # width exp
    segment_columns.append(
        "floodplain_width double precision"
    )  # floodplain width (8-pt approx channel + flat fp)
    # The below will be all 0
    segment_columns.append(
        "IUPSEG varchar"
    )  # upstream segment ID number, for diversions
    segment_columns.append("FLOW varchar")
    segment_columns.append("RUNOFF varchar")
    segment_columns.append("ETSW varchar")
    segment_columns.append("PPTSW varchar")

    segment_columns = ",".join(segment_columns)

    # CONSIDER THE EFFECT OF OVERWRITING COLUMNS -- WARN FOR THIS
    # IF MAP EXISTS ALREADY?

    # Create a map to work with
    g.copy(vector=(streams, segments), overwrite=gscript.overwrite())
    # and add its columns
    v.db_addcolumn(map=segments, columns=segment_columns)

    # Produce the data table entries
    ##################################
    colNames = np.array(gscript.vector_db_select(segments, layer=1)["columns"])
    # FIX: wrap .values() in list() -- on Python 3 it is a view object and
    # np.array(view) yields a 0-d object array, breaking .shape[0] below.
    colValues = np.array(
        list(gscript.vector_db_select(segments, layer=1)["values"].values())
    )
    number_of_segments = colValues.shape[0]
    cats = colValues[:, colNames == "cat"].astype(int).squeeze()

    # Renumber segments 1..N in map order; keep (new, old-cat) pairs for SQL
    nseg = np.arange(1, len(cats) + 1)
    nseg_cats = []
    for i in range(len(cats)):
        nseg_cats.append((nseg[i], cats[i]))

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open("rw")
    cur = segmentsTopo.table.conn.cursor()

    # id = cat (as does ISEG and NSEG)
    cur.executemany("update " + segments + " set id=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set ISEG=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set NSEG=? where cat=?", nseg_cats)

    # outseg = tostream: default is 0 if "tostream" is off-map
    cur.execute("update " + segments + " set OUTSEG=0")
    cur.executemany(
        "update " + segments + " set OUTSEG=? where tostream=?", nseg_cats
    )

    # Hydraulic geometry selection
    cur.execute("update " + segments + " set ICALC=" + str(ICALC))
    segmentsTopo.table.conn.commit()
    segmentsTopo.close()

    if ICALC == 0:
        gscript.message("")
        gscript.message("ICALC=0 (constant) not supported")
        gscript.message("Continuing nonetheless.")
        gscript.message("")

    # FIX (throughout): replaced identity comparisons with string literals
    # (options[...] is not "") with equality tests (!= "").
    if ICALC == 1:
        if options["width_points"] != "":
            # Can add machinery here for separate upstream and downstream
            # widths. But really should not vary all that much.
            # v.to_db(map=segments, option='start', columns='xr1,yr1')
            # v.to_db(map=segments, option='end', columns='xr2,yr2')
            gscript.run_command(
                "v.distance",
                from_=segments,
                to=options["width_points"],
                upload="to_attr",
                to_column=options["width_points_col"],
                column="WIDTH1",
            )
            v.db_update(map=segments, column="WIDTH2", query_column="WIDTH1")
        else:
            segmentsTopo = VectorTopo(segments)
            segmentsTopo.open("rw")
            cur = segmentsTopo.table.conn.cursor()
            cur.execute("update " + segments + " set WIDTH1=" + str(WIDTH1))
            cur.execute("update " + segments + " set WIDTH2=" + str(WIDTH2))
            segmentsTopo.table.conn.commit()
            segmentsTopo.close()

    if ICALC == 2:
        # REMOVE THIS MESSAGE ONCE THIS IS INCLUDED IN INPUT-FILE BUILDER
        gscript.message("")
        gscript.message("ICALC=2 (8-point channel + floodplain) not supported")
        gscript.message("Continuing nonetheless.")
        gscript.message("")
        if options["fp_width_pts"] != "":
            gscript.run_command(
                "v.distance",
                from_=segments,
                to=options["fp_width_pts"],
                upload="to_attr",
                to_column=options["fp_width_pts_col"],
                column="floodplain_width",
            )
        else:
            segmentsTopo = VectorTopo(segments)
            segmentsTopo.open("rw")
            cur = segmentsTopo.table.conn.cursor()
            cur.execute(
                "update "
                + segments
                + " set floodplain_width="
                + str(options["fp_width_value"])
            )
            segmentsTopo.table.conn.commit()
            segmentsTopo.close()

    if ICALC == 3:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        cur.execute("update " + segments + " set CDPTH=" + str(CDPTH))
        cur.execute("update " + segments + " set FDPTH=" + str(FDPTH))
        cur.execute("update " + segments + " set AWDTH=" + str(AWDTH))
        cur.execute("update " + segments + " set BWDTH=" + str(BWDTH))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()

    # values that are 0
    gscript.message("")
    gscript.message("NOTICE: not currently used:")
    gscript.message("IUPSEG, FLOW, RUNOFF, ETSW, and PPTSW.")
    gscript.message("All set to 0.")
    gscript.message("")

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open("rw")
    cur = segmentsTopo.table.conn.cursor()
    cur.execute("update " + segments + " set IUPSEG=" + str(0))
    cur.execute("update " + segments + " set FLOW=" + str(0))
    cur.execute("update " + segments + " set RUNOFF=" + str(0))
    cur.execute("update " + segments + " set ETSW=" + str(0))
    cur.execute("update " + segments + " set PPTSW=" + str(0))
    segmentsTopo.table.conn.commit()
    segmentsTopo.close()

    # Roughness
    # ICALC=1,2: Manning (in channel)
    if (options["roughch_raster"] != "") and (options["roughch_points"] != ""):
        gscript.fatal(
            "Choose either a raster or vector or a value as Manning's n input."
        )
    if options["roughch_raster"] != "":
        ROUGHCH = options["roughch_raster"]
        v.rast_stats(
            raster=ROUGHCH,
            method="average",
            column_prefix="tmp",
            map=segments,
            flags="c",
        )
        # v.db_renamecolumn(map=segments, column='tmp_average,ROUGHCH', quiet=True)
        v.db_update(
            map=segments, column="ROUGHCH", query_column="tmp_average", quiet=True
        )
        v.db_dropcolumn(map=segments, columns="tmp_average", quiet=True)
    elif options["roughch_points"] != "":
        ROUGHCH = options["roughch_points"]
        gscript.run_command(
            "v.distance",
            from_=segments,
            to=ROUGHCH,
            upload="to_attr",
            to_column=options["roughch_pt_col"],
            column="ROUGHCH",
        )
    else:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        ROUGHCH = options["roughch_value"]
        cur.execute("update " + segments + " set ROUGHCH=" + str(ROUGHCH))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()

    # ICALC=2: Manning (overbank)
    if (options["roughbk_raster"] != "") and (options["roughbk_points"] != ""):
        gscript.fatal(
            "Choose either a raster or vector or a value as Manning's n input."
        )
    if options["roughbk_raster"] != "":
        ROUGHBK = options["roughbk_raster"]
        v.rast_stats(
            raster=ROUGHBK,
            method="average",
            column_prefix="tmp",
            map=segments,
            flags="c",
        )
        v.db_renamecolumn(map=segments, column="tmp_average,ROUGHBK", quiet=True)
    elif options["roughbk_points"] != "":
        ROUGHBK = options["roughbk_points"]
        gscript.run_command(
            "v.distance",
            from_=segments,
            to=ROUGHBK,
            upload="to_attr",
            to_column=options["roughbk_pt_col"],
            column="ROUGHBK",
        )
    else:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        ROUGHBK = options["roughbk_value"]
        cur.execute("update " + segments + " set ROUGHBK=" + str(ROUGHBK))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()
# Finish building the reach attribute-column list.
# NOTE(review): 'reach_columns' is initialised before this chunk begins --
# only the tail of the list construction is visible here.
reach_columns.append('IREACH integer')
reach_columns.append('RCHLEN integer')
reach_columns.append('STRTOP double precision')
reach_columns.append('SLOPE double precision')
reach_columns.append('STRTHICK double precision')
# Collapse into the comma-separated form expected by v.db_addcolumn.
reach_columns = ",".join(reach_columns)

# Create a map to work with
v.extract(input='streams', output='tmp2', type='line', overwrite=True)
# Intersect stream lines with the grid cells: each resulting piece is a reach.
v.overlay(ainput='tmp2', atype='line', binput='grid', output='reaches', operator='and', overwrite=True)
v.db_addcolumn(map='reaches', columns=reach_columns)

# Rename a,b columns (v.overlay prefixes source attributes with a_/b_)
v.db_renamecolumn(map='reaches', column=('a_x1', 'x1'))
v.db_renamecolumn(map='reaches', column=('a_x2', 'x2'))
v.db_renamecolumn(map='reaches', column=('a_y1', 'y1'))
v.db_renamecolumn(map='reaches', column=('a_y2', 'y2'))
v.db_renamecolumn(map='reaches', column=('a_stream_type', 'stream_type'))
v.db_renamecolumn(map='reaches', column=('a_type_code', 'type_code'))
v.db_renamecolumn(map='reaches', column=('a_cat', 'rnum_cat'))
v.db_renamecolumn(map='reaches', column=('a_tostream', 'tostream'))
v.db_renamecolumn(map='reaches', column=('a_id', 'segment_id'))
v.db_renamecolumn(map='reaches', column=('a_OUTSEG', 'OUTSEG'))
v.db_renamecolumn(map='reaches', column=('b_row', 'row'))
v.db_renamecolumn(map='reaches', column=('b_col', 'col'))
v.db_renamecolumn(map='reaches', column=('b_id', 'cell_id'))

# Drop some unnecessary columns
v.db_dropcolumn(map='reaches', columns='b_area_m2')