def setUpClass(cls):
    """Create a not empty table instance.

    Copies the 'elevation' raster to a temporary working raster so the
    tests can modify it without touching the original map.
    """
    # Local import keeps the GRASS dependency out of module import time.
    from grass.pygrass.modules.shortcuts import general as general_tools

    cls.name = 'elevation'
    cls.tmp = 'tmp' + cls.name
    # overwrite=True: the temporary copy may already exist from a prior run.
    general_tools.copy(rast=[cls.name, cls.tmp], overwrite=True)
def sbare():
    """Reclassify/color the bare-soil loss map and optionally report stats.

    Operates entirely on module-level state: the ``rsoillossreclass`` and
    ``rsoillossstats`` GRASS module objects, the ``maps`` dict, the
    ``flag_s``/``flag_c`` parser flags, and ``output``.
    """
    # -u: presumably "update colors/rules only" -- TODO confirm against
    # the r.soilloss.reclass addon documentation.
    rsoillossreclass.flags.u = True
    rsoillossreclass(maps['soillossbare'].name, 'soillossbare',flags='')

    if flag_s:
        # Parcel-wise statistics of the bare-soil loss map.
        print('\n \n Statistics for soilloss <%s> : '%(soillossbaremap.name))
        rsoillossstats(soilloss=soillossbaremap.name,
                       map=parcelmap.name, parcelnumcol='id')

    if not flag_c:
        # No custom-output flag: copy the result to the requested output name.
        g.copy(rast=(soillossbaremap.name,output))
        gscript.message('Copy made to <%s> for automatic output' %(output))
def scpmax():
    """Compute the maximum-permissible CP-factor (cpmax) map.

    Uses the parcel-corrected bare-soil loss map when it exists, otherwise
    the plain bare-soil loss map. Reads module-level state (module objects,
    maps, parser flags, ``maxsoilloss`` and ``output``).
    """
    # Prefer the k-factor-corrected map when it has been produced.
    if soillossbarecorrmap.exist():
        rsoillosscpmax.inputs.soillossbare = soillossbarecorrmap.name
    else:
        rsoillosscpmax.inputs.soillossbare = soillossbaremap.name
    rsoillosscpmax.inputs.maxsoilloss=maxsoilloss
    rsoillosscpmax(cpmax=soillosscpmaxmap.name)

    if not flag_c:
        # No custom-output flag: copy the result to the requested output name.
        g.copy(rast=(soillosscpmaxmap.name,output))
        gscript.message('Copy made to <%s> for automatic output' %(output))

    if flag_s:
        # BUGFIX: interpolate the map *name*, not the map object's repr,
        # consistent with the other statistics messages in this file.
        print('\n \n Statistics for <%s> : '%(soillosscpmaxmap.name))
        rsoillossstats(soilloss=soillosscpmaxmap.name,
                       map=parcelmap.name, parcelnumcol='id')
def sbareupdate():
    """Update bare-soil loss with a parcel-wise k-factor and reclassify.

    Runs ``rsoillossupdate`` with the parcel map's 'kfactor' column, then
    reclassifies/colors the corrected map and optionally prints statistics.
    Reads module-level state (module objects, maps, flags, ``output``).
    """
    rsoillossupdate.inputs.map = parcelmap.name
    rsoillossupdate.inputs.factorold = maps['kfactor'].name
    rsoillossupdate.inputs.factorcol = 'kfactor'
    # -k: use k-factor components; -p: patch with the old factor map
    # (presumed from flag names -- TODO confirm against r.soilloss.update).
    rsoillossupdate.flags.k = True
    rsoillossupdate.flags.p = True
    rsoillossupdate(soillossin=maps['soillossbare'].name,
                    soillossout=soillossbarecorrmap.name)
    gscript.message('Soilloss for bare soil successfully updated to <%s> using parcelwise kfactor.' %(soillossbarecorrmap.name))

    if not flag_c:
        # No custom-output flag: copy the result to the requested output name.
        g.copy(rast=(soillossbarecorrmap.name,output))
        gscript.message('Copy made to <%s> for automatic output' %(output))

    rsoillossreclass(soillossbarecorrmap.name, 'soillossbare',flags='')
    gscript.message('Reclassified and colored maps found in <%s.3> and <%s.9> .'%(soillossbarecorrmap.name, soillossbarecorrmap.name))

    if flag_s:
        # BUGFIX: the message referenced soillossgrowmap (copy-paste from
        # sgrow()); the statistics below are for the corrected bare-soil map.
        print('\n \n Statistics for soilloss on bare soil <%s> : '%(soillossbarecorrmap.name))
        rsoillossstats(soilloss=soillossbarecorrmap.name,
                       map=parcelmap.name, parcelnumcol='id')
def sgrow():
    """Calculate soil loss for grown (cropped) soil using C and P factors.

    Uses the parcel-corrected bare-soil loss map when available, multiplies
    in the parcel-wise C and P factor columns, then reclassifies/colors the
    result and optionally prints statistics. Reads module-level state.
    """
    # Prefer the k-factor-corrected map when it has been produced.
    if soillossbarecorrmap.exist():
        rsoillossgrow.inputs.soillossbare = soillossbarecorrmap.name
    else:
        rsoillossgrow.inputs.soillossbare = soillossbaremap.name
    rsoillossgrow.inputs.map = parcelmap.name
    # Column names for the C and P factors come from the params table.
    rsoillossgrow.inputs.factorcols = (params['colnames'][('cfactor')],)
    rsoillossgrow.inputs.factorcols += (params['colnames'][('pfactor')],)
    rsoillossgrow(soillossgrow=soillossgrowmap.name)
    # BUGFIX: interpolate the map *name* rather than the map object's repr,
    # consistent with the other messages in this file.
    gscript.message('Soilloss for grown soil successfully calculated to <%s> using parcelwise C and P factor.' %(soillossgrowmap.name))

    if not flag_c:
        # No custom-output flag: copy the result to the requested output name.
        g.copy(rast=(soillossgrowmap.name,output))
        gscript.message('Copy made to <%s> for automatic output' %(output))

    rsoillossreclass(soillossgrowmap.name, 'soillossgrow',flags='')
    gscript.message('Reclassified and colored maps found in <%s.3> and <%s.9> .'%(soillossgrowmap.name, soillossgrowmap.name))

    if flag_s:
        # BUGFIX: map name, not object repr (see above).
        print('\n \n Statistics for soilloss on grown soil <%s> : '%(soillossgrowmap.name))
        rsoillossstats(soilloss=soillossgrowmap.name,
                       map=parcelmap.name, parcelnumcol='id')
def main():
    """
    Adds GSFLOW parameters to a set of HRU sub-basins
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()
    basins = options['input']
    HRU = options['output']
    slope = options['slope']
    aspect = options['aspect']
    elevation = options['elevation']
    land_cover = options['cov_type']
    soil = options['soil_type']

    ################################
    # CREATE HRUs FROM SUB-BASINS  #
    ################################

    g.copy(vector=(basins,HRU), overwrite=gscript.overwrite())

    ############################################
    # ATTRIBUTE COLUMNS (IN ORDER FROM MANUAL) #
    ############################################

    # HRU
    hru_columns = []
    # Self ID
    hru_columns.append('id integer')  # nhru
    # Basic Physical Attributes (Geometry)
    hru_columns.append('hru_area double precision')  # acres (!!!!)
    hru_columns.append('hru_area_m2 double precision')  # [not for GSFLOW: for me!]
    hru_columns.append('hru_aspect double precision')  # Mean aspect [degrees]
    hru_columns.append('hru_elev double precision')  # Mean elevation
    hru_columns.append('hru_lat double precision')  # Latitude of centroid
    hru_columns.append('hru_lon double precision')  # Longitude of centroid
                                                    # unnecessary but why not?
    hru_columns.append('hru_slope double precision')  # Mean slope [percent]
    # Basic Physical Attributes (Other)
    #hru_columns.append('hru_type integer')  # 0=inactive; 1=land; 2=lake; 3=swale; almost all will be 1
    #hru_columns.append('elev_units integer')  # 0=feet; 1=meters. 0=default. I think I will set this to 1 by default.
    # Measured input
    hru_columns.append('outlet_sta integer')  # Index of streamflow station at basin outlet:
                                              # station number if it has one, 0 if not
    # Note that the below specify projections and note lat/lon; they really seem
    # to work for any projected coordinates, with _x, _y, in meters, and _xlong,
    # _ylat, in feet (i.e. they are just northing and easting). The meters and feet
    # are not just simple conversions, but actually are required for different
    # modules in the code, and are hence redundant but intentional.
    hru_columns.append('hru_x double precision')  # Easting [m]
    hru_columns.append('hru_xlong double precision')  # Easting [feet]
    hru_columns.append('hru_y double precision')  # Northing [m]
    hru_columns.append('hru_ylat double precision')  # Northing [feet]

    # Streamflow and lake routing
    hru_columns.append('K_coef double precision')  # Travel time of flood wave to next downstream segment;
                                                   # this is the Muskingum storage coefficient
                                                   # 1.0 for reservoirs, diversions, and segments flowing
                                                   # out of the basin
    hru_columns.append('x_coef double precision')  # Amount of attenuation of flow wave;
                                                   # this is the Muskingum routing weighting factor
                                                   # range: 0.0--0.5; default 0.2
                                                   # 0 for all segments flowing out of the basin
    hru_columns.append('hru_segment integer')  # ID of stream segment to which flow will be routed
                                               # this is for non-cascade routing (flow goes directly
                                               # from HRU to stream segment)
    hru_columns.append('obsin_segment integer')  # Index of measured streamflow station that replaces
                                                 # inflow to a segment
    hru_columns.append('cov_type integer')  # 0=bare soil;1=grasses; 2=shrubs; 3=trees; 4=coniferous
    hru_columns.append('soil_type integer')  # 1=sand; 2=loam; 3=clay

    # Create strings
    hru_columns = ",".join(hru_columns)

    # Add columns to tables
    v.db_addcolumn(map=HRU, columns=hru_columns, quiet=True)

    ###########################
    # UPDATE DATABASE ENTRIES #
    ###########################

    # NOTE(review): np.array over dict .values() is Python-2 era code; under
    # Python 3 this would need list(...) around .values() -- confirm target
    # interpreter before changing.
    colNames = np.array(gscript.vector_db_select(HRU, layer=1)['columns'])
    colValues = np.array(gscript.vector_db_select(HRU, layer=1)['values'].values())
    number_of_hrus = colValues.shape[0]
    cats = colValues[:,colNames == 'cat'].astype(int).squeeze()
    rnums = colValues[:,colNames == 'rnum'].astype(int).squeeze()

    # nhru is a simple 1..N renumbering paired with each original category.
    nhru = np.arange(1, number_of_hrus + 1)
    nhrut = []
    for i in range(len(nhru)):
        nhrut.append( (nhru[i], cats[i]) )

    # Access the HRUs
    hru = VectorTopo(HRU)
    # Open the map with topology:
    hru.open('rw')
    # Create a cursor
    cur = hru.table.conn.cursor()
    # Use it to loop across the table
    cur.executemany("update "+HRU+" set id=? where cat=?", nhrut)
    # Commit changes to the table
    hru.table.conn.commit()
    # Close the table
    hru.close()

    """
    # Do the same for basins <-------------- DO THIS OR SIMPLY HAVE HRUs OVERLAIN WITH GRID CELLS? IN THIS CASE, RMV AREA ADDITION TO GRAVRES
    v.db_addcolumn(map=basins, columns='id int', quiet=True)
    basins = VectorTopo(basins)
    basins.open('rw')
    cur = basins.table.conn.cursor()
    cur.executemany("update basins set id=? where cat=?", nhrut)
    basins.table.conn.commit()
    basins.close()
    """

    # if you want to append to table
    # cur.executemany("update HRU(id) values(?)", nhrut) # "insert into" will add rows

    #hru_columns.append('hru_area double precision')

    # Acres b/c USGS
    v.to_db(map=HRU, option='area', columns='hru_area', units='acres', quiet=True)
    v.to_db(map=HRU, option='area', columns='hru_area_m2', units='meters', quiet=True)

    # GET MEAN VALUES FOR THESE NEXT ONES, ACROSS THE BASIN

    # SLOPE (and aspect)
    #####################
    v.rast_stats(map=HRU, raster=slope, method='average', column_prefix='tmp', flags='c', quiet=True)
    v.db_update(map=HRU, column='hru_slope', query_column='tmp_average', quiet=True)

    # ASPECT
    #########
    v.db_dropcolumn(map=HRU, columns='tmp_average', quiet=True)
    # Dealing with conversion from degrees (no good average) to something I can
    # average -- x- and y-vectors
    # Geographic coordinates, so sin=x, cos=y.... not that it matters so long
    # as I am consistent in how I return to degrees
    r.mapcalc('aspect_x = sin(' + aspect + ')', overwrite=gscript.overwrite(), quiet=True)
    r.mapcalc('aspect_y = cos(' + aspect + ')', overwrite=gscript.overwrite(), quiet=True)
    #grass.run_command('v.db.addcolumn', map=HRU, columns='aspect_x_sum double precision, aspect_y_sum double precision, ncells_in_hru integer')
    v.rast_stats(map=HRU, raster='aspect_x', method='sum', column_prefix='aspect_x', flags='c', quiet=True)
    v.rast_stats(map=HRU, raster='aspect_y', method='sum', column_prefix='aspect_y', flags='c', quiet=True)
    hru = VectorTopo(HRU)
    hru.open('rw')
    cur = hru.table.conn.cursor()
    cur.execute("SELECT cat,aspect_x_sum,aspect_y_sum FROM %s" %hru.name)
    _arr = np.array(cur.fetchall()).astype(float)
    _cat = _arr[:,0]
    _aspect_x_sum = _arr[:,1]
    _aspect_y_sum = _arr[:,2]
    # Mean aspect from the summed unit vectors, folded into [0, 360).
    aspect_angle = np.arctan2(_aspect_y_sum, _aspect_x_sum) * 180. / np.pi
    aspect_angle[aspect_angle < 0] += 360  # all positive
    aspect_angle_cat = np.vstack((aspect_angle, _cat)).transpose()
    cur.executemany("update "+ HRU +" set hru_aspect=? where cat=?", aspect_angle_cat)
    hru.table.conn.commit()
    hru.close()

    # ELEVATION
    ############
    v.rast_stats(map=HRU, raster=elevation, method='average', column_prefix='tmp', flags='c', quiet=True)
    v.db_update(map=HRU, column='hru_elev', query_column='tmp_average', quiet=True)
    v.db_dropcolumn(map=HRU, columns='tmp_average', quiet=True)

    # CENTROIDS
    ############
    # get x,y of centroid -- but have areas not in database table, that do have
    # centroids, and having a hard time finding a good way to get rid of them!
    # They have duplicate category values!
    # Perhaps these are little dangles on the edges of the vectorization where
    # the raster value was the same but pinched out into 1-a few cells?
    # From looking at map, lots of extra centroids on area boundaries, and removing
    # small areas (though threshold hard to guess) gets rid of these
    hru = VectorTopo(HRU)
    hru.open('rw')
    hru_cats = []
    hru_coords = []
    # Collect centroid categories and coordinates.
    for hru_i in hru:
        if type(hru_i) is vector.geometry.Centroid:
            hru_cats.append(hru_i.cat)
            hru_coords.append(hru_i.coords())
    hru_cats = np.array(hru_cats)
    hru_coords = np.array(hru_coords)
    hru.rewind()
    # Map each centroid to the area object containing it.
    hru_area_ids = []
    for coor in hru_coords:
        _area = hru.find_by_point.area(Point(coor[0], coor[1]))
        hru_area_ids.append(_area)
    hru_area_ids = np.array(hru_area_ids)
    hru.rewind()
    hru_areas = []
    for _area_id in hru_area_ids:
        hru_areas.append(_area_id.area())
    hru_areas = np.array(hru_areas)
    hru.rewind()
    allcats = sorted(list(set(list(hru_cats))))

    # Now create weighted mean
    # (categories with multiple centroids get an area-weighted mean position)
    hru_centroid_locations = []
    for cat in allcats:
        hrus_with_cat = hru_cats[hru_cats == cat]
        if len(hrus_with_cat) == 1:
            hru_centroid_locations.append((hru_coords[hru_cats == cat]).squeeze())
        else:
            _centroids = hru_coords[hru_cats == cat]
            #print _centroids
            _areas = hru_areas[hru_cats == cat]
            #print _areas
            _x = np.average(_centroids[:,0], weights=_areas)
            _y = np.average(_centroids[:,1], weights=_areas)
            #print _x, _y
            hru_centroid_locations.append(np.array([_x, _y]))

    # Now upload weighted mean to database table
    # allcats and hru_centroid_locations are co-indexed
    index__cats = create_iterator(HRU)
    cur = hru.table.conn.cursor()
    for i in range(len(allcats)):
        # meters
        cur.execute('update '+HRU +' set hru_x='+str(hru_centroid_locations[i][0]) +' where cat='+str(allcats[i]))
        cur.execute('update '+HRU +' set hru_y='+str(hru_centroid_locations[i][1]) +' where cat='+str(allcats[i]))
        # feet
        cur.execute('update '+HRU +' set hru_xlong='+str(hru_centroid_locations[i][0]*3.28084) +' where cat='+str(allcats[i]))
        cur.execute('update '+HRU +' set hru_ylat='+str(hru_centroid_locations[i][1]*3.28084) +' where cat='+str(allcats[i]))
        # (un)Project to lat/lon
        # NOTE(review): dict .keys()[0] is Python-2 only; py3 would need
        # list(...)[0] -- confirm target interpreter before changing.
        _centroid_ll = gscript.parse_command('m.proj', coordinates=list(hru_centroid_locations[i]), flags='od').keys()[0]
        _lon, _lat, _z = _centroid_ll.split('|')
        cur.execute('update '+HRU +' set hru_lon='+_lon +' where cat='+str(allcats[i]))
        cur.execute('update '+HRU +' set hru_lat='+_lat +' where cat='+str(allcats[i]))

    # feet -- not working.
    # Probably an issue with index__cats -- maybe fix later, if needed
    # But currently not a major speed issue
    """
    cur.executemany("update "+HRU+" set hru_xlong=?*3.28084 where hru_x=?", index__cats)
    cur.executemany("update "+HRU+" set hru_ylat=?*3.28084 where hru_y=?", index__cats)
    """

    cur.close()
    hru.table.conn.commit()
    hru.close()

    # ID NUMBER
    ############
    #cur.executemany("update "+HRU+" set hru_segment=? where id=?",
    #                index__cats)
    # Segment number = HRU ID number
    v.db_update(map=HRU, column='hru_segment', query_column='id', quiet=True)

    # LAND USE/COVER
    ############
    try:
        land_cover = int(land_cover)
    except:
        pass
    if type(land_cover) is int:
        if land_cover <= 3:
            v.db_update(map=HRU, column='cov_type', value=land_cover, quiet=True)
        else:
            sys.exit("WARNING: INVALID LAND COVER TYPE. CHECK INTEGER VALUES.\n"
                     "EXITING TO ALLOW USER TO CHANGE BEFORE RUNNING GSFLOW")
    else:
        # NEED TO UPDATE THIS TO MODAL VALUE!!!!
        gscript.message("Warning: values taken from HRU centroids. Code should be updated to")
        gscript.message("acquire modal values")
        v.what_rast(map=HRU, type='centroid', raster=land_cover, column='cov_type', quiet=True)
        #v.rast_stats(map=HRU, raster=land_cover, method='average', column_prefix='tmp', flags='c', quiet=True)
        #v.db_update(map=HRU, column='cov_type', query_column='tmp_average', quiet=True)
        #v.db_dropcolumn(map=HRU, columns='tmp_average', quiet=True)

    # SOIL
    ############
    try:
        soil = int(soil)
    except:
        pass
    if type(soil) is int:
        if (soil > 0) and (soil <= 3):
            v.db_update(map=HRU, column='soil_type', value=soil, quiet=True)
        else:
            sys.exit("WARNING: INVALID SOIL TYPE. CHECK INTEGER VALUES.\n"
                     "EXITING TO ALLOW USER TO CHANGE BEFORE RUNNING GSFLOW")
    else:
        # NEED TO UPDATE THIS TO MODAL VALUE!!!!
        gscript.message("Warning: values taken from HRU centroids. Code should be updated to")
        gscript.message("acquire modal values")
        v.what_rast(map=HRU, type='centroid', raster=soil, column='soil_type', quiet=True)
def main():
    """Entry point for r.terrain.texture.

    Computes terrain surface texture, convexity and concavity rasters from a
    DEM, and optionally an 8/12/16-class terrain-surface classification.
    Reads ``options`` populated by the GRASS parser; uses module-level
    helpers (``temp_map``, ``string_to_rules``, ``laplacian_matrix``,
    ``classification``, ``categories``, ``colors``) and globals
    (``current_reg``, ``mask_test``, ``original_mask``, ``TMP_RAST``).
    """
    elevation = options['elevation']
    slope = options['slope']
    flat_thres = float(options['flat_thres'])
    curv_thres = float(options['curv_thres'])
    filter_size = int(options['filter_size'])
    counting_size = int(options['counting_size'])
    nclasses = int(options['classes'])
    texture = options['texture']
    convexity = options['convexity']
    concavity = options['concavity']
    features = options['features']

    # remove mapset from output name in case of overwriting existing map
    texture = texture.split('@')[0]
    convexity = convexity.split('@')[0]
    concavity = concavity.split('@')[0]
    features = features.split('@')[0]

    # store current region settings (restored later via g.region(**current_reg))
    global current_reg
    current_reg = parse_key_val(g.region(flags='pg', stdout_=PIPE).outputs.stdout)
    del current_reg['projection']
    del current_reg['zone']
    del current_reg['cells']

    # check for existing mask and backup if found
    global mask_test
    mask_test = gs.list_grouped(
        type='rast', pattern='MASK')[gs.gisenv()['MAPSET']]
    if mask_test:
        global original_mask
        original_mask = temp_map('tmp_original_mask')
        g.copy(raster=['MASK', original_mask])

    # error checking
    if flat_thres < 0:
        gs.fatal('Parameter thres cannot be negative')

    if filter_size % 2 == 0 or counting_size % 2 == 0:
        gs.fatal(
            'Filter or counting windows require an odd-numbered window size')

    if filter_size >= counting_size:
        gs.fatal(
            'Filter size needs to be smaller than the counting window size')

    if features != '' and slope == '':
        gs.fatal('Need to supply a slope raster in order to produce the terrain classification')

    # Terrain Surface Texture -------------------------------------------------
    # smooth the dem
    gs.message("Calculating terrain surface texture...")
    gs.message(
        "1. Smoothing input DEM with a {n}x{n} median filter...".format(
            n=filter_size))
    filtered_dem = temp_map('tmp_filtered_dem')
    gs.run_command("r.neighbors", input = elevation, method = "median",
                   size = filter_size, output = filtered_dem, flags='c',
                   quiet=True)

    # extract the pits and peaks based on the threshold
    pitpeaks = temp_map('tmp_pitpeaks')
    gs.message("2. Extracting pits and peaks with difference > thres...")
    r.mapcalc(expression='{x} = if ( abs({dem}-{median})>{thres}, 1, 0)'.format(
        x=pitpeaks, dem=elevation, thres=flat_thres, median=filtered_dem),
        quiet=True)

    # calculate density of pits and peaks
    gs.message("3. Using resampling filter to create terrain texture...")
    window_radius = (counting_size-1)/2
    y_radius = float(current_reg['ewres'])*window_radius
    x_radius = float(current_reg['nsres'])*window_radius
    resample = temp_map('tmp_density')
    r.resamp_filter(input=pitpeaks, output=resample,
                    filter=['bartlett','gauss'], radius=[x_radius,y_radius],
                    quiet=True)

    # convert to percentage
    gs.message("4. Converting to percentage...")
    r.mask(raster=elevation, overwrite=True, quiet=True)
    r.mapcalc(expression='{x} = float({y} * 100)'.format(x=texture, y=resample),
              quiet=True)
    r.mask(flags='r', quiet=True)
    r.colors(map=texture, color='haxby', quiet=True)

    # Terrain convexity/concavity ---------------------------------------------
    # surface curvature using lacplacian filter
    gs.message("Calculating terrain convexity and concavity...")
    gs.message("1. Calculating terrain curvature using laplacian filter...")

    # grow the map to remove border effects and run laplacian filter
    dem_grown = temp_map('tmp_elevation_grown')
    laplacian = temp_map('tmp_laplacian')
    g.region(n=float(current_reg['n']) + (float(current_reg['nsres']) * filter_size),
             s=float(current_reg['s']) - (float(current_reg['nsres']) * filter_size),
             w=float(current_reg['w']) - (float(current_reg['ewres']) * filter_size),
             e=float(current_reg['e']) + (float(current_reg['ewres']) * filter_size))
    r.grow(input=elevation, output=dem_grown, radius=filter_size, quiet=True)
    r.mfilter(
        input=dem_grown, output=laplacian,
        filter=string_to_rules(laplacian_matrix(filter_size)), quiet=True)

    # extract convex and concave pixels
    gs.message("2. Extracting convexities and concavities...")
    convexities = temp_map('tmp_convexities')
    concavities = temp_map('tmp_concavities')

    r.mapcalc(
        expression='{x} = if({laplacian}>{thres}, 1, 0)'\
            .format(x=convexities, laplacian=laplacian, thres=curv_thres),
        quiet=True)
    r.mapcalc(
        expression='{x} = if({laplacian}<-{thres}, 1, 0)'\
            .format(x=concavities, laplacian=laplacian, thres=curv_thres),
        quiet=True)

    # calculate density of convexities and concavities
    gs.message("3. Using resampling filter to create surface convexity/concavity...")
    resample_convex = temp_map('tmp_convex')
    resample_concav = temp_map('tmp_concav')
    r.resamp_filter(input=convexities, output=resample_convex,
                    filter=['bartlett','gauss'], radius=[x_radius,y_radius],
                    quiet=True)
    r.resamp_filter(input=concavities, output=resample_concav,
                    filter=['bartlett','gauss'], radius=[x_radius,y_radius],
                    quiet=True)

    # convert to percentages
    gs.message("4. Converting to percentages...")
    g.region(**current_reg)
    r.mask(raster=elevation, overwrite=True, quiet=True)
    r.mapcalc(expression='{x} = float({y} * 100)'.format(x=convexity, y=resample_convex), quiet=True)
    r.mapcalc(expression='{x} = float({y} * 100)'.format(x=concavity, y=resample_concav), quiet=True)
    r.mask(flags='r', quiet=True)

    # set colors
    r.colors_stddev(map=convexity, quiet=True)
    r.colors_stddev(map=concavity, quiet=True)

    # Terrain classification Flowchart-----------------------------------------
    if features != '':
        gs.message("Performing terrain surface classification...")
        # level 1 produces classes 1 thru 8
        # level 2 produces classes 5 thru 12
        # level 3 produces classes 9 thru 16
        if nclasses == 8: levels = 1
        if nclasses == 12: levels = 2
        if nclasses == 16: levels = 3

        classif = []
        for level in range(levels):
            # mask previous classes x:x+4
            if level != 0:
                min_cla = (4*(level+1))-4
                clf_msk = temp_map('tmp_clf_mask')
                rules = '1:{0}:1'.format(min_cla)
                r.recode(
                    input=classif[level-1], output=clf_msk,
                    rules=string_to_rules(rules), overwrite=True)
                r.mask(raster=clf_msk, flags='i', quiet=True, overwrite=True)

            # image statistics
            smean = r.univar(
                map=slope, flags='g', stdout_=PIPE).outputs.stdout.split(os.linesep)
            smean = [i for i in smean if i.startswith('mean=') is True][0].split('=')[1]

            cmean = r.univar(
                map=convexity, flags='g', stdout_=PIPE).outputs.stdout.split(os.linesep)
            cmean = [i for i in cmean if i.startswith('mean=') is True][0].split('=')[1]

            tmean = r.univar(
                map=texture, flags='g', stdout_=PIPE).outputs.stdout.split(os.linesep)
            tmean = [i for i in tmean if i.startswith('mean=') is True][0].split('=')[1]

            classif.append(temp_map('tmp_classes'))

            if level != 0:
                r.mask(flags='r', quiet=True)

            classification(level+1, slope, smean, texture, tmean, convexity,
                           cmean, classif[level])

        # combine decision trees
        merged = []
        for level in range(0, levels):
            if level > 0:
                min_cla = (4*(level+1))-4
                merged.append(temp_map('tmp_merged'))
                r.mapcalc(
                    expression='{x} = if({a}>{min}, {b}, {a})'.format(
                        x=merged[level], min=min_cla, a=merged[level-1],
                        b=classif[level]))
            else:
                merged.append(classif[level])
        g.rename(raster=[merged[-1], features], quiet=True)
        del TMP_RAST[-1]

    # Write metadata ----------------------------------------------------------
    history = 'r.terrain.texture '
    # BUGFIX: dict.iteritems() does not exist in Python 3; the rest of this
    # function is Python-3 compatible, so use .items().
    for key,val in options.items():
        history += key + '=' + str(val) + ' '

    r.support(map=texture, title=texture,
              description='generated by r.terrain.texture', history=history)
    r.support(map=convexity, title=convexity,
              description='generated by r.terrain.texture', history=history)
    r.support(map=concavity, title=concavity,
              description='generated by r.terrain.texture', history=history)

    if features != '':
        r.support(map=features, title=features,
                  description='generated by r.terrain.texture', history=history)

        # write color and category rules to tempfiles
        r.category(
            map=features, rules=string_to_rules(categories(nclasses)),
            separator='pipe')
        r.colors(
            map=features, rules=string_to_rules(colors(nclasses)), quiet=True)

    return 0
def main():
    """
    Builds river segments for input to the USGS hydrologic models
    PRMS and GSFLOW.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # I/O
    streams = options['input']
    segments = options['output']

    # Hydraulic geometry
    # BUGFIX: parse ICALC as an int; it was left as a string, so the
    # "ICALC == 3" dependency check below could never trigger.
    ICALC = int(options['icalc'])

    # ICALC=0: Constant depth
    WIDTH1 = options['width1']
    WIDTH2 = options['width2']

    # ICALC=1: Manning
    ROUGHCH = options['roughch']

    # ICALC=2: Manning
    ROUGHBK = options['roughbk']

    # ICALC=3: Power-law relationships (following Leopold and others)
    # The at-a-station default exponents are from Rhodes (1977)
    CDPTH = str(float(options['cdpth']) / 35.3146667)  # cfs to m^3/s
    FDPTH = options['fdpth']
    AWDTH = str(float(options['awdth']) / 35.3146667)  # cfs to m^3/s
    BWDTH = options['bwdth']

    ##################################################
    # CHECKING DEPENDENCIES WITH OPTIONAL PARAMETERS #
    ##################################################

    if ICALC == 3:
        if CDPTH and FDPTH and AWDTH and BWDTH:
            pass
        else:
            # CONSISTENCY FIX: use gscript.fatal like the rest of the file
            # ("grass" is not the name this module imports).
            gscript.fatal('Missing CDPTH, FDPTH, AWDTH, and/or BWDTH. '
                          'These are required when ICALC = 3.')

    ###########
    # RUNNING #
    ###########

    # New Columns for Segments
    segment_columns = []
    # Self ID
    segment_columns.append('id integer')  # segment number
    segment_columns.append('ISEG integer')  # segment number
    segment_columns.append('NSEG integer')  # segment number
    # for GSFLOW
    segment_columns.append('ICALC integer')  # 3 for power function
    segment_columns.append(
        'OUTSEG integer')  # downstream segment -- tostream, renumbered
    segment_columns.append('ROUGHCH double precision')  # overbank roughness
    segment_columns.append('ROUGHBK double precision')  # in-channel roughness
    segment_columns.append('WIDTH1 double precision')  # overbank roughness
    segment_columns.append('WIDTH2 double precision')  # in-channel roughness
    segment_columns.append('CDPTH double precision')  # depth coeff
    segment_columns.append('FDPTH double precision')  # depth exp
    segment_columns.append('AWDTH double precision')  # width coeff
    segment_columns.append('BWDTH double precision')  # width exp
    # The below will be all 0
    segment_columns.append(
        'IUPSEG varchar')  # upstream segment ID number, for diversions
    segment_columns.append('FLOW varchar')
    segment_columns.append('RUNOFF varchar')
    segment_columns.append('ETSW varchar')
    segment_columns.append('PPTSW varchar')

    segment_columns = ",".join(segment_columns)

    # CONSIDER THE EFFECT OF OVERWRITING COLUMNS -- WARN FOR THIS
    # IF MAP EXISTS ALREADY?

    # Create a map to work with
    g.copy(vector=(streams, segments), overwrite=gscript.overwrite())
    # and add its columns
    v.db_addcolumn(map=segments, columns=segment_columns)

    # Produce the data table entries
    ##################################
    colNames = np.array(gscript.vector_db_select(segments, layer=1)['columns'])
    # BUGFIX: wrap .values() in list() -- under Python 3, np.array over a
    # dict view produces a useless 0-d object array.
    colValues = np.array(
        list(gscript.vector_db_select(segments, layer=1)['values'].values()))
    number_of_segments = colValues.shape[0]
    cats = colValues[:, colNames == 'cat'].astype(int).squeeze()

    # Renumber segments 1..N, paired with their original category values.
    nseg = np.arange(1, len(cats) + 1)
    nseg_cats = []
    for i in range(len(cats)):
        nseg_cats.append((nseg[i], cats[i]))

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open('rw')
    cur = segmentsTopo.table.conn.cursor()

    # id = cat (as does ISEG and NSEG)
    cur.executemany("update " + segments + " set id=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set ISEG=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set NSEG=? where cat=?", nseg_cats)

    # outseg = tostream: default is 0 if "tostream" is off-map
    cur.execute("update " + segments + " set OUTSEG=0")
    cur.executemany("update " + segments + " set OUTSEG=? where tostream=?",
                    nseg_cats)

    # Discharge and hydraulic geometry
    cur.execute("update " + segments + " set WIDTH1=" + str(WIDTH1))
    cur.execute("update " + segments + " set WIDTH2=" + str(WIDTH2))
    cur.execute("update " + segments + " set ROUGHCH=" + str(ROUGHCH))
    cur.execute("update " + segments + " set ROUGHBK=" + str(ROUGHBK))
    cur.execute("update " + segments + " set ICALC=" + str(ICALC))
    cur.execute("update " + segments + " set CDPTH=" + str(CDPTH))
    cur.execute("update " + segments + " set FDPTH=" + str(FDPTH))
    cur.execute("update " + segments + " set AWDTH=" + str(AWDTH))
    cur.execute("update " + segments + " set BWDTH=" + str(BWDTH))

    gscript.message('')
    gscript.message('NOTICE: not currently used:')
    gscript.message('IUPSEG, FLOW, RUNOFF, ETSW, and PPTSW.')
    gscript.message('All set to 0.')
    gscript.message('')

    # values that are 0
    cur.execute("update " + segments + " set IUPSEG=" + str(0))
    cur.execute("update " + segments + " set FLOW=" + str(0))
    cur.execute("update " + segments + " set RUNOFF=" + str(0))
    cur.execute("update " + segments + " set ETSW=" + str(0))
    cur.execute("update " + segments + " set PPTSW=" + str(0))

    segmentsTopo.table.conn.commit()
    segmentsTopo.close()
def main():
    """
    Builds river segments for input to the USGS hydrologic models
    PRMS and GSFLOW.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # I/O
    streams = options["input"]
    segments = options["output"]

    # Hydraulic geometry
    ICALC = int(options["icalc"])

    # ICALC=0: Constant depth
    WIDTH1 = options["width1"]
    WIDTH2 = options["width2"]

    # ICALC=1,2: Manning (in channel and overbank): below

    # ICALC=3: Power-law relationships (following Leopold and others)
    # The at-a-station default exponents are from Rhodes (1977)
    CDPTH = str(float(options["cdpth"]) / 35.3146667)  # cfs to m^3/s
    FDPTH = options["fdpth"]
    AWDTH = str(float(options["awdth"]) / 35.3146667)  # cfs to m^3/s
    BWDTH = options["bwdth"]

    ##################################################
    # CHECKING DEPENDENCIES WITH OPTIONAL PARAMETERS #
    ##################################################

    if ICALC == 3:
        if CDPTH and FDPTH and AWDTH and BWDTH:
            pass
        else:
            gscript.fatal("Missing CDPTH, FDPTH, AWDTH, and/or BWDTH. "
                          "These are required when ICALC = 3.")

    ###########
    # RUNNING #
    ###########

    # New Columns for Segments
    segment_columns = []
    # Self ID
    segment_columns.append("id integer")  # segment number
    segment_columns.append("ISEG integer")  # segment number
    segment_columns.append("NSEG integer")  # segment number
    # for GSFLOW
    segment_columns.append(
        "ICALC integer"
    )  # 1 for channel, 2 for channel+fp, 3 for power function
    segment_columns.append(
        "OUTSEG integer")  # downstream segment -- tostream, renumbered
    segment_columns.append("ROUGHCH double precision")  # overbank roughness
    segment_columns.append("ROUGHBK double precision")  # in-channel roughness
    segment_columns.append("WIDTH1 double precision")  # overbank roughness
    segment_columns.append("WIDTH2 double precision")  # in-channel roughness
    segment_columns.append("CDPTH double precision")  # depth coeff
    segment_columns.append("FDPTH double precision")  # depth exp
    segment_columns.append("AWDTH double precision")  # width coeff
    segment_columns.append("BWDTH double precision")  # width exp
    segment_columns.append(
        "floodplain_width double precision"
    )  # floodplain width (8-pt approx channel + flat fp)
    # The below will be all 0
    segment_columns.append(
        "IUPSEG varchar")  # upstream segment ID number, for diversions
    segment_columns.append("FLOW varchar")
    segment_columns.append("RUNOFF varchar")
    segment_columns.append("ETSW varchar")
    segment_columns.append("PPTSW varchar")

    segment_columns = ",".join(segment_columns)

    # CONSIDER THE EFFECT OF OVERWRITING COLUMNS -- WARN FOR THIS
    # IF MAP EXISTS ALREADY?

    # Create a map to work with
    g.copy(vector=(streams, segments), overwrite=gscript.overwrite())
    # and add its columns
    v.db_addcolumn(map=segments, columns=segment_columns)

    # Produce the data table entries
    ##################################
    colNames = np.array(gscript.vector_db_select(segments, layer=1)["columns"])
    # BUGFIX: wrap .values() in list() -- under Python 3, np.array over a
    # dict view produces a useless 0-d object array.
    colValues = np.array(
        list(gscript.vector_db_select(segments, layer=1)["values"].values()))
    number_of_segments = colValues.shape[0]
    cats = colValues[:, colNames == "cat"].astype(int).squeeze()

    # Renumber segments 1..N, paired with their original category values.
    nseg = np.arange(1, len(cats) + 1)
    nseg_cats = []
    for i in range(len(cats)):
        nseg_cats.append((nseg[i], cats[i]))

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open("rw")
    cur = segmentsTopo.table.conn.cursor()

    # id = cat (as does ISEG and NSEG)
    cur.executemany("update " + segments + " set id=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set ISEG=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set NSEG=? where cat=?", nseg_cats)

    # outseg = tostream: default is 0 if "tostream" is off-map
    cur.execute("update " + segments + " set OUTSEG=0")
    cur.executemany("update " + segments + " set OUTSEG=? where tostream=?",
                    nseg_cats)

    # Hydraulic geometry selection
    cur.execute("update " + segments + " set ICALC=" + str(ICALC))
    segmentsTopo.table.conn.commit()
    segmentsTopo.close()

    # BUGFIX throughout below: compare option strings with != / == instead
    # of "is not" / "is" -- identity comparison against a literal "" is
    # implementation-dependent and raises SyntaxWarning on Python >= 3.8.
    if ICALC == 0:
        gscript.message("")
        gscript.message("ICALC=0 (constant) not supported")
        gscript.message("Continuing nonetheless.")
        gscript.message("")

    if ICALC == 1:
        if options["width_points"] != "":
            # Can add machinery here for separate upstream and downstream widths
            # But really should not vary all that much
            # v.to_db(map=segments, option='start', columns='xr1,yr1')
            # v.to_db(map=segments, option='end', columns='xr2,yr2')
            gscript.run_command(
                "v.distance",
                from_=segments,
                to=options["width_points"],
                upload="to_attr",
                to_column=options["width_points_col"],
                column="WIDTH1",
            )
            v.db_update(map=segments, column="WIDTH2", query_column="WIDTH1")
        else:
            segmentsTopo = VectorTopo(segments)
            segmentsTopo.open("rw")
            cur = segmentsTopo.table.conn.cursor()
            cur.execute("update " + segments + " set WIDTH1=" + str(WIDTH1))
            cur.execute("update " + segments + " set WIDTH2=" + str(WIDTH2))
            segmentsTopo.table.conn.commit()
            segmentsTopo.close()

    if ICALC == 2:
        # REMOVE THIS MESSAGE ONCE THIS IS INCLUDED IN INPUT-FILE BUILDER
        gscript.message("")
        gscript.message("ICALC=2 (8-point channel + floodplain) not supported")
        gscript.message("Continuing nonetheless.")
        gscript.message("")
        if options["fp_width_pts"] != "":
            gscript.run_command(
                "v.distance",
                from_=segments,
                to=options["fp_width_pts"],
                upload="to_attr",
                to_column=options["fp_width_pts_col"],
                column="floodplain_width",
            )
        else:
            segmentsTopo = VectorTopo(segments)
            segmentsTopo.open("rw")
            cur = segmentsTopo.table.conn.cursor()
            cur.execute("update " + segments + " set floodplain_width=" +
                        str(options["fp_width_value"]))
            segmentsTopo.table.conn.commit()
            segmentsTopo.close()

    if ICALC == 3:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        cur.execute("update " + segments + " set CDPTH=" + str(CDPTH))
        cur.execute("update " + segments + " set FDPTH=" + str(FDPTH))
        cur.execute("update " + segments + " set AWDTH=" + str(AWDTH))
        cur.execute("update " + segments + " set BWDTH=" + str(BWDTH))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()

    # values that are 0
    gscript.message("")
    gscript.message("NOTICE: not currently used:")
    gscript.message("IUPSEG, FLOW, RUNOFF, ETSW, and PPTSW.")
    gscript.message("All set to 0.")
    gscript.message("")

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open("rw")
    cur = segmentsTopo.table.conn.cursor()
    cur.execute("update " + segments + " set IUPSEG=" + str(0))
    cur.execute("update " + segments + " set FLOW=" + str(0))
    cur.execute("update " + segments + " set RUNOFF=" + str(0))
    cur.execute("update " + segments + " set ETSW=" + str(0))
    cur.execute("update " + segments + " set PPTSW=" + str(0))
    segmentsTopo.table.conn.commit()
    segmentsTopo.close()

    # Roughness
    # ICALC=1,2: Manning (in channel)
    if (options["roughch_raster"] != "") and (options["roughch_points"] != ""):
        gscript.fatal(
            "Choose either a raster or vector or a value as Manning's n input."
        )
    if options["roughch_raster"] != "":
        ROUGHCH = options["roughch_raster"]
        v.rast_stats(
            raster=ROUGHCH,
            method="average",
            column_prefix="tmp",
            map=segments,
            flags="c",
        )
        # v.db_renamecolumn(map=segments, column='tmp_average,ROUGHCH', quiet=True)
        v.db_update(map=segments, column="ROUGHCH", query_column="tmp_average",
                    quiet=True)
        v.db_dropcolumn(map=segments, columns="tmp_average", quiet=True)
    elif options["roughch_points"] != "":
        ROUGHCH = options["roughch_points"]
        gscript.run_command(
            "v.distance",
            from_=segments,
            to=ROUGHCH,
            upload="to_attr",
            to_column=options["roughch_pt_col"],
            column="ROUGHCH",
        )
    else:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        ROUGHCH = options["roughch_value"]
        cur.execute("update " + segments + " set ROUGHCH=" + str(ROUGHCH))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()

    # ICALC=2: Manning (overbank)
    if (options["roughbk_raster"] != "") and (options["roughbk_points"] != ""):
        gscript.fatal(
            "Choose either a raster or vector or a value as Manning's n input."
        )
    if options["roughbk_raster"] != "":
        ROUGHBK = options["roughbk_raster"]
        v.rast_stats(
            raster=ROUGHBK,
            method="average",
            column_prefix="tmp",
            map=segments,
            flags="c",
        )
        v.db_renamecolumn(map=segments, column="tmp_average,ROUGHBK", quiet=True)
    elif options["roughbk_points"] != "":
        ROUGHBK = options["roughbk_points"]
        gscript.run_command(
            "v.distance",
            from_=segments,
            to=ROUGHBK,
            upload="to_attr",
            to_column=options["roughbk_pt_col"],
            column="ROUGHBK",
        )
    else:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        ROUGHBK = options["roughbk_value"]
        cur.execute("update " + segments + " set ROUGHBK=" + str(ROUGHBK))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()
def main():
    """Update a bare-soil soil-loss raster with a parcelwise K factor.

    All parameters come from the module-level ``options``/``flags``
    dicts filled by the GRASS parser.  The new factor map is either
    given directly (``factornew``), rasterized from a vector attribute
    column (``factorcol``), and/or derived from soil-component columns
    (flag ``k``).  The corrected soil loss is then computed via
    r.mapcalc as ``soillossout = soillossin / factorold * factornew``.
    """
    soillossin = options['soillossin']
    soillossout = options['soillossout']
    factorold = options['factorold']
    factornew = options['factornew']
    vectormap = options['map']  # renamed locally; avoids shadowing builtin map
    factorcol = options['factorcol']

    flag_p = flags['p']  # patch factornew with factorold
    flag_k = flags['k']  # derive K factor from soil components
                         # (% clay p_T, silt p_U, stones p_st, humus p_H)

    if not factornew:
        factors = {}
        if flag_k:
            gscript.message('Using factor derived from soil components.')
            parcelmap = Vect(vectormap)
            parcelmap.open(mode='rw', layer=1)
            parcelmap.table.filters.select()
            cur = parcelmap.table.execute()
            rows = cur.fetchall()

            # Ensure the K-factor component columns exist before updating.
            for col in (u'Kb', u'Ks', u'Kh', u'K'):
                if col not in parcelmap.table.columns:
                    parcelmap.table.columns.add(col, u'DOUBLE')

            for row in rows:
                # Assumed column layout: id at index 1, clay/silt/stone/
                # humus percentages at indices 7-10 -- TODO confirm schema.
                rowid = row[1]
                p_T = row[7]
                p_U = row[8]
                p_st = row[9]
                p_H = row[10]
                print("Parzelle mit id %d :" % rowid)

                # Reset per parcel so a row with no matching class never
                # silently reuses the previous parcel's values (the
                # original code carried them over, or raised NameError
                # on the first row).
                Kb = Ks = Kh = None

                for sublist in bodenarten:
                    # match clay (p_T) and silt (p_U) class ranges
                    if p_T in range(sublist[2], sublist[3]) \
                            and p_U in range(sublist[4], sublist[5]):
                        print('Bodenart "' + sublist[1] + '", Kb = '
                              + str(sublist[6]))
                        Kb = sublist[6]
                        break

                for sublist in skelettgehalte:
                    if p_st < sublist[0]:
                        print('Skelettgehaltsklasse bis ' + str(sublist[0])
                              + ' , Ks = ' + str(sublist[1]))
                        Ks = sublist[1]
                        break

                for sublist in humusgehalte:
                    if p_H < sublist[0]:
                        # label fixed: humus component is Kh, not Ks
                        print('Humusgehaltsklasse bis ' + str(sublist[0])
                              + ' , Kh = ' + str(sublist[1]))
                        Kh = sublist[1]
                        break

                if Kb is None or Ks is None or Kh is None:
                    gscript.warning('No K factor class found for parcel '
                                    'id %s; skipping.' % rowid)
                    continue

                K = Kb * Ks * Kh
                print('K = ' + str(K))

                if K > 0:
                    # String-built SQL is tolerable here: all values come
                    # from the numeric lookup tables above, not user input.
                    parcelmap.table.execute(
                        "UPDATE " + parcelmap.name
                        + " SET Kb=" + str(Kb)
                        + ", Ks=" + str(Ks)
                        + ", Kh=" + str(Kh)
                        + ", K=" + str(K)
                        + " WHERE id=" + str(rowid))
                    parcelmap.table.conn.commit()

            parcelmap.close()

            factorcol2 = 'K'
            factors['k'] = vectormap.split('@')[0] + '.tmp.' + factorcol2
            v.to_rast(input=vectormap, use='attr', attrcolumn=factorcol2,
                      output=factors['k'])
            r.null(map=factors['k'], setnull='0')

        if factorcol:
            gscript.message('Using factor from column %s of vector map <%s>.'
                            % (factorcol, vectormap))
            factors['factorcol'] = vectormap.split('@')[0] + '.tmp.' + factorcol
            v.to_rast(input=vectormap, use='attr', attrcolumn=factorcol,
                      output=factors['factorcol'])
            r.null(map=factors['factorcol'], setnull='0')

        # was a Python-2 print statement ('print factors.keys()'):
        # a SyntaxError under Python 3
        print(factors.keys())

        if 'k' not in factors and 'factorcol' not in factors:
            gscript.fatal('Please provide either factor raster map or valid '
                          'vector map with factor column (kfactor) or factor '
                          'components columns (Kb, Ks, Kh)')

        factornew = vectormap.split('@')[0] + '.kfactor'
        if 'k' in factors and 'factorcol' in factors:
            # both sources present: the explicit column wins, the derived
            # K map fills its gaps (r.patch takes the first non-null value)
            r.patch(input=(factors['factorcol'], factors['k']),
                    output=factornew)
        elif 'k' in factors:
            g.copy(rast=(factors['k'], factornew))
        elif 'factorcol' in factors:
            g.copy(rast=(factors['factorcol'], factornew))

    if flag_p:
        # fill holes in the new factor map with values from the old one
        r.patch(input=(factornew, factorold), output=factornew)

    # soillossout = soillossin / factorold * factornew
    formula = soillossout + '=' + soillossin \
        + '/' + factorold \
        + '*' + factornew
    r.mapcalc(formula)

    # carry the color table of the input soil-loss map over to the output
    r.colors(map=soillossout, raster=soillossin)
def smeasure():
    """Compute soil loss for grown soil with protection measures applied.

    Imports the measures vector for the current farm (betrieb_id),
    rasterizes parcel- and measure-wise P and C factors, patches the
    measure factors over the parcel factors, and runs the soil-loss
    model for grown soil.  Results are reclassified, optionally
    summarized per parcel (flag ``s``) and copied to <output>
    (unless flag ``c``).
    """
    gscript.message('Import <%s>' % measuremap.name)
    measuremap.autoimport('measures', overwrite=True, quiet=quiet,
                          where="betrieb_id = %s" % betriebid)

    soillossbaremap = maps['soillossbare']
    kfactormap = maps['kfactor']

    if soillossbarecorrmap.exist():
        # prefer the parcelwise-corrected bare-soil map when available
        gscript.message('Using updated soillossbare map.')
        soillossbaremap = soillossbarecorrmap
        kfactormap = Rast(parcelmap.name + '.kfactor')

    if flag_b:
        # cut barrier measures out of the field blocks and recompute
        # the bare-soil loss on the reduced area
        measurebarriermap = Vect(measuremap.name + '_barrier')
        v.extract(input=measuremap.name, where="barrier = 1",
                  output=measurebarriermap.name)

        measurefieldblockmap = Vect(measuremap.name + '_fieldblocks')
        v.overlay(ainput=maps['fieldblocks'].name,
                  binput=measurebarriermap.name,
                  operator='not', output=measurefieldblockmap.name)

        rsoillossbare.inputs.elevation = maps['elevation'].name
        rsoillossbare.inputs.rfactor = maps['rfactor'].name
        rsoillossbare.inputs.kfactor = kfactormap.name
        rsoillossbare.inputs.map = measurefieldblockmap.name
        rsoillossbare.inputs.constant_m = '0.6'
        rsoillossbare.inputs.constant_n = '1.4'
        rsoillossbare.flags.r = True
        rsoillossbare(soillossbare=soillossbarebarriermap.name)
        soillossbaremap = soillossbarebarriermap

    # rasterize parcelwise P and C factors
    parcelpfactor = parcelmap.name + '.pfactor'
    parcelcfactor = parcelmap.name + '.cfactor'
    v.to_rast(input=parcelmap.name, use='attr', attrcolumn='pfactor',
              output=parcelpfactor)
    v.to_rast(input=parcelmap.name, use='attr', attrcolumn='cfactor',
              output=parcelcfactor)

    # rasterize measure-wise P and C factors
    measurepfactor = measuremap.name + '.pfactor'
    measurecfactor = measuremap.name + '.cfactor'
    v.to_rast(input=measuremap.name, use='attr', attrcolumn='pfactor',
              output=measurepfactor)
    v.to_rast(input=measuremap.name, use='attr', attrcolumn='cfactor',
              output=measurecfactor)

    # measure factors take precedence over parcel factors
    pfactor = parcelmap.name + '.pfactor.measure'
    cfactor = parcelmap.name + '.cfactor.measure'
    r.patch(input=(measurepfactor, parcelpfactor), output=pfactor)
    r.patch(input=(measurecfactor, parcelcfactor), output=cfactor)

    rsoillossgrow.inputs.soillossbare = soillossbaremap.name
    # BUGFIX: the original assigned pfactor to inputs.cfactor and
    # cfactor to inputs.pfactor (swapped); each factor map now feeds
    # its matching input.
    rsoillossgrow.inputs.cfactor = cfactor
    rsoillossgrow.inputs.pfactor = pfactor
    rsoillossgrow(soillossgrow=soillossmeasuremap.name)

    rsoillossreclass(soillossmeasuremap.name, 'soillossgrow', flags='')
    gscript.message('Reclassified and colored maps found in '
                    '<%s.3> and <%s.9> .'
                    % (soillossmeasuremap.name, soillossmeasuremap.name))

    if flag_s:
        # BUGFIX: message interpolated the unrelated soillossgrowmap;
        # the statistics below run on soillossmeasuremap.
        gscript.message('\n \n Statistics for soilloss on grown soil '
                        '<%s> : ' % (soillossmeasuremap.name))
        rsoillossstats(soilloss=soillossmeasuremap.name,
                       map=parcelmap.name, parcelnumcol='id')

    if not flag_c:
        g.copy(rast=(soillossmeasuremap.name, output))
        gscript.message('Copy made to <%s> for automatic output' % (output))