def __init__(self, points_xy=None, align_to_region=True, xmin=None, xmax=None, ymin=None, ymax=None):
    """Build a bounding box from a set of (x, y) points or from explicit
    extents, optionally snapped outward onto the current GRASS region's
    cell grid.

    :param points_xy: iterable of (x, y) pairs; when given, extents are
        the min/max of these points and the explicit xmin/xmax/ymin/ymax
        arguments are ignored.
    :param align_to_region: when truthy, expand the box so its edges fall
        on the current computational region's cell boundaries.
    :param xmin, xmax, ymin, ymax: explicit extents, used only when
        points_xy is None.
    """
    if points_xy is not None:
        points = np.array(points_xy)
        self.xmin = np.min(points[:, 0])
        self.xmax = np.max(points[:, 0])
        self.ymin = np.min(points[:, 1])
        self.ymax = np.max(points[:, 1])
    else:
        self.xmin = xmin
        self.ymin = ymin
        self.xmax = xmax
        self.ymax = ymax
    # BUGFIX: the original tested "align_to_region is not None", so passing
    # align_to_region=False still aligned (False is not None). Test
    # truthiness instead; None and False now both skip alignment.
    if align_to_region:
        reg = region.Region()
        reg_bbox = reg.get_bbox()  # hoisted: one call instead of four
        # Snap min edges down and max edges up to whole cells, measured
        # from the region edge on the corresponding side.
        self.xmin = np.floor((self.xmin - reg_bbox.west) / reg.ewres) * \
            reg.ewres + reg_bbox.west
        self.ymin = np.floor((self.ymin - reg_bbox.south) / reg.nsres) * \
            reg.nsres + reg_bbox.south
        self.xmax = np.ceil((self.xmax - reg_bbox.east) / reg.ewres) * \
            reg.ewres + reg_bbox.east
        self.ymax = np.ceil((self.ymax - reg_bbox.north) / reg.nsres) * \
            reg.nsres + reg_bbox.north
    # Mirror the extents into a pygrass Bbox so callers can pass
    # self.bbox straight to Region.set_bbox().
    self.bbox = Bbox()
    self.bbox.north = self.ymax
    self.bbox.south = self.ymin
    self.bbox.west = self.xmin
    self.bbox.east = self.xmax
def __init__(self, reg=None, default=False, prec=4, meta=None):
    """Initialize the region wrapper around a libgis Cell_head.

    :param reg: Region object used to compute the lower-left corner;
        defaults to the current GRASS region when None.
    :param default: when True, load the default region instead of the
        current one.
    :param prec: numeric precision used by the instance.
    :param meta: optional metadata attached to the instance.
    """
    # BUGFIX: the original signature used "reg=region.Region()", which is
    # evaluated ONCE at import time -- every call without an explicit reg
    # shared a single Region snapshot taken when the module was loaded
    # (and importing the module touched the GRASS region as a side
    # effect). Create the default lazily per call instead.
    if reg is None:
        reg = region.Region()
    self.c_region = ctypes.pointer(libgis.Cell_head())
    self.corners = self.llcorner(reg)
    self.prec = prec
    self.meta = meta
    if default:
        self.get_default()
    else:
        self.get_current()
def main(options, flags):
    """Sample each raster map of a space-time raster dataset (STRDS) at
    point locations and write one CSV-like row per map.

    :param options: parsed module options (points, strds, output, where,
        order, column, separator, coordinates).
    :param flags: parsed module flags; "n" controls header writing, "r"
        makes each raster's own region/extent the sampling region.
    """
    # Get the options
    points = options["points"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    column = options["column"]
    separator = options["separator"]
    coordinates = options["coordinates"]

    # Map the symbolic separator names onto the literal characters.
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    use_cats = False

    write_header = flags["n"]
    use_raster_region = flags["r"]

    overwrite = gscript.overwrite()

    # points (a vector map) and coordinates (a literal list) are two
    # mutually exclusive ways of specifying the sample locations.
    if points and coordinates:
        gscript.fatal(_("points and coordinates are mutually exclusive"))

    if not points and not coordinates:
        gscript.fatal(_("You must specify points or coordinates"))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order, dbif=dbif)
    dbif.close()

    if not maps:
        gscript.fatal(_("Space time raster dataset <%s> is empty") % sp.get_id())

    # The list of sample points
    p_list = []

    if not coordinates:
        # Sample locations come from a vector map; split off an optional
        # mapset qualifier ("name@mapset").
        vname = points
        vmapset = ""
        if "@" in points:
            vname, vmapset = points.split("@")

        v = pyvect.VectorTopo(vname, vmapset)
        v.open("r")

        col_index = 0

        # NOTE(review): existence is checked after open("r") -- if the map
        # is missing, open() may already have failed; verify ordering.
        if v.exist() is False:
            gscript.fatal(_("Vector map <%s> does not exist" % (points)))

        if not v.table:
            use_cats = True
            gscript.warning(_("Vector map <%s> does not have an attribute table, using cats as header column." % (points)))

        if v.table and column not in v.table.columns:
            gscript.fatal(_("Vector map <%s> has no column named %s" % (points, column)))

        # Resolve the header column's position once, outside the loop.
        if use_cats is False:
            col_index = list(v.table.columns.names()).index(column)

        # Create the point list; only point geometries are sampled.
        # NOTE(review): attrs.values()[col_index] relies on Python 2
        # returning a list from values().
        for line in v:
            if line.gtype == libvect.GV_POINT:
                if use_cats is False:
                    p = SamplePoint(line.x, line.y, line.cat,
                                    line.attrs.values()[col_index])
                elif use_cats is True:
                    p = SamplePoint(line.x, line.y, line.cat)

                p_list.append(p)

        v.close()
    else:
        # Convert the flat "x1,y1,x2,y2,..." coordinate list into sample
        # points, assigning sequential cats starting at 1.
        coord_list = coordinates.split(",")

        use_cats = True
        count = 0
        cat = 1
        while count < len(coord_list):
            x = coord_list[count]
            count += 1
            y = coord_list[count]
            count += 1

            p = SamplePoint(float(x), float(y), cat)
            p_list.append(p)
            cat += 1

    if output:
        out_file = open(output, "w")
    else:
        out_file = sys.stdout

    # Write the header: start_time, end_time, then one column per point
    # (its cat or its chosen attribute value).
    if write_header:
        out_file.write("start_time")
        out_file.write(separator)
        out_file.write("end_time")
        out_file.write(separator)
        count = 0
        for p in p_list:
            count += 1
            if use_cats is True:
                out_file.write(str(p.cat))
            else:
                out_file.write(str(p.column))
            # No trailing separator after the last column.
            if count != len(p_list):
                out_file.write(separator)
        out_file.write("\n")

    # Sorting the points by y-coordinate to make use of the single row
    # cache and read direction
    sorted_p_list = sorted(p_list, key=SamplePointComparisonY)

    # Sample each raster map of the dataset in turn.
    num = 0
    for map in maps:
        num += 1
        sys.stderr.write("Sample map <%s> number %i out of %i\n"
                         % (map.get_name(), num, len(maps)))

        start, end = map.get_temporal_extent_as_tuple()
        out_file.write(str(start))
        out_file.write(separator)
        # Maps with only a start time repeat it as the end time.
        if not end:
            out_file.write(str(start))
        else:
            out_file.write(str(end))
        out_file.write(separator)

        r = pyrast.RasterRow(map.get_name(), map.get_mapset())
        if r.exist() is False:
            gscript.fatal(_("Raster map <%s> does not exist" % (map.get_id())))

        region = None
        if use_raster_region is True:
            # Sample in the raster's own region rather than the current one.
            r.set_region_from_rast()
            region = pyregion.Region()
            region.from_rast(map.get_name())
        # Open the raster layer after the region settings
        r.open("r")

        # Sample with the y-sorted points (cache-friendly read order)...
        for p in sorted_p_list:
            p.value = r.get_value(point=p, region=region)

        # ...but write the values back in the caller's original point order.
        count = 0
        for p in p_list:
            count += 1
            out_file.write(str(p.value))
            if count != len(p_list):
                out_file.write(separator)
        out_file.write("\n")

        r.close()

    # NOTE(review): this also closes sys.stdout when no output file was
    # given -- consider guarding with "if output:".
    out_file.close()
data.open('r') # Open this object for reading segments = [] for cat in selected_cats: points_with_cat = data.cat(cat_id=cat, vtype='lines')[0] subcoords = [] for point in points_with_cat: subcoords.append([point.x, point.y]) segments.append(rn.Segment(_id=cat, to_ids=tostream[cats == cat])) segments[-1].set_EastingNorthing(ENarray=subcoords) segments[-1].calc_x_from_EastingNorthing() data.close() net = rn.Network(segments) bbox = BoundingBox(points_xy=net.segments_xy_flattened()) reg = region.Region() reg.set_bbox(bbox.bbox) reg.write() DEM = garray.array() DEM.read(options['elevation']) DEM = np.flipud(DEM) # nearest or linear? x = np.arange(reg.west + reg.ewres / 2., reg.east, reg.ewres) y = np.arange(reg.south + reg.nsres / 2., reg.north, reg.nsres) itp = RegularGridInterpolator((x, y), DEM.transpose(), method='nearest') for segment in net.segment_list: segment.set_z(itp(segment.EastingNorthing))
def main():
    """
    Extract a river long profile (or whole upstream network) from a vector
    streams map, attach elevation / slope / flow accumulation sampled from
    rasters, optionally smooth and plot the results, and write them out.

    Reads module-level `options` / `flags` parsed by the GRASS script
    framework. A "tostream" attribute links each segment to the next
    downstream segment by cat; tostream == 0 means the river exits the map.
    """
    # Parsing inside function: empty option strings are normalized to None.
    _cat = int(options['cat'])
    overwrite_flag = gscript.overwrite()
    elevation = options['elevation']
    if elevation == '':
        elevation = None
    slope = options['slope']
    if slope == '':
        slope = None
    accumulation = options['accumulation']
    if accumulation == '':
        accumulation = None
    direction = options['direction']
    if direction == '':
        direction = None
    streams = options['streams']
    if streams == '':
        streams = None
    outstream = options['outstream']
    if outstream == '':
        outstream = None
    outfile = options['outfile']
    if outfile == '':
        outfile = None
    # !!!!!!!!!!!!!!!!!
    # ADD SWITCHES TO INDIVIDUALLY SMOOTH SLOPE, ACCUM, ETC.
    # !!!!!!!!!!!!!!!!!
    # Optional numeric options: unparseable/missing values become None.
    try:
        window = float(options['window'])
    except:
        window = None
    try:
        dx_target = float(options['dx_target'])
    except:
        dx_target = None
    accum_mult = float(options['accum_mult'])
    # Axis label for accumulation plots, chosen from the units option.
    if options['units'] == 'm2':
        accum_label = 'Drainage area [m$^2$]'
    elif options['units'] == 'km2':
        accum_label = 'Drainage area [km$^2$]'
    elif options['units'] == 'cumecs':
        accum_label = 'Water discharge [m$^3$ s$^{-1}$]'
    elif options['units'] == 'cfs':
        accum_label = 'Water discharge [cfs]'
    else:
        accum_label = 'Flow accumulation [$-$]'
    plots = options['plots'].split(',')

    # Attributes of streams: pull the full attribute table and extract the
    # cat -> tostream linkage columns.
    colNames = np.array(vector_db_select(streams)['columns'])
    colValues = np.array(vector_db_select(streams)['values'].values())
    warnings.warn('tostream is not generalized')
    tostream = colValues[:, colNames == 'tostream'].astype(int).squeeze()
    cats = colValues[:, colNames == 'cat'].astype(int).squeeze()  # = "fromstream"

    # We can loop over this list to get the shape of the full river network.
    selected_cats = []
    segment = _cat
    selected_cats.append(segment)

    # Get all cats in network
    data = vector.VectorTopo(streams)  # Create a VectorTopo object
    data.open('r')  # Open this object for reading

    if direction == 'downstream':
        gscript.message("Extracting drainage pathway...",)
        # Walk downstream from the starting cat until tostream hits 0
        # (river leaves the map).
        while selected_cats[-1] != 0:
            selected_cats.append(int(tostream[cats == selected_cats[-1]]))
            #x.append(selected_cats[-1])
        selected_cats = selected_cats[:-1]  # remove 0 at end
        gscript.message("Done.")

    elif direction == 'upstream':
        gscript.message("Extracting drainage network...",)
        # GENERALIZE COLUMN NAME!!!!!!!!
        tostream_col = np.where(np.array(data.table.columns.names())
                                == 'tostream')[0][0]
        # Breadth-first walk upstream: each pass replaces the current
        # "terminal" cats with all cats that drain into them.
        terminalCats = [_cat]
        terminal_x_values = [0]
        netcats = []
        net_tocats = []
        while len(terminalCats) > 0:
            for cat in terminalCats:
                netcats.append(cat)
                # ALSO UNADVISABLE NAME -- NEED TO GET TOSTREAM, GENERALIZED
                #print data.table_to_dict()
                colnum = np.where(np.array(data.table.columns.names())
                                  == 'tostream')[0][0]
                net_tocats.append(data.table_to_dict()[cat][colnum])
            oldcats = terminalCats
            terminalCats = []
            for cat in oldcats:
                terminalCats += list(cats[tostream == cat])
        #data.close()
        netcats = np.array(netcats)
        net_tocats = np.array(net_tocats)
        selected_cats = netcats
        gscript.message("Done.")

    # Build one Segment per selected cat from its line geometry, with
    # downstream linkage (to_ids) and along-stream distance x.
    segments = []
    for cat in selected_cats:
        points_with_cat = data.cat(cat_id=cat, vtype='lines')[0]
        subcoords = []
        for point in points_with_cat:
            subcoords.append([point.x, point.y])
        segments.append(rn.Segment(_id=cat, to_ids=tostream[cats == cat]))
        segments[-1].set_EastingNorthing(ENarray=subcoords)
        segments[-1].calc_x_from_EastingNorthing()
        # x grid spacing
        #print segments[-1].Easting[-1], segments[-1].Northing[-1]
        #print segments[-1].EastingNorthing[-1]
        #print ""
        # Optionally resample each segment to a target downstream spacing.
        if dx_target is not None:
            dx_target = float(dx_target)
            segments[-1].set_target_dx_downstream(dx_target)
            segments[-1].densify_x_E_N()
    data.close()

    net = rn.Network(segments)
    bbox = BoundingBox(points_xy=net.segments_xy_flattened())
    # Keep a snapshot of the original region; reg is shrunk to the network.
    reg_to_revert = region.Region()
    reg = region.Region()  # to limit region for computational efficiency
    reg.set_bbox(bbox.bbox)
    reg.write()

    # Network extraction: optionally save the selected segments to a new
    # vector map.
    if outstream:
        selected_cats_str = list(np.array(selected_cats).astype(str))
        selected_cats_csv = ','.join(selected_cats_str)
        v.extract(input=streams, output=outstream,
                  cats=selected_cats_csv, overwrite=overwrite_flag)

    # All coordinates
    coords = net.segments_xy_flattened()
    #x_downstream =
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # UPDATE !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    """
    ##### FIND RIGHT SPOT TO ADD CLASS STUFF HERE/BELOW ####

    # Extract x points in network
    data = vector.VectorTopo(streams) # Create a VectorTopo object
    data.open('r') # Open this object for reading
    coords = []
    _i = 0
    for i in range(len(data)):
        if type(data.read(i+1)) is vector.geometry.Line:
            if data.read(i+1).cat in selected_cats:
                coords.append(data.read(i+1).to_array())
                gscript.core.percent(_i, len(selected_cats), 100./len(selected_cats))
                _i += 1
    gscript.core.percent(1, 1, 1)
    coords = np.vstack(np.array(coords))
    _dx = np.diff(coords[:,0])
    _dy = np.diff(coords[:,1])
    x_downstream_0 = np.hstack((0, np.cumsum((_dx**2 + _dy**2)**.5)))
    x_downstream = x_downstream_0.copy()
    data.close()
    """

    # TEMPORARY!!!!
    #x_downstream = get_xEN()
    #x_downstream_0 = x_downstream[0]

    # Analysis

    # Downstream distances -- 0 at mouth
    net.compute_x_in_network()

    # Elevation: sample a DEM at every segment vertex.
    if elevation:
        gscript.message("Elevation")
        _include_z = True
        # Load DEM
        griddata = garray.array()
        griddata.read(elevation)
        # Flip so row index increases with y, matching ascending y below.
        griddata = np.flipud(griddata)
        # Interpolate: nearest or linear?
        # Cell-center coordinates of the reduced region's grid.
        x = np.arange(reg.west + reg.ewres/2., reg.east, reg.ewres)
        y = np.arange(reg.south + reg.nsres/2., reg.north, reg.nsres)
        itp = RegularGridInterpolator((x, y), griddata.transpose(),
                                      method='nearest')
        # _i counts segments; _nexti/_lasti throttle progress reporting to
        # roughly 10% increments.
        _i = 0
        _lasti = 0
        _nexti = 0
        for segment in net.segment_list:
            try:
                segment.set_z(itp(segment.EastingNorthing))
            except:
                # Out-of-bounds vertices abort with a debug dump (Python 2
                # print statements).
                print segment.EastingNorthing
                print np.vstack((segment.Easting_original,
                                 segment.Northing_original)).transpose()
                sys.exit()
            if _i > _nexti:
                gscript.core.percent(_i, len(net.segment_list),
                                     np.floor(_i - _lasti))
                _nexti = float(_nexti) + len(net.segment_list)/10.
                if _nexti > len(net.segment_list):
                    _nexti = len(net.segment_list) - 1
                _lasti = _i
            _i += 1
        gscript.core.percent(1, 1, 1)
        del griddata

        #warnings.warn('Need to handle window in network')
        #gscript.core.percent(1, 1, 1)
    else:
        _include_z = False

    # Slope: sample the slope raster at every vertex of every segment.
    if slope:
        gscript.message("Slope")
        _include_S = True
        _slope = RasterRow(slope)
        _slope.open('r')
        # Here _i counts vertices (against len(coords)) for progress.
        _i = 0
        _lasti = 0
        _nexti = 0
        for segment in net.segment_list:
            sen = segment.EastingNorthing  # all E,N
            S = []
            for row in sen:
                #try:
                S.append(_slope.get_value(Point(row[0], row[1])))
                #except:
                #    print "ERROR"
                if _i > _nexti:
                    gscript.core.percent(_i, len(coords),
                                         np.floor(_i - _lasti))
                    _nexti = float(_nexti) + len(coords)/10.
                    if _nexti > len(coords):
                        _nexti = len(coords) - 1
                    _lasti = _i
                _i += 1
            # MAKE SETTER FOR THIS!!!!
            segment.channel_slope = np.array(S)
        if window is not None:
            # Smoothing is deferred to net.smooth_window(window) below.
            pass
            #net.smooth_window()
            #_x_downstream, _S = moving_average(x_downstream_0, S, window)
        _slope.close()
        # NOTE(review): S here holds only the LAST segment's values.
        S = np.array(S)
        S_0 = S.copy()
        gscript.core.percent(1, 1, 1)
    else:
        _include_S = False

    # Accumulation / drainage area, scaled by accum_mult (unit conversion).
    if accumulation:
        gscript.message("Accumulation")
        _include_A = True
        accumulation = RasterRow(accumulation)
        accumulation.open('r')
        _i = 0
        _lasti = 0
        _nexti = 0
        for segment in net.segment_list:
            A = []
            sen = segment.EastingNorthing  # all E,N
            for row in sen:
                A.append(accumulation.get_value(Point(row[0], row[1]))
                         * accum_mult)
                if _i > _nexti:
                    gscript.core.percent(_i, len(coords),
                                         np.floor(_i - _lasti))
                    _nexti = float(_nexti) + len(coords)/10.
                    if _nexti > len(coords):
                        _nexti = len(coords) - 1
                    _lasti = _i
                _i += 1
            # MAKE SETTER FOR THIS!!!!
            segment.channel_flow_accumulation = np.array(A)
        accumulation.close()
        # NOTE(review): A here holds only the LAST segment's values.
        A = np.array(A)
        A_0 = A.copy()
        """
        if window is not None:
            _x_downstream, A = moving_average(x_downstream_0, A, window)
        """
        gscript.core.percent(1, 1, 1)
    else:
        _include_A = False

    # Revert to original region
    # NOTE(review): a bare name is a no-op -- the region is never actually
    # reverted; presumably reg_to_revert.write() was intended.
    reg_to_revert

    # Smoothing over the requested window length, network-aware.
    if window is not None:
        net.smooth_window(window)

    # Plotting: each requested plot type gets its own figure.
    if 'LongProfile' in plots:
        plt.figure()
        if window:
            for segment in net.segment_list:
                plt.plot(segment.x/1000., segment.z_smoothed, 'k-',
                         linewidth=2)
        else:
            for segment in net.segment_list:
                plt.plot(segment.x/1000., segment.z, 'k-', linewidth=2)
        #plt.plot(x_downstream/1000., z, 'k-', linewidth=2)
        plt.xlabel('Distance from mouth [km]', fontsize=16)
        plt.ylabel('Elevation [m]', fontsize=16)
        plt.tight_layout()
    if 'SlopeAccum' in plots:
        plt.figure()
        if window:
            for segment in net.segment_list:
                # Mask non-positive accumulation so loglog stays valid.
                _y_points = segment.channel_slope_smoothed[
                    segment.channel_flow_accumulation_smoothed > 0
                    ]
                _x_points = segment.channel_flow_accumulation_smoothed[
                    segment.channel_flow_accumulation_smoothed > 0
                    ]
                plt.loglog(_x_points, _y_points, 'k.', alpha=.5)
        else:
            for segment in net.segment_list:
                _y_points = segment.channel_slope[
                    segment.channel_flow_accumulation > 0
                    ]
                _x_points = segment.channel_flow_accumulation[
                    segment.channel_flow_accumulation > 0
                    ]
                plt.loglog(_x_points, _y_points, 'k.', alpha=.5)
        plt.xlabel(accum_label, fontsize=16)
        plt.ylabel('Slope [$-$]', fontsize=16)
        plt.tight_layout()
    if 'SlopeDistance' in plots:
        plt.figure()
        if window:
            for segment in net.segment_list:
                plt.plot(segment.x/1000., segment.channel_slope_smoothed,
                         'k-', linewidth=2)
        else:
            for segment in net.segment_list:
                plt.plot(segment.x/1000., segment.channel_slope, 'k-',
                         linewidth=2)
        plt.xlabel('Distance downstream [km]', fontsize=16)
        plt.ylabel('Slope [$-$]', fontsize=20)
        plt.tight_layout()
    if 'AccumDistance' in plots:
        plt.figure()
        for segment in net.segment_list:
            _x_points = segment.x[segment.channel_flow_accumulation > 0]
            _y_points = segment.channel_flow_accumulation[
                segment.channel_flow_accumulation > 0
                ]
            plt.plot(_x_points/1000., _y_points, 'k.', alpha=.5)
        plt.xlabel('Distance downstream [km]', fontsize=16)
        plt.ylabel(accum_label, fontsize=16)
        plt.tight_layout()
    plt.show()

    # Saving data -- will need to update for more complex data structures!
    if outfile:
        net.compute_profile_from_starting_segment()
        _outfile = np.vstack((net.long_profile_header,
                              net.long_profile_output))
        np.savetxt(outfile, _outfile, '%s')
    else:
        pass
        #print net.accum_from_headwaters[1] - net.slope_from_headwaters[1]

    """
    for segment in net.segment_list:
        print segment.channel_flow_accumulation_smoothed
        print segment.channel_slope_smoothed
        print segment.channel_flow_accumulation_smoothed - \
              segment.channel_slope_smoothed
    """

# NOTE(review): the string below is opened here but closes beyond this
# chunk of the file; kept verbatim.
"""