def NEW_C_intersection(line0, line1):
    """Locate the intersection of two 2D line segments.

    line0, line1: each a pair of end points [[x0, y0], [x1, y1]]
        (lists or 2x2 numeric arrays, one point per row).

    Returns (status, value) interpreted as follows:
        status == 0: no intersection, value is None.
        status == 1: a single intersection point, value is [x, y].
        status == 2: collinear overlapping segments, value is the shared
                     segment [[x0, y0], [x1, y1]].
        status == 3: collinear but non-overlapping segments, value is None.
        status == 4: parallel (non-collinear) segments, value is None.
    """
    seg0 = ensure_numeric(line0, num.float)
    seg1 = ensure_numeric(line1, num.float)

    # All the geometric work happens in the C implementation.
    return _intersection(seg0[0, 0], seg0[0, 1],
                         seg0[1, 0], seg0[1, 1],
                         seg1[0, 0], seg1[0, 1],
                         seg1[1, 0], seg1[1, 1])
def test_smooth_attributes_to_mesh(self):
    """Fit three data points to a single-triangle mesh with alpha=0."""
    # Mesh: one triangle listed as b-a-c.
    vertex_a = [0.0, 0.0]
    vertex_b = [0.0, 5.0]
    vertex_c = [5.0, 0.0]
    points = [vertex_a, vertex_b, vertex_c]
    triangles = [[1, 0, 2]]    # bac

    # Data points inside the triangle and their attribute values
    # (all three sit on the plane z = x + y).
    data_coords = [[1.0, 1.0], [1.0, 3.0], [3.0, 1.0]]
    z = [2, 4, 4]

    fit = Fit(points, triangles, alpha=0)
    fit._build_matrix_AtA_Atz(ensure_numeric(data_coords),
                              ensure_numeric(z))

    assert num.allclose(fit.Atz, [2.8, 3.6, 3.6], atol=1e-7)

    # The fitted vertex values should reproduce the plane z = x + y
    # at the three mesh vertices.
    assert num.allclose(fit.fit(), [0, 5., 5.], atol=1e-7)
def calc_cost(self):
    """Convert previously calculated damage fractions into $ losses.

    Multiplies the structural/contents damage fractions by the
    corresponding cost vectors.  Must be called after the damage has
    been calculated.
    """
    struct_costs = ensure_numeric(self.struct_costs)
    content_costs = ensure_numeric(self.content_costs)

    self.struct_loss = self.struct_damage * struct_costs
    self.contents_loss = self.contents_damage * content_costs
def is_inside_triangle(point, triangle, closed=True,
                       rtol=1.0e-12, atol=1.0e-12,
                       check_inputs=True):
    """Test whether a single point lies inside a triangle.

    Uses the barycentric method: with triangle A, B, C the point is
    written P = A + alpha*(C-A) + beta*(B-A).  Dotting v = P-A with
    v0 = C-A and v1 = B-A gives a 2x2 linear system whose solution is

        alpha = (b_0*a_11 - b_1*a_01)/denom
        beta  = (b_1*a_00 - b_0*a_10)/denom
        denom = a_11*a_00 - a_10*a_01

    where a_ij = dot(v_i, v_j) and b_i = dot(v_i, v).  The point is
    inside whenever alpha, beta and alpha+beta all lie in the unit
    interval.

    point: the point [x, y] to test.
    triangle: the three corner points.
    closed: whether points on an edge count as inside.
    rtol, atol: determine how close the point has to be to an edge
        before it is deemed to lie on the edge.
    check_inputs: set False to skip input validation for speed.
    """
    triangle = ensure_numeric(triangle)
    point = ensure_numeric(point, num.float)

    if check_inputs is True:
        msg = 'is_inside_triangle must be invoked with one point only'
        assert num.allclose(point.shape, [2]), msg

    # Use C-implementation
    return bool(_is_inside_triangle(point, triangle,
                                    int(closed), rtol, atol))
def remove_lone_verts(verts, triangles, number_of_full_nodes=None):
    """Remove vertices that are not associated with any triangle.

    verts: a list/array of points.
    triangles: a list of 3-element tuples, each representing a triangle.
    number_of_full_nodes: relates to parallelism when a mesh has an
        extra layer of ghost points (currently unused here).

    Returns (verts, triangles) with unused vertices removed and the
    triangle vertex indices renumbered accordingly.
    """
    verts = ensure_numeric(verts)
    triangles = ensure_numeric(triangles)

    N = len(verts)

    # loners[i] holds a sentinel value > N until vertex i is seen in a
    # triangle; afterwards loners[i] == i.  The descending initial values
    # (2N, 2N-1, ..., N+1) make the index of the first loner recoverable
    # as 2N - max(loners).
    loners = num.arange(2*N, N, -1)
    for t in triangles:
        for vert in t:
            try:
                loners[vert] = vert    # all non-loners get loners[i] = i
            except IndexError:
                msg = ('IndexError: t = ' + str(t) + ' vert = ' +
                       str(vert) + ' N = ' + str(N))
                raise Exception(msg)

    lone_start = 2*N - max(loners)    # The index of the first loner

    if lone_start - 1 == N:
        # No loners at all
        pass
    elif min(loners[lone_start:N]) > N:
        # All the loners are at the end of the vert array
        verts = verts[0:lone_start]
    else:
        # Shuffle the non-loners down over the loners, recording each
        # kept vertex's new index in loners so triangles can be
        # renumbered afterwards.
        new_i = lone_start    # first loner - 'shuffle down' target
        for i in range(lone_start, N):
            if loners[i] >= N:
                pass          # [i] is a loner, leave alone
            else:
                loners[i] = new_i
                verts[new_i] = verts[i]
                new_i += 1
        verts = verts[0:new_i]

    # Renumber the triangles via loners[triangles].  num.take is used
    # instead of the previous num.choose(triangles, loners) because
    # numpy's choose is limited to 32 'choices' and would fail for
    # meshes with more than 32 vertices.
    triangles = num.take(loners, triangles)

    return verts, triangles
def __init__(self, quantity_name, file_name, time_step_count,
             time_step, lon, lat):
    """Instantiate a Write_nc instance (NetCDF file writer).

    quantity_name: must be 'HA', 'UA' or 'VA' (pre-condition).
    file_name: name of the NetCDF file to create.
    time_step_count: the number of time steps.
    time_step: the time step size.
    lon, lat: longitude and latitude vectors.
    """
    quantity_units = {'HA': 'CENTIMETERS',
                      'UA': 'CENTIMETERS/SECOND',
                      'VA': 'CENTIMETERS/SECOND'}

    # Conversion factors from m (m/s) to cm (cm/sec).  VA is negated
    # because MUX files have positive x in the Southern direction;
    # this corrects for it when writing nc files.
    multiplier_dic = {'HA': 100.0,
                      'UA': 100.0,
                      'VA': -100.0}

    self.quantity_name = quantity_name
    self.quantity_multiplier = multiplier_dic[self.quantity_name]
    self.time_step_count = time_step_count
    self.time_step = time_step

    # NetCDF file definition
    self.outfile = NetCDFFile(file_name, netcdf_mode_w)
    outfile = self.outfile

    # Create new file
    nc_lon_lat_header(outfile, lon, lat)

    # TIME
    outfile.createDimension(time_name, None)
    outfile.createVariable(time_name, precision, (time_name,))

    # QUANTITY
    outfile.createVariable(self.quantity_name, precision,
                           (time_name, lat_name, lon_name))
    outfile.variables[self.quantity_name].missing_value = -1.e+034
    outfile.variables[self.quantity_name].units = \
        quantity_units[self.quantity_name]
    outfile.variables[lon_name][:] = ensure_numeric(lon)
    outfile.variables[lat_name][:] = ensure_numeric(lat)
def get_absolute(self, points):
    """Given a set of points geo referenced to this instance,
    return the points as absolute values.

    points: a single point [x, y] or an N x 2 array/list of points.

    Returns the points with this instance's offset added (unless the
    instance is already absolute).  A list input yields a list output.
    """
    # remember if we got a list
    is_list = isinstance(points, list)

    points = ensure_numeric(points, num.float)

    if len(points.shape) == 1:
        # One point has been passed
        if not len(points) == 2:
            raise ShapeError("Single point must have two elements")
        # Promote to a 1 x 2 array so the offset arithmetic below works.
        # (Previously a single point fell through to the N x 2 check
        # and crashed on points.shape[1] / the %-formatting of shape.)
        points = num.reshape(points, (1, 2))

    if not points.shape[1] == 2:
        msg = "Input must be an N x 2 array or list of (x,y) values. "
        msg += "I got an %d x %d array" % points.shape
        raise ShapeError(msg)

    # Add geo ref to points
    if not self.is_absolute():
        points = copy.copy(points)    # Don't destroy input
        points[:, 0] += self.xllcorner
        points[:, 1] += self.yllcorner

    if is_list:
        points = points.tolist()

    return points
def get_relative(self, points):
    """Convert points to relative measurement.

    points: a single point [x, y] or an N x 2 array/list of points
        to convert to relative measurements.

    Returns a set of points relative to the geo_reference instance.
    This is the inverse of get_absolute().  A list input yields a list
    output.
    """
    # remember if we got a list
    is_list = isinstance(points, list)

    points = ensure_numeric(points, num.float)

    if len(points.shape) == 1:
        # One point has been passed
        if not len(points) == 2:
            raise ShapeError("Single point must have two elements")
        # Promote to a 1 x 2 array so the offset arithmetic below works.
        points = num.reshape(points, (1, 2))

    if not points.shape[1] == 2:
        # NOTE: previously the second half of this message was a
        # stand-alone string literal (a no-op statement), so it never
        # made it into the exception text.
        msg = "Input must be an N x 2 array or list of (x,y) values. "
        msg += "I got an %d x %d array" % points.shape
        raise ShapeError(msg)

    # Subtract geo ref from points
    if not self.is_absolute():
        points = copy.copy(points)    # Don't destroy input
        points[:, 0] -= self.xllcorner
        points[:, 1] -= self.yllcorner

    if is_list:
        points = points.tolist()

    return points
def test_bigger(self):
    """Search a larger rectangular mesh with inside and outside points."""
    points, vertices, boundary = rectangular(4, 4, 1, 1)
    mesh = Mesh(points, vertices, boundary)

    # Test that points are arranged in a counter clock wise order
    mesh.check_integrity()

    root = MeshQuadtree(mesh)
    root.set_last_triangle()

    test_points = [[0.6, 0.3], [0.1, 0.2], [0.7, 0.7], [0.1, 0.9],
                   [0.4, 0.6], [0.9, 0.1], [10, 3]]
    for x in test_points:
        found, s0, s1, s2, k = root.search_fast(ensure_numeric(x))

        if k < 0:
            # Point lies outside the mesh (e.g. [10, 3]).
            assert found is False
        else:
            V = mesh.get_vertex_coordinates(k)    # nodes for triangle k
            assert is_inside_polygon(x, V)
            assert found is True
def decimate_polygon(polygon, factor=10):
    """Reduce number of points in polygon by the specified factor
    (default=10, hence the name of the function) such that the extrema
    in both axes are preserved.

    polygon: the polygon to reduce.
    factor: the factor to reduce polygon points by (default 10).

    Returns the reduced polygon (a list of the surviving points).
    """
    # Find outer extent of polygon
    num_polygon = ensure_numeric(polygon)
    max_x = max(num_polygon[:, 0])
    max_y = max(num_polygon[:, 1])
    min_x = min(num_polygon[:, 0])
    min_y = min(num_polygon[:, 1])

    # Keep only some points making sure extrema are kept
    reduced_polygon = []
    for i, point in enumerate(polygon):
        if point[0] in [min_x, max_x] or point[1] in [min_y, max_y]:
            # Keep: this point realises an extremum on at least one axis.
            # NOTE: the original condition used 'and', requiring extrema
            # on BOTH axes simultaneously, which discarded the extreme
            # points of general polygons - the documented FIXME(Ole)
            # "doesn't work at present".
            reduced_polygon.append(point)
        elif len(reduced_polygon)*factor < i:
            # Retain roughly one in every 'factor' of the other points.
            reduced_polygon.append(point)

    return reduced_polygon
def polygon_area(input_polygon):
    """Determine the area of an arbitrary polygon.

    input_polygon: the polygon whose area is required.

    Returns a scalar value for the polygon area (always non-negative).

    Reference: http://mathworld.wolfram.com/PolygonArea.html
    """
    # Shift the polygon so its lower-left extent sits at the origin to
    # avoid rounding errors; the subtraction produces a fresh array so
    # the input is not destroyed.
    input_polygon = ensure_numeric(input_polygon)
    x_min = min(input_polygon[:, 0])
    y_min = min(input_polygon[:, 1])
    polygon = input_polygon - [x_min, y_min]

    # Shoelace formula: sum x_i*y_{i+1} - x_{i+1}*y_i over all edges,
    # wrapping from the last vertex back to the first.
    n = len(polygon)
    poly_area = 0.0
    for i in range(n):
        x_i, y_i = polygon[i][0], polygon[i][1]
        nxt = polygon[0] if i == n-1 else polygon[i+1]
        x_next, y_next = nxt[0], nxt[1]
        poly_area += x_i*y_next - x_next*y_i

    return abs(poly_area/2)
def not_polygon_overlap(triangles, polygon, verbose=False):
    """Determine which triangles do NOT overlap a polygon.

    triangles: flat array of triangle corner points (3 rows per triangle).
    polygon: the polygon to test against.
    verbose: log the count of overlapping triangles.

    Returns the indices of the non-overlapping triangles.
    """
    polygon = ensure_numeric(polygon)
    triangles = ensure_numeric(triangles)

    M = triangles.shape[0]/3    # Number of triangles
    indices = num.zeros(M, num.int)

    # The C routine packs overlapping triangle indices at the front of
    # 'indices' and returns how many there are.
    count = _polygon_overlap(polygon, triangles, indices)

    if verbose:
        log.critical('Found %d triangles (out of %d) that polygon'
                     % (count, M))

    return indices[count:]
def line_intersect(triangles, line, verbose=False):
    """Determine which triangles in a list intersect a line.

    triangles: flat array of triangle corner points (3 rows per triangle).
    line: the line to test against.
    verbose: log the count of intersecting triangles.

    Returns the indices of the intersecting triangles.
    """
    line = ensure_numeric(line)
    triangles = ensure_numeric(triangles)

    M = triangles.shape[0]/3    # Number of triangles
    indices = num.zeros(M, num.int)

    # The C routine packs intersecting triangle indices at the front of
    # 'indices' and returns how many there are.
    count = _line_intersect(line, triangles, indices)

    if verbose:
        log.critical('Found %d triangles (out of %d) that intersect line'
                     % (count, M))

    return indices[:count]
def not_line_intersect(triangles, line, verbose=False):
    """Determine which triangles do NOT intersect a polyline.

    triangles: flat array of triangle corner points (3 rows per triangle).
    line: the line to test against.
    verbose: log the count of intersecting triangles.

    Returns the indices of the non-intersecting triangles.
    """
    line = ensure_numeric(line)
    triangles = ensure_numeric(triangles)

    M = triangles.shape[0]/3    # Number of triangles
    indices = num.zeros(M, num.int)

    # The C routine packs intersecting triangle indices at the front of
    # 'indices'; everything from 'count' onwards does not intersect.
    count = _line_intersect(line, triangles, indices)

    if verbose:
        log.critical('Found %d triangles (out of %d) that intersect the line'
                     % (count, M))

    return indices[count:]
def is_complex(polygon, closed=True, verbose=False): """Check if a polygon is complex (self-intersecting). Uses a sweep algorithm that is O(n^2) in the worst case, but for most normal looking polygons it'll be O(n log n). polygon is a list of points that define a closed polygon. verbose will print a list of the intersection points if true Return True if polygon is complex. """ def key_xpos(item): """ Return the x coord out of the passed point for sorting key. """ return (item[0][0]) def segments_joined(seg0, seg1): """ See if there are identical segments in the 2 lists. """ for i in seg0: for j in seg1: if i == j: return True return False polygon = ensure_numeric(polygon, num.float) # build a list of discrete segments from the polygon unsorted_segs = [] for i in range(0, len(polygon)-1): unsorted_segs.append([list(polygon[i]), list(polygon[i+1])]) if closed: unsorted_segs.append([list(polygon[0]), list(polygon[-1])]) # all segments must point in same direction for val in unsorted_segs: if val[0][0] > val[1][0]: val[0], val[1] = val[1], val[0] l_x = sorted(unsorted_segs, key=key_xpos) comparisons = 0 # loop through, only comparing lines that partially overlap in x for index, leftmost in enumerate(l_x): cmp = index+1 while cmp < len(l_x) and leftmost[1][0] > l_x[cmp][0][0]: if not segments_joined(leftmost, l_x[cmp]): (type, point) = intersection(leftmost, l_x[cmp]) comparisons += 1 if type != 0 and type != 4 and type != 3 or (type == 2 and list(point[0]) !=\ list(point[1])): if verbose: print 'Self-intersecting polygon found, type ', type print 'point', point, print 'vertices: ', leftmost, ' - ', l_x[cmp] return True cmp += 1 return False
def set_w_uh_vh(self, w_uh_vh=None):
    """Store w_uh_vh, normalising it according to its detected type.

    Scalars are converted to float and arrays to numeric form;
    anything else (e.g. a function) is stored as given.
    """
    self.w_uh_vh = w_uh_vh
    self.w_uh_vh_type = determine_function_type(w_uh_vh)

    if self.w_uh_vh_type == 'scalar':
        self.w_uh_vh = float(self.w_uh_vh)
    elif self.w_uh_vh_type == 'array':
        self.w_uh_vh = ensure_numeric(self.w_uh_vh)
def separate_points_by_polygon(points, polygon,
                               closed=True,
                               check_input=True,
                               verbose=False):
    """Determine whether points are inside or outside a polygon

    Input:
       points - Tuple of (x, y) coordinates, or list of tuples
       polygon - list of vertices of polygon
       closed - (optional) determine whether points on boundary should be
       regarded as belonging to the polygon (closed = True)
       or not (closed = False)
       check_input: Allows faster execution if set to False

    Outputs:
       indices: array of same length as points with indices of points
       falling inside the polygon listed from the beginning and indices
       of points falling outside listed from the end.

       count: count of points falling inside the polygon

       The indices of points inside are obtained as indices[:count]
       The indices of points outside are obtained as indices[count:]

    Examples:
       U = [[0,0], [1,0], [1,1], [0,1]] #Unit square

       separate_points_by_polygon( [[0.5, 0.5], [1, -0.5], [0.3, 0.2]], U)
       will return the indices [0, 2, 1] and count == 2 as only the first
       and the last point are inside the unit square

    Remarks:
       The vertices may be listed clockwise or counterclockwise and
       the first point may optionally be repeated.
       Polygons do not need to be convex.
       Polygons can have holes in them and points inside a hole is
       regarded as being outside the polygon.

    Algorithm is based on work by Darel Finley,
    http://www.alienryderflex.com/polygon/

    Uses underlying C-implementation in polygon_ext.c

    NOTE(review): this definition appears truncated in this chunk -
    the generic exception handler and the call to the C implementation
    are not visible here.
    """
    if check_input:
        # Input checks
        assert isinstance(closed, bool), \
            'Keyword argument "closed" must be boolean'
        assert isinstance(verbose, bool), \
            'Keyword argument "verbose" must be boolean'

        try:
            # Coerce points into a numeric (float) array.
            points = ensure_numeric(points, num.float)
        except NameError, e:
            # Propagate missing-name errors unchanged.
            raise NameError, e
        except:
            # NOTE(review): handler body lies beyond this chunk.
def test_underlying_function(self):
    """test_larger mesh and different quad trees
    """
    # NOTE(review): this test is disabled by the bare 'return' below -
    # everything after it is dead code.  Presumably deliberate; confirm
    # before re-enabling.
    return
    points, vertices, boundary = rectangular(2, 2, 1, 1)
    mesh = Mesh(points, vertices, boundary)

    root = MeshQuadtree(mesh)
    root.set_last_triangle()

    # One point
    x = ensure_numeric([0.5, 0.5])
    found, sigma0, sigma1, sigma2, k = \
        root._search_triangles_of_vertices(root.search(x), x)

    if k >= 0:
        V = mesh.get_vertex_coordinates(k)    # nodes for triangle k
        assert is_inside_polygon(x, V)
        assert found is True
    else:
        assert found is False

    # More points
    for x in [[0.6, 0.3], [0.1, 0.2], [0.7, 0.7], [0.1, 0.9],
              [0.4, 0.6], [0.9, 0.1], [10, 3]]:
        triangles = root.search(x)

        found, sigma0, sigma1, sigma2, k = \
            root._search_triangles_of_vertices(triangles,
                                               ensure_numeric(x))
        if k >= 0:
            V = mesh.get_vertex_coordinates(k)    # nodes for triangle k
            assert is_inside_polygon(x, V)
            assert found is True
        else:
            assert found is False
def test_urs_ungridded2sww_mint_maxtII(self):
    """Convert ungridded URS mux data to sww; check times and stage.

    Zone: 50
    Easting: 240992.578  Northing: 7620442.472
    Latitude: -21 30 ' 0.00000 ''  Longitude: 114 30 ' 0.00000 ''
    """
    lat_long = [[-21.5, 114.5], [-21, 114.5], [-21, 115]]
    time_step_count = 6
    time_step = 100
    tide = 9000000

    # Write a set of mux files, then convert them to a single sww
    # clipped to [mint, maxt].
    base_name, files = self.write_mux(lat_long,
                                      time_step_count,
                                      time_step)
    urs_ungridded2sww(base_name, mean_stage=tide,
                      origin=(50, 23432, 4343),
                      mint=0, maxt=100000)

    # now I want to check the sww file ...
    sww_file = base_name + '.sww'

    # Let's interigate the sww file
    # Note, the sww info is not gridded.  It is point data.
    fid = NetCDFFile(sww_file)

    # Make x and y absolute
    geo_reference = Geo_reference(NetCDFObject=fid)
    points = geo_reference.get_absolute(map(None,
                                            fid.variables['x'][:],
                                            fid.variables['y'][:]))
    points = ensure_numeric(points)
    x = points[:, 0]

    # Check the time vector
    times = fid.variables['time'][:]

    times_actual = [0, 100, 200, 300, 400, 500]
    assert num.allclose(ensure_numeric(times),
                        ensure_numeric(times_actual))

    # Check first value
    stage = fid.variables['stage'][:]
    assert num.allclose(stage[0], x + tide)

    fid.close()

    # Clean up the temporary mux and sww files.
    self.delete_mux(files)
    os.remove(sww_file)
def in_and_outside_polygon(points, polygon, closed=True, verbose=False):
    """Determine points inside and outside a polygon

    See separate_points_by_polygon for documentation

    Returns an array of points inside and array of points outside the
    polygon

    NOTE(review): this definition appears truncated in this chunk -
    only the input coercion is visible; the call that actually
    separates the points is not shown.
    """
    try:
        # Coerce points into a numeric (float) array.
        points = ensure_numeric(points, num.float)
    except NameError, e:
        # Propagate missing-name errors unchanged.
        raise NameError, e
def greens_law(d1, d2, h1, verbose=False):
    """Apply Green's Law to estimate wave amplitude at a second depth.

    Green's Law approximates the wave amplitude at one depth from the
    amplitude at another via the fourth root of the depth ratio:

        h2 = h1 * (d1/d2)**(1/4),  where d2 cannot equal 0.

    Note, wave amplitude is equal to stage.

    Inputs:
        d1, d2 - the two depths (both strictly positive)
        h1     - the wave amplitude at d1 (strictly positive)

    Returns h2, the derived amplitude at d2.
    """
    d1 = ensure_numeric(d1)
    d2 = ensure_numeric(d2)
    h1 = ensure_numeric(h1)

    # Validate each input before computing.
    if d1 <= 0.0:
        raise Exception('the first depth, d1 (%f), must be strictly positive' % (d1))

    if d2 <= 0.0:
        raise Exception('the second depth, d2 (%f), must be strictly positive' % (d2))

    if h1 <= 0.0:
        raise Exception('the wave amplitude, h1 (%f), must be strictly positive' % (h1))

    h2 = h1*(d1/d2)**0.25

    assert h2 > 0

    return h2
def write_elevation_nc(file_out, lon, lat, depth_vector):
    """Write an nc elevation file."""
    # NetCDF file definition
    outfile = NetCDFFile(file_out, netcdf_mode_w)

    # Create new file
    nc_lon_lat_header(outfile, lon, lat)

    # ELEVATION
    zname = 'ELEVATION'
    outfile.createVariable(zname, precision, (lat_name, lon_name))
    outfile.variables[zname].units = 'CENTIMETERS'
    outfile.variables[zname].missing_value = -1.e+034

    outfile.variables[lon_name][:] = ensure_numeric(lon)
    outfile.variables[lat_name][:] = ensure_numeric(lat)

    # Reshape the flat depth vector into a (lat, lon) grid for writing.
    depth = num.reshape(depth_vector, (len(lat), len(lon)))
    outfile.variables[zname][:] = depth

    outfile.close()
def outside_polygon(points, polygon, closed = True, verbose = False):
    """Determine points outside a polygon

    Functions inside_polygon and outside_polygon have been defined in
    terms of separate_by_polygon which will put all inside indices in
    the first part of the indices array and outside indices in the last

    See separate_points_by_polygon for documentation

    NOTE(review): this definition appears truncated in this chunk -
    only the input coercion is visible.
    """
    try:
        # Coerce points into a numeric (float) array.
        points = ensure_numeric(points, num.float)
    except NameError, e:
        # Propagate missing-name errors unchanged.
        raise NameError, e
def change_points_geo_ref(self, points, points_geo_ref=None):
    """Change points to be absolute wrt new georef 'points_geo_ref'.

    points          the points to change
    points_geo_ref  the new georef to make points absolute wrt

    Returns the changed points data.
    If the points do not have a georef, assume 'absolute' values.
    """
    import copy

    # A list input should produce a list output.
    is_list = isinstance(points, list)

    points = ensure_numeric(points, num.float)

    # Sanity checks on the shape of the input.
    if len(points.shape) == 1:
        # One point has been passed
        assert len(points) == 2, "Single point must have two elements"
        points = num.reshape(points, (1, 2))

    assert len(points.shape) == 2, \
        "Points array must be two dimensional.\n" \
        "I got %d dimensions" % len(points.shape)

    assert points.shape[1] == 2, \
        "Input must be an N x 2 array or list of (x,y) values. " \
        "I got an %d x %d array" % points.shape

    # FIXME (Ole): Could also check if zone, xllcorner, yllcorner
    # are identical in the two geo refs.
    if points_geo_ref is not self:
        # If georeferences are different
        points = copy.copy(points)    # Don't destroy input
        if points_geo_ref is not None:
            # Convert points to absolute coordinates
            points[:, 0] += points_geo_ref.xllcorner
            points[:, 1] += points_geo_ref.yllcorner

        # Make points relative to primary geo reference
        points[:, 0] -= self.xllcorner
        points[:, 1] -= self.yllcorner

    if is_list:
        points = points.tolist()

    return points
def point_on_line(point, line, rtol=1.0e-5, atol=1.0e-8):
    """Determine whether a point lies on a line segment.

    Input:
        point: coordinates [x, y].
        line: two end points [[x0, y0], [x1, y1]] or the equivalent
            2x2 numeric array with each row corresponding to a point.
        rtol, atol: tolerances used by the test.

    Output: True if the point is on the segment.

    Note: Line can be degenerate and function still works to discern
    coinciding points from non-coinciding.
    """
    point = ensure_numeric(point)
    line = ensure_numeric(line)

    # The C helper does the actual geometry.
    res = _point_on_line(point[0], point[1],
                         line[0, 0], line[0, 1],
                         line[1, 0], line[1, 1],
                         rtol, atol)

    return bool(res)
def _poly_xy(polygon):
    """Used within plot_polygons to obtain plottable coordinates.

    polygon  A set of points defining a polygon.

    Returns a tuple (x, y) of X and Y coordinates of the polygon.
    We duplicate the first point so can have closed polygon in plot.

    NOTE(review): this definition appears truncated in this chunk -
    only the input coercion is visible.
    """
    try:
        # Coerce polygon into a numeric (float) array.
        polygon = ensure_numeric(polygon, num.float)
    except NameError, err:
        # Propagate missing-name errors unchanged.
        raise NameError, err
def search_fast(self, point):
    """Find the triangle (element) that the given point is in.

    Does a coherent quadtree traversal returning a single triangle the
    point falls within.  The traversal begins at the last triangle
    found; if this fails, it checks the triangles beneath it in the
    tree and then traverses upwards until it reaches the root.  This
    results in performance which varies between constant time and
    O(n), depending on the geometry.

    Inputs:
        point: The point to test

    Return:
        element_found, sigma0, sigma1, sigma2, k
        where element_found: True if a triangle containing x was found
        sigma0, sigma1, sigma2: The interpolated values
        k: Index of triangle (if found)
    """
    # Build the quad tree lazily on first use.
    if not hasattr(self, 'root'):
        self.add_quad_tree()

    # PADARN NOTE: make sure the input point is a float array.
    point = ensure_numeric(point, num.float)

    [found, sigma, index] = fitsmooth.individual_tree_search(self.root,
                                                             point)

    element_found = (found == 1)

    return element_found, sigma[0], sigma[1], sigma[2], index
def point_in_polygon(polygon, delta=1e-8):
    """Return a point inside a given polygon, close to its boundary.

    Input:
        polygon - list of vertices of polygon
        delta - the square root of 2*delta is the maximum distance from
            the polygon points and the returned point.

    Output:
        points - a point inside polygon

    Searches in all diagonals and up and down (not left and right)
    around each vertex, shrinking delta tenfold until a perturbed
    point lands inside the polygon.
    """
    polygon = ensure_numeric(polygon)

    while True:
        for poly_point in polygon:
            for x_mult in range(-1, 2):
                for y_mult in range(-1, 2):
                    pt_x, pt_y = poly_point

                    # Perturb each coordinate relative to its magnitude,
                    # falling back to an absolute offset at zero.
                    x_delta = (x_mult * delta if pt_x == 0
                               else pt_x + x_mult*pt_x*delta)
                    y_delta = (y_mult * delta if pt_y == 0
                               else pt_y + y_mult*pt_y*delta)

                    candidate = [x_delta, y_delta]
                    if is_inside_polygon(candidate, polygon, closed=False):
                        return candidate

        # No candidate worked - try again closer to the vertices.
        delta = delta * 0.1
def _build_matrix_AtA_Atz(self, point_coordinates, z=None,
                          attribute_name=None, verbose=False,
                          output='dot'):
    """Build:
    AtA  m x m  interpolation matrix, and,
    Atz  m x a  interpolation matrix where,
    m is the number of basis functions phi_k (one per vertex)
    a is the number of data attributes

    This algorithm uses a quad tree data structure for fast binning of
    data points.

    If Ata is None, the matrices AtA and Atz are created.

    This function can be called again and again, with sub-sets of the
    point coordinates.  Call fit to get the results.

    Preconditions:
    z and points are numeric
    Point_coordindates and mesh vertices have the same origin.

    The number of attributes of the data points does not change
    """
    if isinstance(point_coordinates, Geospatial_data):
        point_coordinates = point_coordinates.get_data_points(
            absolute=True)

    # Convert input to numeric arrays
    if z is not None:
        z = ensure_numeric(z, num.float)
    else:
        # No z supplied: pull attribute values out of the
        # Geospatial_data object instead.
        msg = 'z not specified'
        assert isinstance(point_coordinates, Geospatial_data), msg
        z = point_coordinates.get_attributes(attribute_name)

    point_coordinates = ensure_numeric(point_coordinates, num.float)

    npts = len(z)
    z = num.array(z)
    # NOTE PADARN : This copy might be needed to
    # make sure memory is contig - would be better to read in C..
    z = z.copy()

    self.point_count += z.shape[0]

    # zdim is the number of data attributes per point (columns of z).
    zdim = 1
    if len(z.shape) != 1:
        zdim = z.shape[1]

    # Accumulate AtA and Atz for this batch of points (C extension).
    [AtA, Atz] = fitsmooth.build_matrix_AtA_Atz_points(
        self.root.root, self.mesh.number_of_nodes,
        self.mesh.triangles,
        num.array(point_coordinates), z, zdim, npts)

    # Progress indicator (one dot per block).
    if verbose and output == 'dot':
        print('\b.', end=' ')
        sys.stdout.flush()

    if zdim == 1:
        Atz = num.array(Atz[0])
    else:
        Atz = num.array(Atz).transpose()

    # Either store the new matrices or merge them with the results of
    # earlier calls (blocked fitting).
    if self.AtA is None and self.Atz is None:
        self.AtA = AtA
        self.Atz = Atz
    else:
        fitsmooth.combine_partial_AtA_Atz(self.AtA, AtA,
                                          self.Atz, Atz,
                                          zdim, self.mesh.number_of_nodes)
def interpolate(self, f, point_coordinates=None,
                start_blocking_len=500000, NODATA_value=NAN,
                verbose=False, output_centroids=False):
    """Interpolate mesh data f to determine values, z, at points.

    f is the data on the mesh vertices.  The mesh values representing
    a smooth surface are assumed to be specified in f.

    Inputs:
        f: Vector or array of data at the mesh vertices.  If f is an
           array, interpolation will be done for each column as per
           underlying matrix-matrix multiplication.
        point_coordinates: Interpolate mesh data to these positions.
           List of coordinate pairs [x, y] of data points or an nx2
           numeric array or a Geospatial_data object.
           If point_coordinates is absent, the points inputted last
           time this method was called are used, if possible.
        start_blocking_len: If the # of points is more or greater than
           this, start blocking.

    Output:
        Interpolated values at inputted points (z).
    """
    # FIXME (Ole): Why is the interpolation matrix rebuilt everytime
    # the method is called even if interpolation points are unchanged?
    # This really should use some kind of caching in cases where
    # interpolation points are reused.
    #
    # This has now been addressed through an attempt in interpolate_block

    if verbose:
        log.critical('Build intepolation object')

    if isinstance(point_coordinates, Geospatial_data):
        point_coordinates = point_coordinates.get_data_points(
            absolute=True)

    # Can I interpolate, based on previous point_coordinates?
    if point_coordinates is None:
        if self._A_can_be_reused is True \
                and len(self._point_coordinates) < start_blocking_len:
            z = self._get_point_data_z(f, NODATA_value=NODATA_value,
                                       verbose=verbose)
        elif self._point_coordinates is not None:
            # if verbose, give warning
            if verbose:
                log.critical('WARNING: Recalculating A matrix, '
                             'due to blocking.')
            point_coordinates = self._point_coordinates
        else:
            # There are no good point_coordinates.
            # (An 'import sys; sys.exit()' debugging left-over used to
            # sit here; it killed the process before the exception
            # below could ever be raised.)
            msg = 'ERROR (interpolate.py): No point_coordinates inputted'
            raise Exception(msg)

    if point_coordinates is not None:
        self._point_coordinates = point_coordinates
        if len(point_coordinates) < start_blocking_len \
                or start_blocking_len == 0:
            self._A_can_be_reused = True
            z = self.interpolate_block(f, point_coordinates,
                                       NODATA_value=NODATA_value,
                                       verbose=verbose,
                                       output_centroids=output_centroids)
        else:
            # Handle blocking
            self._A_can_be_reused = False
            start = 0

            # Create a correctly-shaped empty array to concatenate onto.
            f = ensure_numeric(f, num.float)
            if len(f.shape) > 1:
                z = num.zeros((0, f.shape[1]), num.int)    # array default#
            else:
                z = num.zeros((0,), num.int)    # array default#

            for end in range(start_blocking_len,
                             len(point_coordinates),
                             start_blocking_len):
                t = self.interpolate_block(
                    f, point_coordinates[start:end],
                    NODATA_value=NODATA_value, verbose=verbose,
                    output_centroids=output_centroids)
                z = num.concatenate((z, t), axis=0)
                start = end

            # Final (possibly partial) block.
            end = len(point_coordinates)
            t = self.interpolate_block(
                f, point_coordinates[start:end],
                NODATA_value=NODATA_value, verbose=verbose,
                output_centroids=output_centroids)
            z = num.concatenate((z, t), axis=0)

    return z
def __init__(self, time, quantities, quantity_names=None,
             vertex_coordinates=None, triangles=None,
             interpolation_points=None, time_thinning=1,
             verbose=False, gauge_neighbour_id=None,
             output_centroids=False):
    """Initialise object and build spatial interpolation if required

    Time_thinning_number controls how many timesteps to use.
    Only timesteps with index%time_thinning_number == 0 will used, or
    in other words a value of 3, say, will cause the algorithm to
    use every third time step.
    """
    from anuga.config import time_format

    if verbose is True:
        log.critical('Interpolation_function: input checks')

    # Check temporal info
    time = ensure_numeric(time)

    if not num.alltrue(time[1:] - time[:-1] >= 0):
        # This message is time consuming to form due to the conversion of
        msg = 'Time must be a monotonuosly increasing sequence %s' % time
        raise Exception(msg)

    # Check if quantities is a single array only
    if not isinstance(quantities, dict):
        quantities = ensure_numeric(quantities)
        quantity_names = ['Attribute']

        # Make it a dictionary
        quantities = {quantity_names[0]: quantities}

    # Use keys if no names are specified
    if quantity_names is None:
        quantity_names = list(quantities.keys())

    # Check spatial info
    if vertex_coordinates is None:
        self.spatial = False
    else:
        # FIXME (Ole): Try ensure_numeric here -
        # this function knows nothing about georefering.
        vertex_coordinates = ensure_absolute(vertex_coordinates)

        if triangles is not None:
            triangles = ensure_numeric(triangles)
        self.spatial = True

    if verbose is True:
        log.critical('Interpolation_function: thinning by %d'
                     % time_thinning)

    # Thin timesteps if needed
    # Note array() is used to make the thinned arrays contiguous in memory
    self.time = num.array(time[::time_thinning])
    for name in quantity_names:
        if len(quantities[name].shape) == 2:
            quantities[name] = num.array(
                quantities[name][::time_thinning, :])

    if verbose is True:
        log.critical('Interpolation_function: precomputing')

    # Save for use with statistics
    self.quantities_range = {}
    for name in quantity_names:
        q = quantities[name][:].flatten()
        self.quantities_range[name] = [min(q), max(q)]

    self.quantity_names = quantity_names
    self.vertex_coordinates = vertex_coordinates
    self.interpolation_points = interpolation_points

    self.index = 0    # Initial time index
    self.precomputed_values = {}
    self.centroids = []

    # Precomputed spatial interpolation if requested
    if interpolation_points is not None:
        # no longer true. sts files have spatial = True but
        # if self.spatial is False:
        #     raise Exception('Triangles and vertex_coordinates must be specified')
        #
        try:
            self.interpolation_points = \
                interpolation_points = ensure_numeric(interpolation_points)
        except:
            msg = 'Interpolation points must be an N x 2 numeric array ' \
                  'or a list of points\n'
            msg += 'Got: %s.' % (str(self.interpolation_points)[:60] + '...')
            raise Exception(msg)

        # Ensure 'mesh_boundary_polygon' is defined
        mesh_boundary_polygon = None

        if triangles is not None and vertex_coordinates is not None:
            # Check that all interpolation points fall within
            # mesh boundary as defined by triangles and vertex_coordinates.
            from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
            from anuga.geometry.polygon import outside_polygon

            # Create temporary mesh object from mesh info passed
            # into this function.
            mesh = Mesh(vertex_coordinates, triangles)
            mesh_boundary_polygon = mesh.get_boundary_polygon()

            indices = outside_polygon(interpolation_points,
                                      mesh_boundary_polygon)

            # Record result
            # self.mesh_boundary_polygon = mesh_boundary_polygon
            self.indices_outside_mesh = indices

            # Report
            if len(indices) > 0:
                msg = 'Interpolation points in Interpolation function fall '
                msg += 'outside specified mesh. Offending points:\n'
                out_interp_pts = []
                for i in indices:
                    msg += '%d: %s\n' % (i, interpolation_points[i])
                    out_interp_pts.append(
                        ensure_numeric(interpolation_points[i]))

                if verbose is True:
                    import sys
                    from anuga.geometry.polygon import plot_polygons
                    title = ('Interpolation points fall '
                             'outside specified mesh')
                    plot_polygons([mesh_boundary_polygon,
                                   interpolation_points,
                                   out_interp_pts],
                                  ['line', 'point', 'outside'],
                                  figname='points_boundary_out',
                                  label=title)

                # Joaquim Luis suggested this as an Exception, so
                # that the user can now what the problem is rather than
                # looking for NaN's. However, NANs are handy as they can
                # be ignored leaving good points for continued processing.
                if verbose:
                    log.critical(msg)
                # raise Exception(msg)

        elif triangles is None and vertex_coordinates is not None:    # jj
            # Dealing with sts file
            pass
        else:
            raise Exception('Sww file function requires both triangles and '
                            'vertex_coordinates. sts file file function '
                            'requires the latter.')

        # Plot boundary and interpolation points,
        # but only if if 'mesh_boundary_polygon' has data.
        if verbose is True and mesh_boundary_polygon is not None:
            import sys
            if sys.platform == 'win32':
                from anuga.geometry.polygon import plot_polygons
                title = ('Interpolation function: '
                         'Polygon and interpolation points')
                plot_polygons([mesh_boundary_polygon,
                               interpolation_points],
                              ['line', 'point'],
                              figname='points_boundary',
                              label=title)

        m = len(self.interpolation_points)
        p = len(self.time)

        for name in quantity_names:
            self.precomputed_values[name] = num.zeros((p, m), num.float)

        if verbose is True:
            log.critical('Build interpolator')

        # Build interpolator
        if triangles is not None and vertex_coordinates is not None:
            if verbose:
                msg = 'Building interpolation matrix from source mesh '
                msg += '(%d vertices, %d triangles)' \
                       % (vertex_coordinates.shape[0], triangles.shape[0])
                log.critical(msg)

            # This one is no longer needed for STS files
            interpol = Interpolate(vertex_coordinates, triangles,
                                   verbose=verbose)
        elif triangles is None and vertex_coordinates is not None:
            if verbose:
                log.critical('Interpolation from STS file')

        if verbose:
            log.critical('Interpolating (%d interpolation points, %d timesteps).'
                         % (self.interpolation_points.shape[0],
                            self.time.shape[0]))

            if time_thinning > 1:
                log.critical('Timesteps were thinned by a factor of %d'
                             % time_thinning)
            else:
                log.critical()

        for i, t in enumerate(self.time):
            # Interpolate quantities at this timestep
            # if verbose and i%((p+10)/10) == 0:
            if verbose:
                log.critical(' time step %d of %d' % (i, p))

            for name in quantity_names:
                if len(quantities[name].shape) == 2:
                    Q = quantities[name][i, :]    # Quantities at timestep i
                else:
                    Q = quantities[name][:]       # No time dependency

                # if verbose and i%((p+10)/10) == 0:
                if verbose:
                    log.critical(' quantity %s, size=%d' % (name, len(Q)))

                # Interpolate
                if triangles is not None and vertex_coordinates is not None:
                    result = interpol.interpolate(
                        Q,
                        point_coordinates=self.interpolation_points,
                        verbose=False,
                        output_centroids=output_centroids)
                    self.centroids = interpol.centroids
                elif triangles is None and vertex_coordinates is not None:
                    result = interpolate_polyline(
                        Q, vertex_coordinates, gauge_neighbour_id,
                        interpolation_points=self.interpolation_points)

                # assert len(result), len(interpolation_points)
                self.precomputed_values[name][i, :] = result

        # Report
        if verbose:
            log.critical(self.statistics())
    else:
        # Store quantitites as is
        for name in quantity_names:
            self.precomputed_values[name] = quantities[name]
def _build_interpolation_matrix_A(self,
                                  point_coordinates,
                                  output_centroids=False,
                                  verbose=False):
    """Build n x m interpolation matrix, where n is the number of
    data points and m is the number of basis functions phi_k
    (one per mesh vertex).

    This algorithm uses a quad tree data structure for fast binning
    of data points.

    origin is a 3-tuple consisting of UTM zone, easting and northing.
    If specified coordinates are assumed to be relative to this origin.
    This one will override any data_origin that may be specified in
    instance interpolation.

    Parameters:
        point_coordinates: Nx2 array (or list) of data point coordinates.
        output_centroids: if True, each found point is weighted 1/3 onto
            all three vertices of its containing triangle and the
            triangle centroid is recorded, instead of using the
            barycentric weights sigma0..sigma2.
        verbose: emit progress information via log.critical.

    Returns:
        (A, inside_poly_indices, outside_poly_indices, centroids) where
        A is an n x m Sparse matrix, the two index collections partition
        the input points into inside/outside the mesh (points falling in
        mesh holes end up in the outside list), and centroids is the list
        of triangle centroids gathered when output_centroids is True.

    Preconditions:
        point_coordinates and mesh vertices have the same origin.
    """

    if verbose: log.critical('Building interpolation matrix')

    # Convert point_coordinates to numeric arrays, in case it was a list.
    point_coordinates = ensure_numeric(point_coordinates, num.float)

    if verbose: log.critical('Getting indices inside mesh boundary')

    # Quick test against boundary, but will not deal with holes in the mesh,
    # that is done below inside the per-point loop
    inside_boundary_indices, outside_poly_indices = \
        in_and_outside_polygon(point_coordinates,
                               self.mesh.get_boundary_polygon(),
                               closed=True, verbose=verbose)

    # Build n x m interpolation matrix
    if verbose and len(outside_poly_indices) > 0:
        log.critical('WARNING: Points outside mesh boundary.')

    # Since you can block, throw a warning, not an error.
    if verbose and 0 == len(inside_boundary_indices):
        log.critical('WARNING: No points within the mesh!')

    m = self.mesh.number_of_nodes  # Nbr of basis functions (1/vertex)
    n = point_coordinates.shape[0]  # Nbr of data points

    if verbose: log.critical('Number of datapoints: %d' % n)
    if verbose: log.critical('Number of basis functions: %d' % m)

    A = Sparse(n, m)

    n = len(inside_boundary_indices)

    centroids = []
    inside_poly_indices = []

    # Compute matrix elements for points inside the mesh
    if verbose: log.critical('Building interpolation matrix from %d points'
                             % n)

    for d, i in enumerate(inside_boundary_indices):
        # For each data_coordinate point
        # Progress report roughly every 10% of the points
        if verbose and d % ((n + 10) / 10) == 0: log.critical('Doing %d of %d'
                                                              % (d, n))

        x = point_coordinates[i]

        # Quad-tree search: returns barycentric weights and triangle id k
        element_found, sigma0, sigma1, sigma2, k = self.root.search_fast(x)

        # Update interpolation matrix A if necessary
        if element_found is True:
            inside_poly_indices.append(i)

            # Assign values to matrix A
            j0 = self.mesh.triangles[k, 0]  # Global vertex id for sigma0
            j1 = self.mesh.triangles[k, 1]  # Global vertex id for sigma1
            j2 = self.mesh.triangles[k, 2]  # Global vertex id for sigma2

            js = [j0, j1, j2]

            if output_centroids is False:
                # Weight each vertex according to its distance from x
                sigmas = {j0: sigma0, j1: sigma1, j2: sigma2}
                for j in js:
                    A[i, j] = sigmas[j]
            else:
                # If centroids are needed, weight all 3 vertices equally
                for j in js:
                    A[i, j] = 1.0 / 3.0
                centroids.append(self.mesh.centroid_coordinates[k])
        else:
            # Point is inside the boundary polygon but in a mesh hole
            if verbose:
                log.critical('Mesh has a hole - moving this point to outside list')

            # This is a numpy arrays, so we need to do a slow transfer
            outside_poly_indices = num.append(outside_poly_indices,
                                              [i], axis=0)

    return A, inside_poly_indices, outside_poly_indices, centroids
def __init__(self, domain, end_points, exchange_lines, enquiry_points,
             invert_elevations, width, height, diameter, z1, z2, blockage,
             barrels, apron, manning, enquiry_gap, use_momentum_jet,
             zero_outflow_momentum, use_old_momentum_method,
             always_use_Q_wetdry_adjustment, force_constant_inlet_elevations,
             description, label, structure_type, logging, verbose,
             master_proc=0, procs=None, inlet_master_proc=[0, 0],
             inlet_procs=None, enquiry_proc=None):
    """Set up a parallel culvert/structure operator on this processor.

    Exactly one of width or diameter must be supplied (asserted below);
    the missing one, plus height and apron, default to the supplied value.
    One of exchange_lines or end_points must be given; otherwise an
    Exception is raised.  A Parallel_Inlet_enquiry is created for each of
    the two inlets this processor participates in (None is appended for
    inlets this processor is not associated with).
    """
    self.myid = pypar.rank()
    self.num_procs = pypar.size()

    anuga.Operator.__init__(self, domain)

    # Allocate default processor associations if not specified in arguments
    # although we assume that such associations are provided correctly by
    # the parallel_operator_factory.
    self.master_proc = master_proc
    self.inlet_master_proc = inlet_master_proc

    if procs is None:
        self.procs = [master_proc]
    else:
        self.procs = procs

    if inlet_procs is None:
        self.inlet_procs = [[inlet_master_proc[0]], [inlet_master_proc[0]]]
    else:
        self.inlet_procs = inlet_procs

    if enquiry_proc is None:
        self.enquiry_proc = [[inlet_master_proc[0]], [inlet_master_proc[0]]]
    else:
        self.enquiry_proc = enquiry_proc

    self.end_points = ensure_numeric(end_points)
    self.exchange_lines = ensure_numeric(exchange_lines)
    self.enquiry_points = ensure_numeric(enquiry_points)
    self.invert_elevations = ensure_numeric(invert_elevations)

    # Exactly one of width/diameter must be supplied; derive the other.
    assert (width is not None and diameter is None) \
        or (width is None and diameter is not None)

    if width is None:
        width = diameter
    if diameter is None:
        diameter = width
    if height is None:
        height = width
    if apron is None:
        apron = width

    self.width = width
    self.height = height
    self.diameter = diameter
    self.z1 = z1
    self.z2 = z2
    self.blockage = blockage
    self.barrels = barrels
    self.apron = apron
    self.manning = manning
    self.enquiry_gap = enquiry_gap
    self.use_momentum_jet = use_momentum_jet
    self.zero_outflow_momentum = zero_outflow_momentum
    if use_momentum_jet and zero_outflow_momentum:
        msg = "Can't have use_momentum_jet and zero_outflow_momentum both True"
        raise Exception(msg)
    self.use_old_momentum_method = use_old_momentum_method
    self.always_use_Q_wetdry_adjustment = always_use_Q_wetdry_adjustment

    if description is None:
        self.description = ' '
    else:
        self.description = description

    if label is None:
        self.label = "structure_%g" % Parallel_Structure_operator.counter \
                     + "_P" + str(self.myid)
    else:
        self.label = label + '_%g' % Parallel_Structure_operator.counter \
                     + "_P" + str(self.myid)

    if structure_type is None:
        self.structure_type = 'generic structure'
    else:
        self.structure_type = structure_type

    self.verbose = verbose

    # Keep count of structures; only the master increments the class counter
    if self.myid == master_proc:
        Parallel_Structure_operator.counter += 1

    # Slots for recording current statistics
    self.accumulated_flow = 0.0
    self.discharge = 0.0
    self.discharge_abs_timemean = 0.0
    self.velocity = 0.0
    self.outlet_depth = 0.0
    self.delta_total_energy = 0.0
    self.driving_energy = 0.0

    if exchange_lines is not None:
        self.__process_skew_culvert()
    elif end_points is not None:
        self.__process_non_skew_culvert()
    else:
        # FIX: was Python-2 'raise Exception, msg' comma syntax
        # (a SyntaxError under Python 3); converted to the call form.
        raise Exception('Define either exchange_lines or end_points')

    self.inlets = []

    # Allocate parallel inlet enquiry, assign None if processor is not
    # associated with particular inlet.
    if self.myid in self.inlet_procs[0]:
        line0 = self.exchange_lines[0]

        if self.apron is None:
            poly0 = line0
        else:
            offset = -self.apron * self.outward_vector_0
            poly0 = num.array([line0[0], line0[1],
                               line0[1] + offset, line0[0] + offset])

        if self.invert_elevations is None:
            invert_elevation0 = None
        else:
            invert_elevation0 = self.invert_elevations[0]

        enquiry_point0 = self.enquiry_points[0]
        outward_vector0 = self.culvert_vector

        self.inlets.append(parallel_inlet_enquiry.Parallel_Inlet_enquiry(
            self.domain,
            line0,
            enquiry_point0,
            invert_elevation=invert_elevation0,
            outward_culvert_vector=outward_vector0,
            master_proc=self.inlet_master_proc[0],
            procs=self.inlet_procs[0],
            enquiry_proc=self.enquiry_proc[0],
            verbose=self.verbose))

        if force_constant_inlet_elevations:
            # Try to enforce a constant inlet elevation
            inlet_global_elevation = self.inlets[-1].get_global_average_elevation()
            self.inlets[-1].set_elevations(inlet_global_elevation)
    else:
        self.inlets.append(None)

    if self.myid in self.inlet_procs[1]:
        line1 = self.exchange_lines[1]

        if self.apron is None:
            poly1 = line1
        else:
            offset = -self.apron * self.outward_vector_1
            poly1 = num.array([line1[0], line1[1],
                               line1[1] + offset, line1[0] + offset])

        if self.invert_elevations is None:
            invert_elevation1 = None
        else:
            invert_elevation1 = self.invert_elevations[1]

        enquiry_point1 = self.enquiry_points[1]
        # Second inlet faces the opposite way along the culvert axis
        outward_vector1 = -self.culvert_vector

        self.inlets.append(parallel_inlet_enquiry.Parallel_Inlet_enquiry(
            self.domain,
            line1,
            enquiry_point1,
            invert_elevation=invert_elevation1,
            outward_culvert_vector=outward_vector1,
            master_proc=self.inlet_master_proc[1],
            procs=self.inlet_procs[1],
            enquiry_proc=self.enquiry_proc[1],
            verbose=self.verbose))

        if force_constant_inlet_elevations:
            # Try to enforce a constant inlet elevation
            inlet_global_elevation = self.inlets[-1].get_global_average_elevation()
            self.inlets[-1].set_elevations(inlet_global_elevation)
    else:
        self.inlets.append(None)

    self.inflow_index = 0
    self.outflow_index = 1

    self.set_parallel_logging(logging)
def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
                      mint=None, maxt=None, mean_stage=0, origin=None,
                      hole_points_UTM=None, zscale=1):
    """Convert URS C binary format for wave propagation to sww format
    native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is assumed ungridded grid.

    min's and max's: If omitted - full extend is used.
    To include a value min and max may equal it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced UTM coordinates
    (zone, easting, northing). It will be the origin of the sww file.
    This shouldn't be used, since all of anuga should be able to handle
    an arbitrary origin. The mux point info is NOT relative to this origin.

    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest varying dimension (row major
    order, so to speak). In URS C binary the latitudes and longitudes
    are in ascending order.

    Note, interpolations of the resulting sww file will be different
    from results of urs2sww due to the different interpolation function
    and grid structure used.
    """
    from anuga.mesh_engine.mesh_engine import NoTrianglesError
    from anuga.pmesh.mesh import Mesh

    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
                basename_in + EAST_VELOCITY_LABEL,
                basename_in + NORTH_VELOCITY_LABEL]
    quantities = ['HA', 'UA', 'VA']

    # Instantiate urs_points for each of the three mux files.
    # FIX: Python-2-only map(None, ...) replaced with zip(); the two lists
    # have equal length so behaviour is unchanged.
    mux = {}
    for quantity, filename in zip(quantities, files_in):
        mux[quantity] = Read_urs(filename)

    # Could check that the depth is the same. (hashing)

    # handle to a mux file to do depth stuff
    a_mux = mux[quantities[0]]

    # Convert to utm.  FIX: renamed locals 'lat'/'long' -> the latter
    # shadowed the builtin 'long' under Python 2 style code.
    latitudes = a_mux.lonlatdep[:, 1]
    longitudes = a_mux.lonlatdep[:, 0]
    points_utm, zone = convert_from_latlon_to_utm(latitudes=latitudes,
                                                  longitudes=longitudes)
    elevation = a_mux.lonlatdep[:, 2] * -1

    # grid (create a mesh from the selected points)
    # This mesh has a problem: triangles are stretched over ungridded areas.
    # If these areas could be described as holes in pmesh, that would be
    # great.  A boundary is needed around these points, but if the zone of
    # points is obvious enough, auto-segment should do a good boundary.
    mesh = Mesh()
    mesh.add_vertices(points_utm)
    mesh.auto_segment(smooth_indents=True, expand_pinch=True)

    # To try and avoid alpha shape 'hugging' too much
    mesh.auto_segment(mesh.shape.get_alpha() * 1.1)

    if hole_points_UTM is not None:
        point = ensure_absolute(hole_points_UTM)
        mesh.add_hole(point[0], point[1])

    try:
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
    except NoTrianglesError:
        # This is a bit of a hack, going in and changing the data structure.
        mesh.holes = []
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)

    mesh_dic = mesh.Mesh2MeshList()

    # These are the times of the mux file
    mux_times = []
    for i in range(a_mux.time_step_count):
        mux_times.append(a_mux.time_step * i)
    (mux_times_start_i, mux_times_fin_i) = read_time_from_mux(mux_times,
                                                             mint, maxt)
    times = mux_times[mux_times_start_i:mux_times_fin_i]

    if mux_times_start_i == mux_times_fin_i:
        # Close the mux files (FIX: map(None, ...) -> zip())
        for quantity, filename in zip(quantities, files_in):
            mux[quantity].close()
        msg = "Due to mint and maxt there's no time info in the boundary SWW."
        raise Exception(msg)

    # If this raise is removed there is currently no downstream errors

    points_utm = ensure_numeric(points_utm)
    assert num.alltrue(ensure_numeric(mesh_dic['generatedpointlist'])
                       == ensure_numeric(points_utm))

    volumes = mesh_dic['generatedtrianglelist']

    # Write sww intro and grid stuff.
    if basename_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = basename_out + '.sww'

    if verbose: log.critical('Output to %s' % swwname)

    outfile = NetCDFFile(swwname, netcdf_mode_w)

    # For a different way of doing this, check out tsh2sww
    # work out sww_times and the index range this covers
    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile, times, len(volumes), len(points_utm),
                     verbose=verbose, sww_precision=netcdf_float)
    outfile.mean_stage = mean_stage
    outfile.zscale = zscale

    sww.store_triangulation(outfile, points_utm, volumes, zone,
                            new_origin=origin, verbose=verbose)
    sww.store_static_quantities(outfile, elevation=elevation)

    if verbose: log.critical('Converting quantities')

    # Read in a time slice from each mux file and write it to the SWW file.
    # FIX: map(None, ...) -> zip(); the three mux readers yield the same
    # number of time slices, so truncation semantics do not differ.
    j = 0
    for ha, ua, va in zip(mux['HA'], mux['UA'], mux['VA']):
        if j >= mux_times_start_i and j < mux_times_fin_i:
            stage = zscale * ha + mean_stage
            h = stage - elevation
            xmomentum = ua * h
            ymomentum = -1 * va * h  # -1 since in mux files south is positive.
            sww.store_quantities(outfile,
                                 slice_index=j - mux_times_start_i,
                                 verbose=verbose,
                                 stage=stage,
                                 xmomentum=xmomentum,
                                 ymomentum=ymomentum,
                                 sww_precision=num.float)
        j += 1

    if verbose: sww.verbose_quantities(outfile)

    outfile.close()
def __init__(self, vertex_coordinates=None, triangles=None, mesh=None,
             mesh_origin=None, alpha=None, verbose=False,
             cg_precon='Jacobi', use_c_cg=True):
    """Prepare a least-squares fit of point data onto mesh vertices.

    Padarn Note 05/12/12: This documentation should probably be updated to
    account for the fact that the fitting is now done in C.

    Inputs:
        vertex_coordinates: list of [xi, eta] coordinate pairs (or an
            m x 2 numeric array, or a geospatial object) forming the mesh.
            Points may appear multiple times (e.g. if vertices have
            discontinuities).
        triangles: list of 3-tuples (or numeric array) of vertex indices.
        mesh_origin: geo_reference object or (zone, easting, northing)
            3-tuple; vertex coordinates are then relative to that origin.
            Do not combine a geospatial vertex object with mesh_origin,
            since geospatial carries its own origin.
        alpha: smoothing parameter; defaults to DEFAULT_ALPHA when None.
        cg_precon / use_c_cg: conjugate-gradient solver configuration.

    For blocked usage, call build_fit_subset with z info, then fit with
    no point coord / z info.
    """
    # Smoothing parameter (fall back to the module default)
    self.alpha = DEFAULT_ALPHA if alpha is None else alpha

    FitInterpolate.__init__(self,
                            vertex_coordinates,
                            triangles,
                            mesh,
                            mesh_origin=mesh_origin,
                            verbose=verbose)

    # Accumulators for the normal equations, filled in blockwise
    self.AtA = None
    self.Atz = None
    self.D = None
    self.point_count = 0

    # NOTE PADARN: NEEDS FIXING - the smoothing matrix is built even when
    # alpha is zero because the C function expects it.  This could and
    # should be removed.
    if verbose:
        log.critical('Building smoothing matrix')
    self.D = self._build_smoothing_matrix_D()

    self.mesh_boundary_polygon = ensure_numeric(
        self.mesh.get_boundary_polygon())

    self.cg_precon = cg_precon
    self.use_c_cg = use_c_cg
def separate_points_by_polygon(points, polygon,
                               closed=True,
                               check_input=True,
                               verbose=False):
    """Determine whether points are inside or outside a polygon.

    Input:
       points - Tuple of (x, y) coordinates, or list of tuples
       polygon - list of vertices of polygon
       closed - (optional) determine whether points on boundary should be
           regarded as belonging to the polygon (closed = True)
           or not (closed = False)
       check_input: Allows faster execution if set to False

    Outputs:
       indices: array of same length as points with indices of points falling
           inside the polygon listed from the beginning and indices of points
           falling outside listed from the end.
       count: count of points falling inside the polygon

       The indices of points inside are obtained as indices[:count]
       The indices of points outside are obtained as indices[count:]

    Examples:
       U = [[0,0], [1,0], [1,1], [0,1]]   # Unit square

       separate_points_by_polygon( [[0.5, 0.5], [1, -0.5], [0.3, 0.2]], U)
       will return the indices [0, 2, 1] and count == 2 as only the first
       and the last point are inside the unit square

    Remarks:
       The vertices may be listed clockwise or counterclockwise and
       the first point may optionally be repeated.
       Polygons do not need to be convex.
       Polygons can have holes in them and points inside a hole is
       regarded as being outside the polygon.

    Algorithm is based on work by Darel Finley,
    http://www.alienryderflex.com/polygon/

    Uses underlying C-implementation in polygon_ext.c
    """
    if check_input:
        # Input checks
        assert isinstance(closed, bool), \
            'Keyword argument "closed" must be boolean'
        assert isinstance(verbose, bool), \
            'Keyword argument "verbose" must be boolean'

        try:
            points = ensure_numeric(points, num.float)
        except NameError as e:
            # Consistency fix: use the plain py3 re-raise form here,
            # matching the polygon branch below (was future-lib raise_()).
            raise NameError(e)
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are not swallowed.
            msg = 'Points could not be converted to numeric array'
            raise Exception(msg)

        try:
            polygon = ensure_numeric(polygon, num.float)
        except NameError as e:
            raise NameError(e)
        except Exception:
            msg = 'Polygon could not be converted to numeric array'
            raise Exception(msg)

        msg = 'Polygon array must be a 2d array of vertices'
        assert len(polygon.shape) == 2, msg

        msg = 'Polygon array must have two columns'
        assert polygon.shape[1] == 2, msg

        msg = ('Points array must be 1 or 2 dimensional. '
               'I got %d dimensions' % len(points.shape))
        assert 0 < len(points.shape) < 3, msg

        if len(points.shape) == 1:
            # Only one point was passed in. Convert to array of points.
            points = num.reshape(points, (1, 2))

        # BUG FIX: the message interpolated points.shape[0] (the number of
        # points) while the assertion checks points.shape[1] (the number
        # of columns) -- report the value actually being tested.
        msg = ('Point array must have two columns (x,y), '
               'I got points.shape[1]=%d' % points.shape[1])
        assert points.shape[1] == 2, msg

        msg = ('Points array must be a 2d array. I got %s.'
               % str(points[:30]))
        assert len(points.shape) == 2, msg

        msg = 'Points array must have two columns'
        assert points.shape[1] == 2, msg

    N = polygon.shape[0]  # Number of vertices in polygon
    M = points.shape[0]   # Number of points

    indices = num.zeros(M, num.int)

    count = _separate_points_by_polygon(points, polygon, indices,
                                        int(closed), int(verbose))

    if verbose:
        log.critical('Found %d points (out of %d) inside polygon'
                     % (count, M))

    return indices, count
def Boyd_pipe_operator(domain,
                       losses,
                       diameter,
                       blockage=0.0,
                       barrels=1.0,
                       end_points=None,
                       exchange_lines=None,
                       enquiry_points=None,
                       invert_elevations=None,
                       apron=0.1,
                       manning=0.013,
                       enquiry_gap=0.0,
                       smoothing_timescale=0.0,
                       use_momentum_jet=True,
                       use_velocity_head=True,
                       description=None,
                       label=None,
                       structure_type='boyd_pipe',
                       logging=False,
                       verbose=False,
                       master_proc=0,
                       procs=None):
    """Factory for a Boyd pipe culvert operator.

    On a serial domain this simply returns the serial
    anuga.structures.boyd_pipe_operator.Boyd_pipe_operator.

    On a Parallel_domain, the master processor computes the inlet exchange
    lines and enquiry points and distributes them to all other processors
    via pypar point-to-point messages, then every processor determines
    which inlets it participates in via allocate_inlet_procs.  Processors
    that are associated with at least one inlet return a
    Parallel_Boyd_pipe_operator; all other processors return None.

    NOTE(review): the send/receive pattern below is order-sensitive --
    the master's send sequence must match the workers' receive sequence
    exactly.  Do not reorder these calls.
    """
    # If not parallel domain then allocate serial Boyd box operator
    if isinstance(domain, Parallel_domain) is False:
        if verbose:
            print("Allocating non parallel boyd pipe operator .....")
        return anuga.structures.boyd_pipe_operator.Boyd_pipe_operator(domain=domain,
                                                                      losses=losses,
                                                                      diameter=diameter,
                                                                      blockage=blockage,
                                                                      barrels=barrels,
                                                                      end_points=end_points,
                                                                      exchange_lines=exchange_lines,
                                                                      enquiry_points=enquiry_points,
                                                                      invert_elevations=invert_elevations,
                                                                      apron=apron,
                                                                      manning=manning,
                                                                      enquiry_gap=enquiry_gap,
                                                                      smoothing_timescale=smoothing_timescale,
                                                                      use_momentum_jet=use_momentum_jet,
                                                                      use_velocity_head=use_velocity_head,
                                                                      description=description,
                                                                      label=label,
                                                                      structure_type=structure_type,
                                                                      logging=logging,
                                                                      verbose=verbose)

    from anuga.utilities import parallel_abstraction as pypar

    if procs is None:
        procs = list(range(0, pypar.size()))

    myid = pypar.rank()

    end_points = ensure_numeric(end_points)
    exchange_lines = ensure_numeric(exchange_lines)
    enquiry_points = ensure_numeric(enquiry_points)

    # For a pipe the width is the diameter
    width = diameter

    assert diameter is not None

    if apron is None:
        apron = width

    # Calculate location of inlet enquiry points and exchange lines:
    # the master computes them and sends them to every other processor.
    if myid == master_proc:
        if exchange_lines is not None:
            exchange_lines_tmp = exchange_lines
            enquiry_points_tmp = __process_skew_culvert(exchange_lines, end_points, enquiry_points, apron, enquiry_gap)

            for i in procs:
                if i == master_proc: continue
                pypar.send(enquiry_points_tmp, i)

        elif end_points is not None:
            exchange_lines_tmp, enquiry_points_tmp = __process_non_skew_culvert(end_points, width,
                                                                                enquiry_points, apron, enquiry_gap)
            for i in procs:
                if i == master_proc: continue
                pypar.send(exchange_lines_tmp, i)
                pypar.send(enquiry_points_tmp, i)
        else:
            raise Exception('Define either exchange_lines or end_points')

    else:
        # Workers: receive in the exact order the master sends.
        if exchange_lines is not None:
            exchange_lines_tmp = exchange_lines
            enquiry_points_tmp = pypar.receive(master_proc)
        elif end_points is not None:
            exchange_lines_tmp = pypar.receive(master_proc)
            enquiry_points_tmp = pypar.receive(master_proc)

    # Determine processors associated with first inlet
    line0 = exchange_lines_tmp[0]
    enquiry_point0 = enquiry_points_tmp[0]

    alloc0, inlet0_master_proc, inlet0_procs, enquiry0_proc = allocate_inlet_procs(domain, line0,
                                                                                   enquiry_point = enquiry_point0,
                                                                                   master_proc = master_proc,
                                                                                   procs = procs, verbose=verbose)

    # Determine processors associated with second inlet
    line1 = exchange_lines_tmp[1]
    enquiry_point1 = enquiry_points_tmp[1]

    alloc1, inlet1_master_proc, inlet1_procs, enquiry1_proc = allocate_inlet_procs(domain, line1,
                                                                                   enquiry_point = enquiry_point1,
                                                                                   master_proc = master_proc,
                                                                                   procs = procs, verbose=verbose)

    structure_procs = list(set(inlet0_procs + inlet1_procs))
    inlet_master_proc = [inlet0_master_proc, inlet1_master_proc]
    inlet_procs = [inlet0_procs, inlet1_procs]
    enquiry_proc = [enquiry0_proc, enquiry1_proc]

    if myid == master_proc and verbose:
        print("Parallel Boyd Pipe Operator =============================")
        print("Structure Master Proc is P" + str(inlet0_master_proc))
        print("Structure Procs are P" + str(structure_procs))
        print("Inlet Master Procs are P" + str(inlet_master_proc))
        print("Inlet Procs are P" + str(inlet_procs[0]) + " and " + str(inlet_procs[1]))
        print("Inlet Enquiry Procs are P" + str(enquiry_proc))
        print("Enquiry Points are " + str(enquiry_point0) + " and " + str(enquiry_point1))
        print("Inlet Exchange Lines are " + str(line0) + " and " + str(line1))
        print("========================================================")

    # Only processors participating in an inlet instantiate the operator
    if alloc0 or alloc1:
        return Parallel_Boyd_pipe_operator(domain=domain,
                                           losses=losses,
                                           diameter=diameter,
                                           blockage=blockage,
                                           barrels=barrels,
                                           end_points=end_points,
                                           exchange_lines=exchange_lines,
                                           enquiry_points=enquiry_points,
                                           invert_elevations=invert_elevations,
                                           apron=apron,
                                           manning=manning,
                                           enquiry_gap=enquiry_gap,
                                           smoothing_timescale=smoothing_timescale,
                                           use_momentum_jet=use_momentum_jet,
                                           use_velocity_head=use_velocity_head,
                                           description=description,
                                           label=label,
                                           structure_type=structure_type,
                                           logging=logging,
                                           verbose=verbose,
                                           master_proc = inlet0_master_proc,
                                           procs = structure_procs,
                                           inlet_master_proc = inlet_master_proc,
                                           inlet_procs = inlet_procs,
                                           enquiry_proc = enquiry_proc)
    else:
        return None
def read_mux2_py(filenames, weights=None, permutation=None, verbose=False):
    """Read the mux2 files listed in filenames and combine their data as a
    weighted linear sum.

    If permutation is None or empty, timeseries data for all gauges within
    the files is extracted.

    Input:
        filenames: list of mux2-format files, one per source.
        weights: weight per source (defaults to 1 for each source).
        permutation: gauge numbers for which data is to be extracted.

    Returns:
        (times, latitudes, longitudes, elevation, quantity, starttime)
    """
    from .urs_ext import read_mux2

    n_sources = len(filenames)

    # Filled in by the C reader as [nsta, dt, nt]; -1 flags "not yet read"
    file_params = -1 * num.ones(3, num.float)

    # The C extension expects an int flag rather than a bool
    verbose = 1 if verbose else 0

    if weights is None:
        weights = num.ones(n_sources)

    if permutation is None:
        permutation = ensure_numeric([], num.int)

    # The C layer wants byte strings
    encoded_names = [str(name).encode() for name in filenames]

    # Call underlying C implementation urs2sts_ext.c
    data = read_mux2(n_sources, encoded_names, weights, file_params,
                     permutation, verbose)

    msg = 'File parameter values were not read in correctly from c file'
    assert len(num.compress(file_params > 0, file_params)) != 0, msg

    msg = 'The number of stations specifed in the c array and in the file ' \
          'are inconsistent'
    assert file_params[0] >= len(permutation), msg

    msg = 'The number of stations returned is inconsistent with ' \
          'the requested number'
    assert len(permutation) == 0 or len(permutation) == data.shape[0], msg

    nsta = int(file_params[0])
    assert nsta > 0, 'Must have at least one station'

    dt = file_params[1]
    assert dt > 0, 'Must have a postive timestep'

    nt = int(file_params[2])
    assert nt > 0, 'Must have at least one gauge value'

    # Number of site parameters p appended to each row of data:
    # p = [geolat, geolon, depth, start_tstep, finish_tstep]
    OFFSET = 5

    # FIXME (Ole): What is the relationship with params and data.shape ?
    # It looks as if the following asserts should pass but they don't always
    #
    #msg = 'nt = %d, data.shape[1] == %d' %(nt, data.shape[1])
    #assert nt == data.shape[1] - OFFSET, msg
    #
    #msg = 'nsta = %d, data.shape[0] == %d' %(nsta, data.shape[0])
    #assert nsta == data.shape[0], msg

    # Number of stations in ordering file
    n_stations = data.shape[0]

    # Column index where timeseries data ends and site parameters begin
    params_start = data.shape[1] - OFFSET

    times = dt * num.arange(params_start)

    latitudes = num.zeros(n_stations, num.float)
    longitudes = num.zeros(n_stations, num.float)
    elevation = num.zeros(n_stations, num.float)
    quantity = num.zeros((n_stations, params_start), num.float)

    starttime = 1e16
    for row in range(n_stations):
        quantity[row][:] = data[row][:params_start]
        latitudes[row] = data[row][params_start]
        longitudes[row] = data[row][params_start + 1]
        elevation[row] = -data[row][params_start + 2]
        first_step = data[row][params_start + 3]
        starttime = min(dt * first_step, starttime)

    return times, latitudes, longitudes, elevation, quantity, starttime
def Internal_boundary_operator(domain,
                               internal_boundary_function,
                               width=1.,
                               height=1.,
                               end_points=None,
                               exchange_lines=None,
                               enquiry_points=None,
                               invert_elevation=None,
                               apron=0.0,
                               enquiry_gap=0.0,
                               use_velocity_head=False,
                               zero_outflow_momentum=False,
                               force_constant_inlet_elevations=True,
                               smoothing_timescale=0.0,
                               compute_discharge_implicitly=True,
                               description=None,
                               label=None,
                               structure_type='internal_boundary',
                               logging=False,
                               verbose=True,
                               master_proc=0,
                               procs=None,
                               inlet_master_proc=[0, 0],
                               inlet_procs=None,
                               enquiry_proc=[0, 0]):
    """Factory for an internal-boundary structure operator.

    On a serial domain this returns the serial
    anuga.structures.internal_boundary_operator.Internal_boundary_operator.
    On a Parallel_domain the master processor computes the inlet exchange
    lines / enquiry points and distributes them, then each processor
    participating in an inlet returns a
    Parallel_Internal_boundary_operator; other processors return None.

    FIX: this function used Python-2-only syntax (print statements and
    'raise Exception, msg'), which is a SyntaxError under Python 3; it is
    converted here to print() calls and raise Exception(msg), matching the
    sibling Boyd_pipe_operator in this module.
    """
    # If not parallel domain then allocate serial Internal boundary operator
    if isinstance(domain, Parallel_domain) is False:
        if verbose:
            print("Allocating non parallel internal_boundary operator .....")
        return anuga.structures.internal_boundary_operator.Internal_boundary_operator(
            domain=domain,
            internal_boundary_function=internal_boundary_function,
            width=width,
            height=height,
            end_points=end_points,
            exchange_lines=exchange_lines,
            enquiry_points=enquiry_points,
            invert_elevation=invert_elevation,
            apron=apron,
            enquiry_gap=enquiry_gap,
            use_velocity_head=use_velocity_head,
            zero_outflow_momentum=zero_outflow_momentum,
            force_constant_inlet_elevations=force_constant_inlet_elevations,
            smoothing_timescale=smoothing_timescale,
            compute_discharge_implicitly=compute_discharge_implicitly,
            description=description,
            label=label,
            structure_type=structure_type,
            logging=logging,
            verbose=verbose)

    # Consistency fix: use the same parallel abstraction layer as the
    # sibling operators in this module (was a bare 'import pypar').
    from anuga.utilities import parallel_abstraction as pypar

    if procs is None:
        procs = list(range(0, pypar.size()))

    myid = pypar.rank()

    end_points = ensure_numeric(end_points)
    exchange_lines = ensure_numeric(exchange_lines)
    enquiry_points = ensure_numeric(enquiry_points)

    if height is None:
        height = width

    # Internal boundaries have no pipe diameter; retained for symmetry
    # with the other culvert factories.
    diameter = None

    if apron is None:
        apron = width

    # Calculate location of inlet enquiry points and exchange lines:
    # the master computes them and sends them to every other processor.
    # NOTE: send/receive order below must match between branches.
    if myid == master_proc:
        if exchange_lines is not None:
            exchange_lines_tmp = exchange_lines
            enquiry_points_tmp = __process_skew_culvert(
                exchange_lines, end_points, enquiry_points, apron, enquiry_gap)

            for i in procs:
                if i == master_proc:
                    continue
                pypar.send(enquiry_points_tmp, i)

        elif end_points is not None:
            exchange_lines_tmp, enquiry_points_tmp = __process_non_skew_culvert(
                end_points, width, enquiry_points, apron, enquiry_gap)
            for i in procs:
                if i == master_proc:
                    continue
                pypar.send(exchange_lines_tmp, i)
                pypar.send(enquiry_points_tmp, i)
        else:
            raise Exception('Define either exchange_lines or end_points')
    else:
        if exchange_lines is not None:
            exchange_lines_tmp = exchange_lines
            enquiry_points_tmp = pypar.receive(master_proc)
        elif end_points is not None:
            exchange_lines_tmp = pypar.receive(master_proc)
            enquiry_points_tmp = pypar.receive(master_proc)

    # Determine processors associated with first inlet
    line0 = exchange_lines_tmp[0]
    enquiry_point0 = enquiry_points_tmp[0]

    alloc0, inlet0_master_proc, inlet0_procs, enquiry0_proc = allocate_inlet_procs(
        domain,
        line0,
        enquiry_point=enquiry_point0,
        master_proc=master_proc,
        procs=procs,
        verbose=verbose)

    # Determine processors associated with second inlet
    line1 = exchange_lines_tmp[1]
    enquiry_point1 = enquiry_points_tmp[1]

    alloc1, inlet1_master_proc, inlet1_procs, enquiry1_proc = allocate_inlet_procs(
        domain,
        line1,
        enquiry_point=enquiry_point1,
        master_proc=master_proc,
        procs=procs,
        verbose=verbose)

    structure_procs = list(set(inlet0_procs + inlet1_procs))
    inlet_master_proc = [inlet0_master_proc, inlet1_master_proc]
    inlet_procs = [inlet0_procs, inlet1_procs]
    enquiry_proc = [enquiry0_proc, enquiry1_proc]

    if myid == master_proc and verbose:
        print("Parallel Internal boundary Operator =============================")
        print("Structure Master Proc is P" + str(inlet0_master_proc))
        print("Structure Procs are P" + str(structure_procs))
        print("Inlet Master Procs are P" + str(inlet_master_proc))
        print("Inlet Procs are P" + str(inlet_procs[0]) + " and " + str(inlet_procs[1]))
        print("Inlet Enquiry Procs are P" + str(enquiry_proc))
        print("Enquiry Points are " + str(enquiry_point0) + " and " + str(enquiry_point1))
        print("Inlet Exchange Lines are " + str(line0) + " and " + str(line1))
        print("========================================================")

    # Only processors participating in an inlet instantiate the operator
    if alloc0 or alloc1:
        return Parallel_Internal_boundary_operator(
            domain=domain,
            internal_boundary_function=internal_boundary_function,
            width=width,
            height=height,
            end_points=end_points,
            exchange_lines=exchange_lines,
            enquiry_points=enquiry_points,
            invert_elevation=invert_elevation,
            apron=apron,
            enquiry_gap=enquiry_gap,
            use_velocity_head=use_velocity_head,
            zero_outflow_momentum=zero_outflow_momentum,
            force_constant_inlet_elevations=force_constant_inlet_elevations,
            smoothing_timescale=smoothing_timescale,
            compute_discharge_implicitly=compute_discharge_implicitly,
            description=description,
            label=label,
            structure_type=structure_type,
            logging=logging,
            verbose=verbose,
            master_proc=inlet0_master_proc,
            procs=structure_procs,
            inlet_master_proc=inlet_master_proc,
            inlet_procs=inlet_procs,
            enquiry_proc=enquiry_proc)
    else:
        return None
    def test_file_boundary_stsIV_sinewave_ordering(self):
        """test_file_boundary_stsIV_sinewave_ordering(self):
        Read correct points from ordering file and apply sts to boundary
        This one uses a sine wave and compares to time boundary
        """

        # Gauge locations as (lat, long); the first three are reordered by
        # the ordering file below, the fourth closes the bounding polygon.
        lat_long_points=[[6.01, 97.0], [6.02, 97.0], [6.05, 96.9], [6.0, 97.0]]
        bounding_polygon=[[6.0, 97.0], [6.01, 97.0], [6.02,97.0], \
                          [6.02,97.02], [6.00,97.02]]
        tide = 0.35
        time_step_count = 50
        time_step = 0.1
        times_ref = num.arange(0, time_step_count*time_step, time_step)

        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)

        gauge_depth=20*num.ones(n,num.float)

        # Stage is a sine wave; velocities are constants (3, 2) so the
        # momenta written by the mux are velocity*(depth+stage).
        ha1=num.ones((n,time_step_count),num.float)
        ua1=3.*num.ones((n,time_step_count),num.float)
        va1=2.*num.ones((n,time_step_count),num.float)
        for i in range(n):
            ha1[i]=num.sin(times_ref)

        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count, time_step,
                                           first_tstep, last_tstep,
                                           depth=gauge_depth,
                                           ha=ha1,
                                           ua=ua1,
                                           va=va1)

        # Write order file
        file_handle, order_base_name = tempfile.mkstemp("")
        os.close(file_handle)
        os.remove(order_base_name)
        d=","
        order_file=order_base_name+'order.txt'
        fid=open(order_file,'w')

        # Write Header
        header='index, longitude, latitude\n'
        fid.write(header)
        # Note: longitude is written before latitude, and gauge 2 is dropped.
        indices=[3,0,1]
        for i in indices:
            line=str(i)+d+str(lat_long_points[i][1])+d+\
                str(lat_long_points[i][0])+"\n"
            fid.write(line)
        fid.close()

        sts_file=base_name
        urs2sts(base_name,
                basename_out=sts_file,
                ordering_filename=order_file,
                mean_stage=tide,
                verbose=False)
        self.delete_mux(files)

        # Now read the sts file and check that values have been stored correctly.
        fid = NetCDFFile(sts_file + '.sts')

        # Check the time vector
        times = fid.variables['time'][:]

        #print times

        # Check sts quantities
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]

        # Create beginnings of boundary polygon based on sts_boundary
        boundary_polygon = create_sts_boundary(base_name)

        os.remove(order_file)

        # Append the remaining part of the boundary polygon to be defined by
        # the user
        bounding_polygon_utm=[]
        for point in bounding_polygon:
            zone,easting,northing=redfearn(point[0],point[1])
            bounding_polygon_utm.append([easting,northing])

        boundary_polygon.append(bounding_polygon_utm[3])
        boundary_polygon.append(bounding_polygon_utm[4])

        #print 'boundary_polygon', boundary_polygon

        plot=False
        if plot:
            from pylab import plot,show,axis
            boundary_polygon=ensure_numeric(boundary_polygon)
            bounding_polygon_utm=ensure_numeric(bounding_polygon_utm)
            #plot(lat_long_points[:,0],lat_long_points[:,1],'o')
            plot(boundary_polygon[:,0], boundary_polygon[:,1])
            plot(bounding_polygon_utm[:,0],bounding_polygon_utm[:,1])
            show()

        assert num.allclose(bounding_polygon_utm,boundary_polygon)

        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}

        # have to change boundary tags from last example because now bounding
        # polygon starts in different place.
        create_mesh_from_regions(boundary_polygon,
                                 boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,
                                 filename=meshname,
                                 interior_regions=interior_regions,
                                 verbose=False)

        # First evolve a domain driven by the STS file boundary...
        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=boundary_polygon)
        Br = Reflective_boundary(domain_fbound)

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})
        finaltime=time_step*(time_step_count-1)
        yieldstep=time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):
            temp_fbound[i]=domain_fbound.quantities['stage'].centroid_values[2]

        # ...then evolve an identical domain driven by the analytic
        # Time_boundary that the mux data was generated from.
        domain_time = Domain(meshname)
        domain_time.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_time)
        Bw = Time_boundary(domain=domain_time,
                           function=lambda t: [num.sin(t)+tide,3.*(20.+num.sin(t)+tide),2.*(20.+num.sin(t)+tide)])
        domain_time.set_boundary({'ocean': Bw,'otherocean': Br})

        temp_time=num.zeros(int(finaltime/yieldstep)+1,num.float)

        domain_time.set_starttime(domain_fbound.get_starttime())

        for i, t in enumerate(domain_time.evolve(yieldstep=yieldstep,
                                                 finaltime=finaltime,
                                                 skip_initial_step=False)):
            temp_time[i]=domain_time.quantities['stage'].centroid_values[2]

        # The two runs must agree both at the sampled centroid and globally.
        assert num.allclose(temp_fbound, temp_time)

        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_time.quantities['stage'].vertex_values)

        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_time.quantities['xmomentum'].vertex_values)

        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_time.quantities['ymomentum'].vertex_values)

        try:
            os.remove(sts_file+'.sts')
        except:
            # Windoze can't remove this file for some reason
            pass

        os.remove(meshname)
def store_header(self, outfile, times, number_of_points, description='Converted from URS mux2 format', sts_precision=netcdf_float32, verbose=False): """Write a header to the underlying data file. outfile handle to open file to write times list of the time slice times *or* a start time number_of_points the number of URS gauge sites description description string to write into the STS file sts_precision format of data to write (netcdf constant ONLY) verbose True if this function is to be verbose If 'times' is a list, the info will be made relative. """ outfile.institution = 'Geoscience Australia' outfile.description = description try: revision_number = get_revision_number() except: revision_number = None # Allow None to be stored as a string outfile.revision_number = str(revision_number) # Start time in seconds since the epoch (midnight 1/1/1970) # This is being used to seperate one number from a list. # what it is actually doing is sorting lists from numeric arrays. if isinstance(times, (list, num.ndarray)): number_of_times = len(times) times = ensure_numeric(times) if number_of_times == 0: starttime = 0 else: starttime = times[0] times = times - starttime #Store relative times else: number_of_times = 0 starttime = times outfile.starttime = starttime # Dimension definitions outfile.createDimension('number_of_points', number_of_points) outfile.createDimension('number_of_timesteps', number_of_times) outfile.createDimension('numbers_in_range', 2) # Variable definitions outfile.createVariable('permutation', netcdf_int, ('number_of_points',)) outfile.createVariable('x', sts_precision, ('number_of_points',)) outfile.createVariable('y', sts_precision, ('number_of_points',)) outfile.createVariable('elevation', sts_precision, \ ('number_of_points',)) q = 'elevation' outfile.createVariable(q + Write_sts.RANGE, sts_precision, ('numbers_in_range',)) # Initialise ranges with small and large sentinels. 
# If this was in pure Python we could have used None sensibly outfile.variables[q + Write_sts.RANGE][0] = max_float # Min outfile.variables[q + Write_sts.RANGE][1] = -max_float # Max self.write_dynamic_quantities(outfile, Write_sts.sts_quantities, times)
    def store_header(self, outfile, times, number_of_points,
                     description='Converted from URS mux2 format',
                     sts_precision=netcdf_float32, verbose=False):
        """Write a header to the underlying data file.

        outfile          handle to open file to write
        times            list of the time slice times *or* a start time
        number_of_points the number of URS gauge sites
        description      description string to write into the STS file
        sts_precision    format of data to write (netcdf constant ONLY)
        verbose          True if this function is to be verbose

        If 'times' is a list, the info will be made relative.
        """

        outfile.institution = 'Geoscience Australia'
        outfile.description = description

        # Revision lookup is best-effort; failure stores the string 'None'.
        try:
            revision_number = get_revision_number()
        except:
            revision_number = None

        # Allow None to be stored as a string
        outfile.revision_number = str(revision_number)

        # Start time in seconds since the epoch (midnight 1/1/1970)
        # This is being used to seperate one number from a list.
        # what it is actually doing is sorting lists from numeric arrays.
        if isinstance(times, (list, num.ndarray)):
            number_of_times = len(times)
            times = ensure_numeric(times)
            if number_of_times == 0:
                starttime = 0
            else:
                starttime = times[0]
                times = times - starttime  #Store relative times
        else:
            # A scalar 'times' is interpreted as the start time itself.
            number_of_times = 0
            starttime = times

        outfile.starttime = starttime

        # Dimension definitions
        outfile.createDimension('number_of_points', number_of_points)
        outfile.createDimension('number_of_timesteps', number_of_times)
        outfile.createDimension('numbers_in_range', 2)

        # Variable definitions
        outfile.createVariable('permutation', netcdf_int, ('number_of_points', ))
        outfile.createVariable('x', sts_precision, ('number_of_points', ))
        outfile.createVariable('y', sts_precision, ('number_of_points', ))
        outfile.createVariable('elevation', sts_precision, \
                               ('number_of_points',))

        q = 'elevation'
        outfile.createVariable(q + Write_sts.RANGE, sts_precision,
                               ('numbers_in_range', ))

        # Initialise ranges with small and large sentinels.
        # If this was in pure Python we could have used None sensibly
        outfile.variables[q + Write_sts.RANGE][0] = max_float    # Min
        outfile.variables[q + Write_sts.RANGE][1] = -max_float   # Max

        self.write_dynamic_quantities(outfile, Write_sts.sts_quantities, times)
def timefile2netcdf(file_text, file_out=None, quantity_names=None,
                    time_as_seconds=False):
    """Template for converting typical text files with time series to
    NetCDF tms file.

    The file format is assumed to be either two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    or time (seconds), value0 value1 value2 ...

      0.0, 1.328223 0 0
      0.1, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extensions .txt/.tms and .sww

    file_text        input file name, must end in '.txt'
    file_out         output file name; defaults to input root + '.tms'
    quantity_names   optional list of attribute names (one per value column)
    time_as_seconds  if True the first field is seconds, not a date-time

    Raises IOError for a wrong input extension and DataTimeError for bad
    or non-monotonic time values.
    """

    import time, calendar
    from anuga.config import time_format
    from anuga.utilities.numerical_tools import ensure_numeric

    if file_text[-4:] != '.txt':
        raise IOError('Input file %s should be of type .txt.' % file_text)

    if file_out is None:
        file_out = file_text[:-4] + '.tms'

    # Read the first line only, to establish the start time and field count
    fid = open(file_text)
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    msg = "File %s must have the format 'datetime, value0 value1 value2 ...'" \
          % file_text
    assert len(fields) == 2, msg

    if not time_as_seconds:
        try:
            starttime = calendar.timegm(time.strptime(fields[0], time_format))
        except ValueError:
            msg = 'First field in file %s must be' % file_text
            msg += ' date-time with format %s.\n' % time_format
            msg += 'I got %s instead.' % fields[0]
            # Parenthesized raise: works on both Python 2 and 3
            raise DataTimeError(msg)
    else:
        try:
            starttime = float(fields[0])
        except Exception:
            msg = "Bad time format"
            raise DataTimeError(msg)

    # Split values of the first line to establish the number of columns
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg

    # Read times proper
    fid = open(file_text)
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = num.zeros(N, num.float)       # Time
    Q = num.zeros((N, d), num.float)  # Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        if not time_as_seconds:
            realtime = calendar.timegm(time.strptime(fields[0], time_format))
        else:
            realtime = float(fields[0])
        T[i] = realtime - starttime    # Store times relative to start time

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonuosly ' % file_text
    msg += 'increasing sequence'
    assert num.alltrue(T[1:] - T[:-1] > 0), msg

    # Create NetCDF file
    fid = NetCDFFile(file_out, netcdf_mode_w)

    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'

    # Reference point
    # Start time in seconds since the epoch (midnight 1/1/1970)
    # FIXME: Use Georef
    fid.starttime = starttime

    # dimension definitions
    #fid.createDimension('number_of_volumes', self.number_of_volumes)
    #fid.createDimension('number_of_vertices', 3)

    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', netcdf_float, ('number_of_timesteps',))
    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        # Fall back to a generic name when quantity_names is None
        # (TypeError) or too short (IndexError)
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d' % i

        fid.createVariable(name, netcdf_float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:, i]

    fid.close()
def intersection(line0, line1, rtol=1.0e-5, atol=1.0e-8):
    """Returns intersecting point between two line segments.

    However, if parallel lines coincide partly (i.e. share a common segment),
    the line segment where lines coincide is returned

    Inputs:
        line0, line1: Each defined by two end points as in: [[x0, y0], [x1, y1]]
                      A line can also be a 2x2 numpy array with each row
                      corresponding to a point.

    Output:
        status, value - where status and value is interpreted as follows:
        status == 0: no intersection, value set to None.
        status == 1: intersection point found and returned in value as [x,y].
        status == 2: Collinear overlapping lines found.
                     Value takes the form [[x0,y0], [x1,y1]].
        status == 3: Collinear non-overlapping lines. Value set to None.
        status == 4: Lines are parallel. Value set to None.
    """

    # FIXME (Ole): Write this in C
    line0 = ensure_numeric(line0, num.float)
    line1 = ensure_numeric(line1, num.float)

    (x0, y0), (x1, y1) = line0
    (x2, y2), (x3, y3) = line1

    # Cross products defining the parametric solution of the two lines
    denom = (y3 - y2) * (x1 - x0) - (x3 - x2) * (y1 - y0)
    u0 = (x3 - x2) * (y0 - y2) - (y3 - y2) * (x0 - x2)
    u1 = (x2 - x0) * (y1 - y0) - (y2 - y0) * (x1 - x0)

    if not num.allclose(denom, 0.0, rtol=rtol, atol=atol):
        # Lines are not parallel - compute parametric intersection point
        u0 = old_div(u0, denom)
        u1 = old_div(u1, denom)

        x = x0 + u0 * (x1 - x0)
        y = y0 + u0 * (y1 - y0)

        # Sanity check - can be removed to speed up if needed
        assert num.allclose(x, x2 + u1 * (x3 - x2), rtol=rtol, atol=atol)
        assert num.allclose(y, y2 + u1 * (y3 - y2), rtol=rtol, atol=atol)

        # Intersection is only real if it lies within both segments
        if 0.0 <= u0 <= 1.0 and 0.0 <= u1 <= 1.0:
            return 1, num.array([x, y])
        return 0, None

    # Parallel lines: collinear only if both residuals vanish
    if not num.allclose([u0, u1], 0.0, rtol=rtol, atol=atol):
        return 4, None    # FIXME (Ole): Add distance here instead of None

    # Collinear: classify by which endpoints lie on the other segment and
    # dispatch to the matching handler
    endpoint_tests = (([x0, y0], line1), ([x1, y1], line1),
                      ([x2, y2], line0), ([x3, y3], line0))
    state_tuple = tuple(point_on_line(p, seg, rtol=rtol, atol=atol)
                        for p, seg in endpoint_tests)
    # print state_tuple
    return collinear_result[state_tuple]([x0, y0], [x1, y1],
                                         [x2, y2], [x3, y3])
    def test_urs_ungridded2sww (self):
        # Convert a small synthetic mux dataset to sww and verify the
        # stored coordinates, times, stage, momenta and elevation.

        #Zone:   50
        #Easting:  240992.578  Northing: 7620442.472
        #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
        lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]
        time_step_count = 2
        time_step = 400
        tide = 9000000
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        urs_ungridded2sww(base_name, mean_stage=tide,
                          verbose=self.verbose)

        # now I want to check the sww file ...
        sww_file = base_name + '.sww'

        #Let's interigate the sww file
        # Note, the sww info is not gridded.  It is point data.
        fid = NetCDFFile(sww_file)

        # Make x and y absolute
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        geo_reference = Geo_reference(NetCDFObject=fid)
        points = geo_reference.get_absolute(map(None, x, y))
        points = ensure_numeric(points)
        x = points[:,0]
        y = points[:,1]

        #Check that first coordinate is correctly represented
        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])
        assert num.allclose([x[0],y[0]], [e,n])

        #Check the time vector
        times = fid.variables['time'][:]

        times_actual = []
        for i in range(time_step_count):
            times_actual.append(time_step * i)

        assert num.allclose(ensure_numeric(times),
                            ensure_numeric(times_actual))

        #Check first value
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]
        # write_mux uses the easting as the stage value at each gauge
        assert num.allclose(stage[0,0], e +tide)  #Meters

        #Check the momentums - ua
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_ua *(stage+depth)
        # = n*(e+tide+n) based on how I'm writing these files
        #
        answer_x = n*(e+tide+n)
        actual_x = xmomentum[0,0]
        #print "answer_x",answer_x
        #print "actual_x",actual_x
        assert num.allclose(answer_x, actual_x)  #Meters

        #Check the momentums - va
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_va *(stage+depth)
        # = e*(e+tide+n) based on how I'm writing these files
        #
        answer_y = -1*e*(e+tide+n)
        actual_y = ymomentum[0,0]
        #print "answer_y",answer_y
        #print "actual_y",actual_y
        assert num.allclose(answer_y, actual_y)  #Meters

        # check the stage values, first time step.
        # These arrays are equal since the Easting values were used as
        # the stage
        assert num.allclose(stage[0], x +tide)  #Meters

        # check the elevation values.
        # -ve since urs measures depth, sww meshers height,
        # these arrays are equal since the northing values were used as
        # the elevation
        assert num.allclose(-elevation, y)  #Meters

        fid.close()
        self.delete_mux(files)
        os.remove(sww_file)
def __init__(self, domain, end_points=None, exchange_lines=None, enquiry_points=None, invert_elevations=None, width=None, height=None, diameter=None, z1=None, z2=None, blockage=None, barrels=None, apron=None, manning=None, enquiry_gap=None, use_momentum_jet=False, zero_outflow_momentum=True, use_old_momentum_method=True, always_use_Q_wetdry_adjustment=True, force_constant_inlet_elevations=False, description=None, label=None, structure_type=None, logging=None, verbose=None): """ exchange_lines define the input lines for each inlet. If end_points = None, then the culvert_vector is calculated in the directions from the centre of echange_line[0] to centre of exchange_line[1} If end_points != None, then culvert_vector is unit vector in direction end_point[1] - end_point[0] """ anuga.Operator.__init__(self,domain) self.master_proc = 0 self.end_points = ensure_numeric(end_points) self.exchange_lines = ensure_numeric(exchange_lines) self.enquiry_points = ensure_numeric(enquiry_points) self.invert_elevations = ensure_numeric(invert_elevations) assert self.end_points is None or self.exchange_lines is None if height is None: height = width if width is None: width = diameter if apron is None: apron = width assert width is not None self.width = width self.height = height self.diameter = diameter self.z1 = z1 self.z2 = z2 self.blockage = blockage self.barrels = barrels self.apron = apron self.manning = manning self.enquiry_gap = enquiry_gap self.use_momentum_jet = use_momentum_jet self.zero_outflow_momentum = zero_outflow_momentum if use_momentum_jet and zero_outflow_momentum: msg = "Can't have use_momentum_jet and zero_outflow_momentum both True" raise Exception(msg) self.use_old_momentum_method = use_old_momentum_method self.always_use_Q_wetdry_adjustment = always_use_Q_wetdry_adjustment if description is None: self.description = ' ' else: self.description = description if label is None: self.label = "structure_%g" % Structure_operator.counter else: self.label = label + '_%g' 
% Structure_operator.counter if structure_type is None: self.structure_type = 'generic structure' else: self.structure_type = structure_type self.verbose = verbose # Keep count of structures Structure_operator.counter += 1 # Slots for recording current statistics self.accumulated_flow = 0.0 self.discharge = 0.0 self.discharge_abs_timemean = 0.0 self.velocity = 0.0 self.outlet_depth = 0.0 self.delta_total_energy = 0.0 self.driving_energy = 0.0 if exchange_lines is not None: self.__process_skew_culvert() elif end_points is not None: self.__process_non_skew_culvert() else: raise Exception, 'Define either exchange_lines or end_points' self.inlets = [] line0 = self.exchange_lines[0] #self.inlet_lines[0] if self.apron is None: poly0 = line0 else: offset = -self.apron*self.outward_vector_0 #print line0 #print offset poly0 = num.array([ line0[0], line0[1], line0[1]+offset, line0[0]+offset]) #print poly0 if self.invert_elevations is None: invert_elevation0 = None else: invert_elevation0 = self.invert_elevations[0] enquiry_point0 = self.enquiry_points[0] #outward_vector0 = - self.culvert_vector self.inlets.append(inlet_enquiry.Inlet_enquiry( self.domain, poly0, enquiry_point0, invert_elevation = invert_elevation0, outward_culvert_vector = self.outward_vector_0, verbose = self.verbose)) if force_constant_inlet_elevations: # Try to enforce a constant inlet elevation inlet_global_elevation = self.inlets[-1].get_average_elevation() self.inlets[-1].set_elevations(inlet_global_elevation) tris_0 = self.inlets[0].triangle_indices #print tris_0 #print self.domain.centroid_coordinates[tris_0] line1 = self.exchange_lines[1] if self.apron is None: poly1 = line1 else: offset = -self.apron*self.outward_vector_1 #print line1 #print offset poly1 = num.array([ line1[0], line1[1], line1[1]+offset, line1[0]+offset]) #print poly1 if self.invert_elevations is None: invert_elevation1 = None else: invert_elevation1 = self.invert_elevations[1] enquiry_point1 = self.enquiry_points[1] 
self.inlets.append(inlet_enquiry.Inlet_enquiry( self.domain, poly1, enquiry_point1, invert_elevation = invert_elevation1, outward_culvert_vector = self.outward_vector_1, verbose = self.verbose)) if force_constant_inlet_elevations: # Try to enforce a constant inlet elevation inlet_global_elevation = self.inlets[-1].get_average_elevation() self.inlets[-1].set_elevations(inlet_global_elevation) tris_1 = self.inlets[1].triangle_indices self.set_logging(logging)
def __init__(self, domain, quantity_name, rate=0.0, center=None, radius=None, polygon=None, default_rate=None, relative_time=True, verbose=False): from math import pi, cos, sin if domain.numproc > 1: msg = 'Not implemented to run in parallel' assert self.parallel_safe(), msg if center is None: msg = 'I got radius but no center.' assert radius is None, msg if radius is None: msg += 'I got center but no radius.' assert center is None, msg self.domain = domain self.quantity_name = quantity_name self.rate = rate self.center = ensure_numeric(center) self.radius = radius self.polygon = polygon self.verbose = verbose self.relative_time = relative_time self.value = 0.0 # Can be used to remember value at # previous timestep in order to obtain rate # Get boundary (in absolute coordinates) bounding_polygon = domain.get_boundary_polygon() # Update area if applicable if center is not None and radius is not None: assert len(center) == 2 msg = 'Polygon cannot be specified when center and radius are' assert polygon is None, msg # Check that circle center lies within the mesh. msg = 'Center %s specified for forcing term did not' % str(center) msg += 'fall within the domain boundary.' assert is_inside_polygon(center, bounding_polygon), msg # Check that circle periphery lies within the mesh. N = 100 periphery_points = [] for i in range(N): theta = old_div(2 * pi * i, 100) x = center[0] + radius * cos(theta) y = center[1] + radius * sin(theta) periphery_points.append([x, y]) for point in periphery_points: msg = 'Point %s on periphery for forcing term' % str(point) msg += ' did not fall within the domain boundary.' assert is_inside_polygon(point, bounding_polygon), msg if polygon is not None: # Check that polygon lies within the mesh. for point in self.polygon: msg = 'Point %s in polygon for forcing term' % str(point) msg += ' did not fall within the domain boundary.' 
assert is_inside_polygon(point, bounding_polygon), msg # Pointer to update vector self.update = domain.quantities[self.quantity_name].explicit_update # Determine indices in flow area N = len(domain) points = domain.get_centroid_coordinates(absolute=True) # Calculate indices in exchange area for this forcing term self.exchange_indices = None if self.center is not None and self.radius is not None: # Inlet is circular inlet_region = 'center=%s, radius=%s' % (self.center, self.radius) self.exchange_indices = [] for k in range(N): x, y = points[k, :] # Centroid c = self.center if ((x - c[0])**2 + (y - c[1])**2) < self.radius**2: self.exchange_indices.append(k) if self.polygon is not None: # Inlet is polygon self.exchange_indices = inside_polygon(points, self.polygon) if self.exchange_indices is None: self.exchange_area = polygon_area(bounding_polygon) else: if len(self.exchange_indices) == 0: msg = 'No triangles have been identified in ' msg += 'specified region: %s' % inlet_region raise Exception(msg) # Compute exchange area as the sum of areas of triangles identified # by circle or polygon self.exchange_area = 0.0 for i in self.exchange_indices: self.exchange_area += domain.areas[i] msg = 'Exchange area in forcing term' msg += ' has area = %f' % self.exchange_area assert self.exchange_area > 0.0 # Check and store default_rate msg = ('Keyword argument default_rate must be either None ' 'or a function of time.\nI got %s.' % str(default_rate)) assert (default_rate is None or isinstance(default_rate, (int, float)) or callable(default_rate)), msg if default_rate is not None: # If it is a constant, make it a function if not callable(default_rate): tmp = default_rate default_rate = lambda t: tmp # Check that default_rate is a function of one argument try: default_rate(0.0) except: raise Exception(msg) self.default_rate = default_rate self.default_rate_invoked = False # Flag
def get_netcdf_file_function(filename,
                             quantity_names=None,
                             interpolation_points=None,
                             domain_starttime=None,
                             time_thinning=1,
                             time_limit=None,
                             verbose=False,
                             boundary_polygon=None,
                             output_centroids=False):
    """Read time history of spatial data from NetCDF sww file and return
    a callable object f(t,x,y) which will return interpolated values
    based on the input file.

    Model time (domain_starttime)
    will be checked, possibly modified and returned

    All times are assumed to be in UTC

    See Interpolation function for further documetation
    """

    # FIXME: Check that model origin is the same as file's origin
    # (both in UTM coordinates)
    # If not - modify those from file to match domain
    # (origin should be passed in)
    # Take this code from e.g. dem2pts in data_manager.py
    # FIXME: Use geo_reference to read and write xllcorner...

    import time, calendar
    from anuga.config import time_format

    # Open NetCDF file
    if verbose: log.critical('Reading %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)

    # Accept a single quantity name as well as a list
    if isinstance(quantity_names, basestring):
        quantity_names = [quantity_names]

    if quantity_names is None or len(quantity_names) < 1:
        msg = 'No quantities are specified in file_function'
        raise Exception(msg)

    if interpolation_points is not None:

        #interpolation_points = num.array(interpolation_points, num.float)
        interpolation_points = ensure_absolute(interpolation_points)
        msg = 'Points must by N x 2. I got %d' % interpolation_points.shape[1]
        assert interpolation_points.shape[1] == 2, msg

    # Now assert that requested quantitites (and the independent ones)
    # are present in file
    missing = []
    for quantity in ['time'] + quantity_names:
        if not fid.variables.has_key(quantity):
            missing.append(quantity)

    if len(missing) > 0:
        msg = 'Quantities %s could not be found in file %s'\
              % (str(missing), filename)
        fid.close()
        raise Exception(msg)

    # Decide whether this data has a spatial dimension
    # (presence of both x and y variables)
    spatial = True
    for quantity in ['x', 'y']:
        if not fid.variables.has_key(quantity):
            spatial = False

    if filename[-3:] == 'tms' and spatial is True:
        msg = 'Files of type TMS must not contain spatial information'
        raise Exception(msg)

    if filename[-3:] == 'sww' and spatial is False:
        msg = 'Files of type SWW must contain spatial information'
        raise Exception(msg)

    if filename[-3:] == 'sts' and spatial is False:
        #What if mux file only contains one point
        msg = 'Files of type STS must contain spatial information'
        raise Exception(msg)

    # JJ REMOVED
    #if filename[-3:] == 'sts' and boundary_polygon is None:
    #    #What if mux file only contains one point
    #    msg = 'Files of type sts require boundary polygon'
    #    raise Exception(msg)

    # Get first timestep
    try:
        starttime = float(fid.starttime)
    except ValueError:
        msg = 'Could not read starttime from file %s' % filename
        raise Exception(msg)

    # Get variables
    # if verbose: log.critical('Get variables'    )
    time = fid.variables['time'][:]

    # FIXME(Ole): Is time monotoneous?

    # Apply time limit if requested
    upper_time_index = len(time)
    msg = 'Time vector obtained from file %s has length 0' % filename
    assert upper_time_index > 0, msg

    if time_limit is not None:
        # Adjust given time limit to given start time
        time_limit = time_limit - starttime

        # Find limit point
        for i, t in enumerate(time):
            if t > time_limit:
                upper_time_index = i
                break

        msg = 'Time vector is zero. Requested time limit is %f' % time_limit
        assert upper_time_index > 0, msg

        if time_limit < time[-1] and verbose is True:
            log.critical('Limited time vector from %.2fs to %.2fs'
                         % (time[-1], time_limit))

    time = time[:upper_time_index]

    # Get time independent stuff
    if spatial:
        # Get origin
        #xllcorner = fid.xllcorner[0]
        #yllcorner = fid.yllcorner[0]
        #zone = fid.zone[0]
        xllcorner = fid.xllcorner
        yllcorner = fid.yllcorner
        zone = fid.zone

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        if filename.endswith('sww'):
            triangles = fid.variables['volumes'][:]

        # Build an m x 2 array of vertex coordinates
        x = num.reshape(x, (len(x), 1))
        y = num.reshape(y, (len(y), 1))
        vertex_coordinates = num.concatenate((x, y), axis=1) #m x 2 array

        if boundary_polygon is not None:
            # Remove sts points that do not lie on boundary
            # FIXME(Ole): Why don't we just remove such points from the list of
            # points and associated data?
            # I am actually convinced we can get rid of neighbour_gauge_id
            # altogether as the sts file is produced using the ordering file.
            # All sts points are therefore always present in the boundary.
            # In fact, they *define* parts of the boundary.
            boundary_polygon = ensure_numeric(boundary_polygon)
            boundary_polygon[:, 0] -= xllcorner
            boundary_polygon[:, 1] -= yllcorner
            # Match each boundary polygon point to a gauge vertex
            temp = []
            boundary_id = []
            gauge_id = []
            for i in range(len(boundary_polygon)):
                for j in range(len(x)):
                    if num.allclose(vertex_coordinates[j],
                                    boundary_polygon[i], 1e-4):
                        #FIXME:
                        #currently gauges lat and long is stored as float and
                        #then cast to double. This cuases slight repositioning
                        #of vertex_coordinates.
                        temp.append(boundary_polygon[i])
                        gauge_id.append(j)
                        boundary_id.append(i)
                        break
            # Determine which matched gauges are adjacent along the boundary
            # (-1 marks no neighbour)
            gauge_neighbour_id = []
            for i in range(len(boundary_id) - 1):
                if boundary_id[i] + 1 == boundary_id[i + 1]:
                    gauge_neighbour_id.append(i + 1)
                else:
                    gauge_neighbour_id.append(-1)
            if boundary_id[len(boundary_id) - 1] == len(boundary_polygon) - 1 \
               and boundary_id[0] == 0:
                gauge_neighbour_id.append(0)
            else:
                gauge_neighbour_id.append(-1)
            gauge_neighbour_id = ensure_numeric(gauge_neighbour_id)

            if len(num.compress(gauge_neighbour_id >= 0, gauge_neighbour_id)) \
               != len(temp) - 1:
                msg = 'incorrect number of segments'
                raise Exception(msg)
            vertex_coordinates = ensure_numeric(temp)
            if len(vertex_coordinates) == 0:
                msg = 'None of the sts gauges fall on the boundary'
                raise Exception(msg)
        else:
            gauge_neighbour_id = None

        if interpolation_points is not None:
            # Adjust for georef
            interpolation_points[:, 0] -= xllcorner
            interpolation_points[:, 1] -= yllcorner
    else:
        gauge_neighbour_id = None

    if domain_starttime is not None:
        # If domain_startime is *later* than starttime,
        # move time back - relative to domain's time
        if domain_starttime > starttime:
            time = time - domain_starttime + starttime

        # FIXME Use method in geo to reconcile
        # if spatial:
        # assert domain.geo_reference.xllcorner == xllcorner
        # assert domain.geo_reference.yllcorner == yllcorner
        # assert domain.geo_reference.zone == zone

    if verbose:
        log.critical('File_function data obtained from: %s' % filename)
        log.critical(' References:')
        if spatial:
            log.critical(' Lower left corner: [%f, %f]'
                         % (xllcorner, yllcorner))
        log.critical(' Start time: %f' % starttime)

    # Produce values for desired data points at
    # each timestep for each quantity
    quantities = {}
    for i, name in enumerate(quantity_names):
        quantities[name] = fid.variables[name][:]
        if boundary_polygon is not None:
            #removes sts points that do not lie on boundary
            quantities[name] = num.take(quantities[name], gauge_id, axis=1)

    # Close sww, tms or sts netcdf file
    fid.close()

    from anuga.fit_interpolate.interpolate import Interpolation_function

    if not spatial:
        vertex_coordinates = triangles = interpolation_points = None
    if filename[-3:] == 'sts':#added
        triangles = None
        #vertex coordinates is position of urs gauges

    if verbose:
        log.critical('Calling interpolation function')

    # Return Interpolation_function instance as well as
    # starttime for use to possible modify that of domain
    return (Interpolation_function(time,
                                   quantities,
                                   quantity_names,
                                   vertex_coordinates,
                                   triangles,
                                   interpolation_points,
                                   time_thinning=time_thinning,
                                   verbose=verbose,
                                   gauge_neighbour_id=gauge_neighbour_id,
                                   output_centroids=output_centroids),
            starttime)
def interpolate_block(self, f, point_coordinates,
                      NODATA_value=NAN, use_cache=False, verbose=False,
                      output_centroids=False):
    """Interpolate mesh data f to the given point_coordinates in one block.

    Call this if you want to control the blocking or make sure
    blocking doesn't occur.

    Inputs:
        f: data at the mesh vertices (vector or array); one column per
           quantity when an array.
        point_coordinates: nx2 array/list of points, or a Geospatial_data
           object (converted to absolute data points).
        NODATA_value: value assigned to points falling outside the mesh.
        use_cache: if True, reuse a previously built interpolation matrix
           (disk cache on non-Windows platforms, in-memory dict otherwise).
        output_centroids: forwarded to the matrix builder; presumably
           switches interpolation to triangle centroids — TODO confirm.

    Returns the interpolated point data z (result of matrix-vector
    product A*f via self._get_point_data_z).
    """
    # FIXME (Ole): I reckon we should change the interface so that
    # the user can specify the interpolation matrix instead of the
    # interpolation points to save time.

    if isinstance(point_coordinates, Geospatial_data):
        point_coordinates = point_coordinates.get_data_points(
            absolute=True)

    # Convert lists to numeric arrays if necessary
    point_coordinates = ensure_numeric(point_coordinates, num.float)
    f = ensure_numeric(f, num.float)

    from anuga.caching import myhash
    import sys

    if use_cache is True:
        if sys.platform != 'win32':
            # FIXME (Ole): (Why doesn't this work on windoze?)
            # Still absolutely fails on Win 24 Oct 2008
            X = cache(self._build_interpolation_matrix_A,
                      args=(point_coordinates, output_centroids),
                      kwargs={'verbose': verbose},
                      verbose=verbose)
        else:
            # FIXME
            # Hash point_coordinates to memory location, reuse if possible
            # This will work on Linux as well if we want to use it there.
            key = myhash(point_coordinates)

            reuse_A = False

            if key in self.interpolation_matrices:
                X, stored_points = self.interpolation_matrices[key]
                # Hash collision is possible, so verify the actual points
                if num.alltrue(stored_points == point_coordinates):
                    reuse_A = True  # Reuse interpolation matrix

            if reuse_A is False:
                X = self._build_interpolation_matrix_A(point_coordinates,
                                                       output_centroids,
                                                       verbose=verbose)
                self.interpolation_matrices[key] = (X, point_coordinates)
    else:
        X = self._build_interpolation_matrix_A(point_coordinates,
                                               output_centroids,
                                               verbose=verbose)

    # Unpack result: interpolation matrix plus index bookkeeping
    self._A, self.inside_poly_indices, self.outside_poly_indices, self.centroids = X

    # Check that input dimensions are compatible
    msg = 'Two columns must be specified in point coordinates. ' \
          'I got shape=%s' % (str(point_coordinates.shape))
    assert point_coordinates.shape[1] == 2, msg

    msg = 'The number of rows in matrix A must be the same as the '
    msg += 'number of points supplied.'
    msg += ' I got %d points and %d matrix rows.' \
           % (point_coordinates.shape[0], self._A.shape[0])
    assert point_coordinates.shape[0] == self._A.shape[0], msg

    msg = 'The number of columns in matrix A must be the same as the '
    msg += 'number of mesh vertices.'
    msg += ' I got %d vertices and %d matrix columns.' \
           % (f.shape[0], self._A.shape[1])
    assert self._A.shape[1] == f.shape[0], msg

    # Compute Matrix vector product and return
    return self._get_point_data_z(f, NODATA_value=NODATA_value)
def generate_mesh(points=None,
                  segments=None,
                  holes=None,
                  regions=None,
                  pointatts=None,
                  segatts=None,
                  mode=None,
                  dummy_test=None):
    """Run the 'triangle' mesh generator and return the result as a dict.

    pointatts can be a list of lists.

    generatedtriangleattributelist is used to represent tagged regions.
    #FIXME (DSG-DSG): add comments

    Inputs (all validated/coerced to numeric arrays):
        points:   nx2 vertex coordinates.
        segments: mx2 integer vertex-index pairs (PSLG edges).
        holes:    coordinates of points inside holes.
        regions:  region seed points (area tag appended by add_area_tag).
        pointatts: per-point attribute rows (defaults to empty rows).
        segatts:  per-segment integer markers (defaults to zeros).
        mode:     triangle switch string, e.g. 'pzq28' — 'j' is prepended
                  below to jettison unused vertices.
        dummy_test: unused placeholder kept for interface compatibility.

    Raises ANUGAError on inconsistent/bad-shaped input and
    NoTrianglesError when triangulation produces no triangles.
    """
    #FIXME (DSG-DSG): Catch parameters that are lists,
    #instead of lists of lists
    # check shape[1] is 2 etc

    if points is None:
        points = []
    if segments is None:
        segments = []
    if holes is None:
        holes = []
    if regions is None:
        regions = []
    if dummy_test is None:
        dummy_test = []

    try:
        points = ensure_numeric(points, num.float)
    except ValueError:
        msg = 'ERROR: Inconsistent points array.'
        raise ANUGAError(msg)
    if points.shape[1] != 2:
        msg = 'ERROR: Bad shape points array.'
        raise ANUGAError(msg)

    # This is after points is numeric
    if pointatts is None or pointatts == []:
        pointatts = [[] for x in range(points.shape[0])]

    try:
        # If num.int is used, instead of num.int32, it fails in Linux
        segments = ensure_numeric(segments, num.int32)
    except ValueError:
        msg = 'ERROR: Inconsistent segments array.'
        raise ANUGAError(msg)

    # This is after segments is numeric
    if segatts is None or segatts == []:
        segatts = [0 for x in range(segments.shape[0])]

    try:
        holes = ensure_numeric(holes, num.float)
    except ValueError:
        msg = 'ERROR: Inconsistent holes array.'
        raise ANUGAError(msg)

    regions = add_area_tag(regions)
    try:
        regions = ensure_numeric(regions, num.float)
    except (ValueError, TypeError):
        msg = 'ERROR: Inconsistent regions array.'
        raise ANUGAError(msg)

    if not regions.shape[0] == 0 and regions.shape[1] <= 2:
        msg = 'ERROR: Bad shape points array.'
        raise ANUGAError(msg)

    try:
        pointatts = ensure_numeric(pointatts, num.float)
    except (ValueError, TypeError):
        msg = 'ERROR: Inconsistent point attributes array.'
        raise ANUGAError(msg)

    if pointatts.shape[0] != points.shape[0]:
        msg = """ERROR: Point attributes array not the same shape as point array."""
        raise ANUGAError(msg)
    if len(pointatts.shape) == 1:
        # Promote a flat attribute vector to a single-column 2-D array
        pointatts = num.reshape(pointatts, (pointatts.shape[0], 1))

    try:
        segatts = ensure_numeric(segatts, num.int32)
    except ValueError:
        # NOTE(review): message says 'point attributes' but this branch
        # handles segment attributes — looks like a copy-paste slip.
        msg = 'ERROR: Inconsistent point attributes array.'
        raise ANUGAError(msg)
    if segatts.shape[0] != segments.shape[0]:
        msg = """ERROR: Segment attributes array not the same shape as segment array."""
        raise ANUGAError(msg)

    # NOTE(review): str.find returns -1 when 'n' is absent, which is
    # truthy — so 'j' is prepended unless mode STARTS with 'n'.  The
    # intent was presumably `mode.find('n') >= 0` (or `'n' in mode`);
    # confirm before changing, callers may rely on current behaviour.
    if mode.find('n'):
        #pass
        mode = 'j' + mode
        # j- Jettisons vertices that are not part of the final
        # triangulation from the output .node file (including duplicate
        # input vertices and vertices ``eaten'' by holes). - output a
        # list of neighboring triangles
        # EG handles lone verts!

    #
    # GD (June 2014): We get segfaults in some cases with breakLines, unless
    # we remove repeated values in 'points', and adjust segments accordingly
    #
    # Points are matched by packing (x, y) into a single complex number.
    pts_complex = points[:, 0] + 1j * points[:, 1]  # Use to match points
    i = 0  # Use this as a counter, since the length of 'points' changes as we go
    while (i < len(pts_complex) - 1):
        i = i + 1  # Maximum i = len(pts_complex)-1 = largest index of points
        #
        # Check if points[i,] is the same as a previous point
        if (any(pts_complex[i] == pts_complex[0:i])):
            # Get index of previous occurrence
            earlierInd = (pts_complex[i] == pts_complex[0:i]).nonzero()[0][0]
            # Remove the ith point, and adjust the segments
            for ii in range(len(segments)):
                for j in range(2):
                    if (segments[ii, j] == i):
                        # Segment will use previous occurrence of this point
                        segments[ii, j] = earlierInd
                    if (segments[ii, j] > i):
                        # Decrement the index (since we will remove point[i,])
                        segments[ii, j] = segments[ii, j] - 1
            # Remove ith point
            points = num.delete(points, i, 0)
            pointatts = num.delete(pointatts, i, 0)
            # Recompute the complex number points for matching
            pts_complex = points[:, 0] + 1j * points[:, 1]
            i = i - 1  # Repeat for the last value of i = next point

    #print(points,segments,holes,regions, pointatts,segatts)

    # Build the input dict for the 'triangle' package, adding only
    # non-empty arrays (triangle rejects empty entries).
    in_tri = ({'vertices': points})
    if segments.size != 0:
        in_tri['segments'] = segments
        refine = True  # NOTE(review): assigned but never used here
    if holes.size != 0:
        in_tri['holes'] = holes
    if regions.size != 0:
        in_tri['regions'] = regions
    if pointatts.size != 0:
        in_tri['vertex_attributes'] = pointatts
    if segatts.size != 0:
        in_tri['segment_markers'] = segatts

    tri = triang.triangulate(in_tri, mode)

    # triangle omits keys for outputs it did not produce; substitute
    # correctly-shaped empty arrays so downstream code need not check.
    if 'vertices' in tri:
        pointlist = num.ascontiguousarray(tri['vertices'])
    else:
        pointlist = num.empty((0, 2), dtype=num.float)
    if 'vertex_markers' in tri:
        pointmarkerlist = num.ascontiguousarray(
            tri['vertex_markers'].reshape(-1))
    else:
        pointmarkerlist = num.empty(pointlist.shape[0], dtype=num.int32)
    if 'triangles' in tri:
        trianglelist = num.ascontiguousarray(tri['triangles'])
    else:
        trianglelist = num.empty((0, 3), dtype=num.int32)
    if 'vertex_attributes' in tri:
        pointattributelist = num.ascontiguousarray(tri['vertex_attributes'])
    else:
        pointattributelist = num.empty((pointlist.shape[0], 0),
                                       dtype=num.float)
    if 'triangle_attributes' in tri:
        triangleattributelist = num.ascontiguousarray(
            tri['triangle_attributes'])
    else:
        triangleattributelist = num.empty((trianglelist.shape[0], 0),
                                          dtype=num.float)
    if 'segments' in tri:
        segmentlist = num.ascontiguousarray(tri['segments'])
    else:
        segmentlist = num.empty((0, 2), dtype=num.int32)
    if 'segment_markers' in tri:
        segmentmarkerlist = num.ascontiguousarray(
            tri['segment_markers'].reshape(-1))
    else:
        segmentmarkerlist = num.empty(segmentlist.shape[0], dtype=num.int32)
    if 'neighbors' in tri:
        neighborlist = num.ascontiguousarray(tri['neighbors'])
    else:
        neighborlist = num.empty((trianglelist.shape[0], 3), dtype=num.int32)

    mesh_dict = {}
    # the values as arrays
    mesh_dict['generatedtrianglelist'] = trianglelist
    mesh_dict['generatedpointlist'] = pointlist
    # WARNING - generatedpointmarkerlist IS UNTESTED
    mesh_dict['generatedpointmarkerlist'] = pointmarkerlist
    mesh_dict['generatedpointattributelist'] = pointattributelist
    mesh_dict['generatedsegmentlist'] = segmentlist
    mesh_dict['generatedsegmentmarkerlist'] = segmentmarkerlist
    mesh_dict['generatedtriangleneighborlist'] = neighborlist
    mesh_dict['qaz'] = 1  #debugging

    #mesh_dict['triangleattributelist'] = triangleattributelist
    if True:
        # Empty attribute/neighbour tables are normalised to None
        mesh_dict['generatedtriangleattributelist'] = triangleattributelist
        if mesh_dict['generatedtriangleattributelist'].shape[1] == 0:
            mesh_dict['generatedtriangleattributelist'] = None

        if mesh_dict['generatedpointattributelist'].shape[1] == 0:
            mesh_dict['generatedpointattributelist'] = None

        if mesh_dict['generatedtriangleneighborlist'].shape[1] == 0:
            mesh_dict['generatedtriangleneighborlist'] = None

        if trianglelist.shape[0] == 0:
            # There are no triangles.
            # this is used by urs_ungridded2sww
            raise NoTrianglesError

    a = mesh_dict['generatedtriangleattributelist']
    # the structure of generatedtriangleattributelist is an list of
    # list of integers. It is transformed into a list of list of
    # strings later on. This is then inputted into an triangle
    # object. The triangle object outputs a list of strings. Note
    # the subtle change! How should I handle this? For my code, when
    # the list of list of integers is transformed, transform it into a
    # list of strings, not a list of list of strings.
    return mesh_dict
def interpolate(vertex_coordinates,
                triangles,
                vertex_values,
                interpolation_points,
                mesh_origin=None,
                start_blocking_len=500000,
                use_cache=False,
                verbose=False,
                output_centroids=False):
    """Interpolate vertex_values to interpolation points.

    This is a convenience shortcut for the common case where the caller
    does not need to keep the interpolation matrix around: it builds an
    Interpolate object for the mesh and immediately evaluates it at the
    requested points.

    Inputs (mandatory):
        vertex_coordinates: mesh vertex coordinates — list of [xi, eta]
            pairs, an m x 2 numeric array, or a geospatial object.
            Points may appear multiple times (e.g. if vertices have
            discontinuities).
        triangles: list of 3-tuples (or numeric array) of integer
            vertex indices defining the mesh.
        vertex_values: data at the mesh vertices; if an array, each
            column is interpolated independently (matrix-matrix product).
        interpolation_points: positions to interpolate to — list of
            [x, y] pairs, an nx2 numeric array, or a Geospatial_data
            object.

    Inputs (optional):
        mesh_origin: geo_reference object or (zone, easting, northing)
            3-tuple the vertex coordinates are relative to.  Do not
            combine with a geospatial vertex_coordinates argument,
            which carries its own origin.
        start_blocking_len: threshold number of points above which
            blocking would start (not applied by this shortcut).
        use_cache: cache both the mesh object and the interpolation.

    Output: interpolated values at the specified interpolation_points.

    Note: blocking is not performed here, but caching is supported.
    """
    # FIXME(Ole): Probably obsolete since I is precomputed and
    # interpolate_block caches
    from anuga.caching import cache

    # Assemble the constructor arguments for the mesh interpolator
    mesh_args = (ensure_numeric(vertex_coordinates, num.float),
                 ensure_numeric(triangles))
    mesh_kwargs = {'mesh_origin': mesh_origin,
                   'verbose': verbose}

    if use_cache is True:
        interp = cache(Interpolate, mesh_args, mesh_kwargs, verbose=verbose)
    else:
        interp = Interpolate(*mesh_args, **mesh_kwargs)

    # Evaluate the mesh data at the requested points and return
    return interp.interpolate_block(vertex_values,
                                    interpolation_points,
                                    use_cache=use_cache,
                                    verbose=verbose,
                                    output_centroids=output_centroids)
def __init__(self,
             vertex_coordinates=None,
             triangles=None,
             mesh=None,
             mesh_origin=None,
             verbose=False):
    """Build interpolation matrix mapping from function values at vertices
    to function values at data points.

    Pass in a mesh instance or vertex_coordinates and triangles
    and optionally mesh_origin.

    Inputs:
        vertex_coordinates: List of coordinate pairs [xi, eta] of
            points constituting a mesh (or an m x 2 numeric array or
            a geospatial object).  Points may appear multiple times
            (e.g. if vertices have discontinuities).
        triangles: List of 3-tuples (or a numeric array) of
            integers representing indices of all vertices in the mesh.
        mesh: A mesh instance describing the mesh.  Takes precedence
            over vertex_coordinates/triangles when supplied.
        mesh_origin: A geo_reference object or 3-tuples consisting of
            UTM zone, easting and northing.  If specified vertex
            coordinates are assumed to be relative to their respective
            origins.

    Note: Don't supply a vertex coords as a geospatial object and
        a mesh origin, since geospatial has its own mesh origin.

    Side effects: sets self.mesh and self.root (quad tree), and updates
    the module-level build_quadtree_time timing global.
    """
    # NOTE PADARN: The Fit_Interpolate class now uses a the c based
    # quad tree to store triangles, rather than the python based tree.
    # The tree is still stored at self.root. However, the subtrees of
    # the new quad tree can not be directly accessed by python as
    # was previously possible.
    # Most of the previous functionality has been preserved.

    global build_quadtree_time
    if mesh is None:
        if vertex_coordinates is not None and triangles is not None:
            # Fixme (DSG) Throw errors if triangles or vertex_coordinates
            # are None
            # Convert input to numeric arrays
            triangles = ensure_numeric(triangles, num.int)
            vertex_coordinates = ensure_absolute(vertex_coordinates,
                                                 geo_reference=mesh_origin)

            if verbose:
                log.critical('FitInterpolate: Building mesh')
            self.mesh = Mesh(vertex_coordinates, triangles)
            #self.mesh.check_integrity() # Time consuming
        else:
            # Neither a mesh nor a full triangulation was supplied
            self.mesh = None
    else:
        self.mesh = mesh

    if self.mesh is not None:
        if verbose:
            log.critical('FitInterpolate: Building quad tree')
        #This stores indices of vertices
        t0 = time.time()
        self.root = MeshQuadtree(self.mesh, verbose=verbose)
        # Record quad tree construction time in the module-level global
        build_quadtree_time = time.time() - t0
def _create_mesh_from_regions(bounding_polygon,
                              boundary_tags,
                              maximum_triangle_area=None,
                              filename=None,
                              interior_regions=None,
                              interior_holes=None,
                              hole_tags=None,
                              poly_geo_reference=None,
                              mesh_geo_reference=None,
                              minimum_triangle_angle=28.0,
                              fail_if_polygons_outside=True,
                              breaklines=None,
                              verbose=True,
                              regionPtArea=None):
    """_create_mesh_from_regions - internal function.

    See create_mesh_from_regions for documentation.

    Builds a Mesh from a bounding polygon plus optional interior
    regions, holes, breaklines and per-point region areas.  Returns the
    Mesh instance; if filename is given the mesh is also generated and
    exported to that file.

    Raises:
        SegmentError: a boundary tag references a segment index outside
            the bounding polygon.
        PolygonError: an interior region/hole lies (partly) outside the
            bounding polygon, or a hole has fewer than 3 points.

    Note: the Python-2-only ``<>`` comparisons have been replaced with
    ``!=`` so this function parses under Python 3 as well.
    """
    # check the segment indexes - throw an error if they are out of bounds
    if boundary_tags is not None:
        max_points = len(bounding_polygon)
        for key in boundary_tags.keys():
            if len([x for x in boundary_tags[key] if x > max_points - 1]) >= 1:
                msg = 'Boundary tag %s has segment out of bounds. ' \
                      % (str(key))
                msg += 'Number of points in bounding polygon = %d' % max_points
                raise SegmentError(msg)

        # Warn about segments that were not given any tag at all
        for i in range(max_points):
            found = False
            for tag in boundary_tags:
                if i in boundary_tags[tag]:
                    found = True
            if found is False:
                msg = 'Segment %d was not assigned a boundary_tag. ' % i
                msg += 'Default tag "exterior" will be assigned to missing segment'
                #raise Exception(msg)
                # Fixme: Use proper Python warning
                if verbose:
                    log.critical('WARNING: %s' % msg)

    #In addition I reckon the polygons could be of class Geospatial_data
    #(DSG) If polygons were classes caching would break in places.

    # Simple check
    bounding_polygon = ensure_numeric(bounding_polygon, num.float)
    msg = 'Bounding polygon must be a list of points or an Nx2 array'
    assert len(bounding_polygon.shape) == 2, msg
    assert bounding_polygon.shape[1] == 2, msg

    #
    if interior_regions is not None:
        # Test that all the interior polygons are inside the
        # bounding_poly and throw out those that aren't fully
        # included.
        #Note, Both poly's have the same geo_ref,
        # therefore don't take into account geo_ref
        polygons_inside_boundary = []
        for interior_polygon, res in interior_regions:
            indices = inside_polygon(interior_polygon, bounding_polygon,
                                     closed=True, verbose=False)
            # BUGFIX: was the Python-2-only '<>' operator
            if len(indices) != len(interior_polygon):
                msg = 'Interior polygon %s is not fully inside' \
                      % (str(interior_polygon))
                msg += ' bounding polygon: %s.' % (str(bounding_polygon))

                if fail_if_polygons_outside is True:
                    raise PolygonError(msg)
                else:
                    msg += ' I will ignore it.'
                    log.critical(msg)
            else:
                # Record only those that were fully contained
                polygons_inside_boundary.append([interior_polygon, res])

        interior_regions = polygons_inside_boundary

    if interior_holes is not None:
        # Test that all the interior polygons are inside the bounding_poly
        for interior_polygon in interior_holes:

            # Test that we have a polygon
            if len(num.array(interior_polygon).flat) < 6:
                msg = 'Interior hole polygon %s has too few (<3) points.\n' \
                      % (str(interior_polygon))
                msg = msg + '(Insure that you have specified a LIST of interior hole polygons)'
                raise PolygonError(msg)

            indices = inside_polygon(interior_polygon, bounding_polygon,
                                     closed=True, verbose=False)
            # BUGFIX: was the Python-2-only '<>' operator
            if len(indices) != len(interior_polygon):
                msg = 'Interior polygon %s is outside bounding polygon: %s' \
                      % (str(interior_polygon), str(bounding_polygon))
                raise PolygonError(msg)

    # Resolve geo referencing
    if mesh_geo_reference is None:
        xllcorner = min(bounding_polygon[:, 0])
        yllcorner = min(bounding_polygon[:, 1])

        if poly_geo_reference is None:
            zone = DEFAULT_ZONE
        else:
            zone = poly_geo_reference.get_zone()
            [(xllcorner, yllcorner)] = poly_geo_reference.get_absolute(
                [(xllcorner, yllcorner)])
        # create a geo_ref, based on the llc of the bounding_polygon
        mesh_geo_reference = Geo_reference(xllcorner=xllcorner,
                                           yllcorner=yllcorner,
                                           zone=zone)

    m = Mesh(geo_reference=mesh_geo_reference)

    # build a list of discrete segments from the breakline polygons
    if breaklines is not None:
        points, verts = polylist2points_verts(breaklines)
        m.add_points_and_segments(points, verts)

    # Do bounding polygon
    m.add_region_from_polygon(bounding_polygon,
                              segment_tags=boundary_tags,
                              geo_reference=poly_geo_reference)

    # Find one point inside region automatically
    if interior_regions is not None:
        excluded_polygons = []
        for polygon, res in interior_regions:
            excluded_polygons.append(polygon)
    else:
        excluded_polygons = None

    # Convert bounding poly to absolute values
    # this sort of thing can be fixed with the geo_points class
    if poly_geo_reference is not None:
        bounding_polygon_absolute = \
            poly_geo_reference.get_absolute(bounding_polygon)
    else:
        bounding_polygon_absolute = bounding_polygon

    inner_point = point_in_polygon(bounding_polygon_absolute)
    inner = m.add_region(inner_point[0], inner_point[1])
    inner.setMaxArea(maximum_triangle_area)

    # Do interior regions
    if interior_regions is not None:
        for polygon, res in interior_regions:
            m.add_region_from_polygon(polygon,
                                      max_triangle_area=res,
                                      geo_reference=poly_geo_reference)

    # Do interior holes
    if interior_holes is not None:
        for n, polygon in enumerate(interior_holes):
            try:
                tags = hole_tags[n]
            except:
                tags = {}
            m.add_hole_from_polygon(polygon,
                                    segment_tags=tags,
                                    geo_reference=poly_geo_reference)

    # 22/04/2014
    # Add user-specified point-based regions with max area
    if (regionPtArea is not None):
        for i in range(len(regionPtArea)):
            inner = m.add_region(regionPtArea[i][0], regionPtArea[i][1])
            inner.setMaxArea(regionPtArea[i][2])

    # NOTE (Ole): This was moved here as it is annoying if mesh is always
    # stored irrespective of whether the computation
    # was cached or not. This caused Domain to
    # recompute as it has meshfile as a dependency

    # Decide whether to store this mesh or return it
    if filename is None:
        return m
    else:
        if verbose:
            log.critical("Generating mesh to file '%s'" % filename)

        m.generate_mesh(minimum_triangle_angle=minimum_triangle_angle,
                        verbose=verbose)
        m.export_mesh_file(filename)

        return m
def urs2sts(basename_in, basename_out=None,
            weights=None,
            verbose=False,
            origin=None,
            zone=None,
            central_meridian=None,
            mean_stage=0.0,
            zscale=1.0,
            ordering_filename=None):
    """Convert URS mux2 format for wave propagation to sts format.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    origin is a 3-tuple with geo referenced UTM coordinates
    (zone, easting, northing).

    inputs:
        basename_in: list of source file prefixes.  These are combined
            with the extensions:
            WAVEHEIGHT_MUX2_LABEL = '-z-mux2' for stage
            EAST_VELOCITY_MUX2_LABEL = '-e-mux2' xmomentum
            NORTH_VELOCITY_MUX2_LABEL = '-n-mux2' and ymomentum
            to create a 2D list of mux2 file. The rows are associated
            with each quantity and must have the above extensions; the
            columns are the list of file prefixes.
        ordering_filename: a .txt file name specifying which mux2 gauge
            points are to be stored, indicated by the index of the gauge
            in the ordering file.
            ordering file format:
                1st line:    'index,longitude,latitude\n'
                other lines: index,longitude,latitude
            If None (or the file is empty) all points are taken in the
            order they appear in the mux2 file.

    output:
        basename_out: name of sts file in which mux2 data is stored
            (mandatory; '.sts' is appended if missing).

    NOTE: South is positive in mux files so sign of y-component of
    velocity is reverted.

    NOTE: the Python-2-only ``raise E, msg`` statements have been
    converted to ``raise E(msg)`` so this function parses under
    Python 3 as well.
    """
    import os
    from anuga.file.netcdf import NetCDFFile
    from operator import __and__

    if not isinstance(basename_in, list):
        if verbose:
            log.critical('Reading single source')
        basename_in = [basename_in]

    # This is the value used in the mux file format to indicate NAN data
    # FIXME (Ole): This should be changed everywhere to IEEE NAN when
    # we upgrade to Numpy
    NODATA = 99

    # Check that basename is a list of strings
    if not reduce(__and__, map(lambda z: isinstance(z, basestring),
                               basename_in)):
        msg = 'basename_in must be a string or list of strings'
        raise Exception(msg)

    # Find the number of sources to be used
    numSrc = len(basename_in)

    # A weight must be specified for each source
    if weights is None:
        # Default is equal weighting
        weights = num.ones(numSrc, num.float) / numSrc
    else:
        weights = ensure_numeric(weights)
        msg = 'When combining multiple sources must specify a weight for ' \
              'mux2 source file'
        assert len(weights) == numSrc, msg

    if verbose:
        log.critical('Weights used in urs2sts: %s' % str(weights))

    # Check output filename
    if basename_out is None:
        msg = 'STS filename must be specified as basename_out ' \
              'in function urs2sts'
        raise Exception(msg)

    if basename_out.endswith('.sts'):
        stsname = basename_out
    else:
        stsname = basename_out + '.sts'

    # Create input filenames from basenames and check their existence
    files_in = [[], [], []]
    for files in basename_in:
        files_in[0].append(files + WAVEHEIGHT_MUX2_LABEL),
        files_in[1].append(files + EAST_VELOCITY_MUX2_LABEL)
        files_in[2].append(files + NORTH_VELOCITY_MUX2_LABEL)

    quantities = ['HA', 'UA', 'VA']  # Quantity names used in the MUX2 format
    for i in range(len(quantities)):
        for file_in in files_in[i]:
            if (os.access(file_in, os.R_OK) == 0):
                msg = 'File %s does not exist or is not accessible' % file_in
                raise IOError(msg)

    # Establish permutation array
    if ordering_filename is not None:
        if verbose is True:
            log.critical('Reading ordering file %s' % ordering_filename)

        # Read ordering file
        try:
            fid = open(ordering_filename, 'r')
            file_header = fid.readline().split(',')
            ordering_lines = fid.readlines()
            fid.close()
        except:
            msg = 'Cannot open %s' % ordering_filename
            raise Exception(msg)

        reference_header = 'index, longitude, latitude\n'
        reference_header_split = reference_header.split(',')
        for i in range(3):
            if not file_header[i].strip() == reference_header_split[i].strip():
                msg = 'File must contain header: ' + reference_header
                raise Exception(msg)

        if len(ordering_lines) < 2:
            msg = 'File must contain at least two points'
            raise Exception(msg)

        permutation = [int(line.split(',')[0]) for line in ordering_lines]
        permutation = ensure_numeric(permutation)
    else:
        permutation = None

    # Read MUX2 files
    if (verbose):
        log.critical('reading mux2 file')

    mux = {}
    times_old = 0.0
    latitudes_old = 0.0
    longitudes_old = 0.0
    elevation_old = 0.0
    starttime_old = 0.0

    for i, quantity in enumerate(quantities):
        # For each quantity read the associated list of source mux2 file with
        # extention associated with that quantity
        times, latitudes, longitudes, elevation, mux[quantity], starttime \
            = read_mux2_py(files_in[i], weights, permutation, verbose=verbose)

        # Check that all quantities have consistent time and space information
        if quantity != quantities[0]:
            msg = '%s, %s and %s have inconsistent gauge data' \
                  % (files_in[0], files_in[1], files_in[2])
            assert num.allclose(times, times_old), msg
            assert num.allclose(latitudes, latitudes_old), msg
            assert num.allclose(longitudes, longitudes_old), msg
            assert num.allclose(elevation, elevation_old), msg
            assert num.allclose(starttime, starttime_old), msg
        times_old = times
        latitudes_old = latitudes
        longitudes_old = longitudes
        elevation_old = elevation
        starttime_old = starttime

    # Store timeseries in STS file
    msg = 'File is empty and or clipped region not in file region'
    # BUGFIX: was 'len(latitudes > 0)' which measured the elementwise
    # comparison array rather than testing for a non-empty array.
    assert len(latitudes) > 0, msg

    number_of_points = latitudes.shape[0]       # Number of stations retrieved
    number_of_times = times.shape[0]            # Number of timesteps
    number_of_latitudes = latitudes.shape[0]    # Number latitudes
    number_of_longitudes = longitudes.shape[0]  # Number longitudes

    # The permutation vector of contains original indices
    # as given in ordering file or None in which case points
    # are assigned the trivial indices enumerating them from
    # 0 to number_of_points-1
    if permutation is None:
        permutation = num.arange(number_of_points, dtype=num.int)

    # NetCDF file definition
    outfile = NetCDFFile(stsname, netcdf_mode_w)

    description = 'Converted from URS mux2 files: %s' % basename_in

    # Create new file
    sts = Write_sts()
    sts.store_header(outfile,
                     times + starttime,
                     number_of_points,
                     description=description,
                     verbose=verbose,
                     sts_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn

    x = num.zeros(number_of_points, num.float)  # Easting
    y = num.zeros(number_of_points, num.float)  # Northing

    # Check zone boundaries
    if zone is None:
        refzone, _, _ = redfearn(latitudes[0], longitudes[0],
                                 central_meridian=central_meridian)
    else:
        refzone = zone

    old_zone = refzone
    old_easting = 0.0
    old_northing = 0.0

    for i in range(number_of_points):
        computed_zone, easting, northing = redfearn(latitudes[i],
                                                    longitudes[i],
                                                    zone=zone,
                                                    central_meridian=central_meridian)
        x[i] = easting
        y[i] = northing
        if computed_zone != refzone:
            msg = 'All sts gauges need to be in the same zone. \n'
            msg += 'offending gauge:Zone %d,%.4f, %4f\n' \
                   % (computed_zone, easting, northing)
            msg += 'previous gauge:Zone %d,%.4f, %4f' \
                   % (old_zone, old_easting, old_northing)
            raise Exception(msg)
        old_zone = computed_zone
        old_easting = easting
        old_northing = northing

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    elevation = num.resize(elevation,
                           outfile.variables['elevation'][:].shape)
    outfile.variables['permutation'][:] = permutation.astype(num.int32)  # Opteron 64
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['elevation'][:] = elevation

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose:
        log.critical('Converting quantities')

    for j in range(len(times)):
        for i in range(number_of_points):
            ha = mux['HA'][i, j]
            ua = mux['UA'][i, j]
            va = mux['VA'][i, j]
            if ha == NODATA:
                if verbose:
                    msg = 'Setting nodata value %d to 0 at time = %f, ' \
                          'point = %d' % (ha, times[j], i)
                    log.critical(msg)
                ha = 0.0
                ua = 0.0
                va = 0.0

            w = zscale * ha + mean_stage
            h = w - elevation[i]
            stage[j, i] = w
            xmomentum[j, i] = ua * h
            ymomentum[j, i] = -va * h  # South is positive in mux files

    outfile.close()

    if verbose:
        log.critical('Wrote sts file ' + stsname)
def _fit_to_mesh(point_coordinates,
                 vertex_coordinates=None,
                 triangles=None,
                 mesh=None,
                 point_attributes=None,
                 alpha=DEFAULT_ALPHA,
                 verbose=False,
                 mesh_origin=None,
                 data_origin=None,
                 max_read_lines=None,
                 attribute_name=None,
                 cg_precon='Jacobi',
                 use_c_cg=True):
    """Fit a smooth surface to a triangulation, given data points with
    attributes.

    Inputs:
        point_coordinates: List of coordinate pairs [x, y] of data
            points (or an nx2 numeric array).  This can also be a
            .csv/.txt/.pts file name.
        vertex_coordinates: List of coordinate pairs [xi, eta] of
            points constituting a mesh (or an m x 2 numeric array or a
            geospatial object).  Points may appear multiple times
            (e.g. if vertices have discontinuities).  Only used when no
            mesh instance is supplied.
        triangles: List of 3-tuples (or a numeric array) of integers
            representing indices of all vertices in the mesh.
        mesh: Ready-made mesh instance; takes precedence over
            vertex_coordinates/triangles.
        point_attributes: Vector or array of data at the
            point_coordinates.
        alpha: Smoothing parameter.
        mesh_origin: A geo_reference object or 3-tuples consisting of
            UTM zone, easting and northing.  If specified vertex
            coordinates are assumed to be relative to their respective
            origins.

    Returns the fitted vertex attributes.
    """
    if mesh is None:
        # No mesh supplied — construct one from the raw triangulation.
        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
        # are None
        triangles = ensure_numeric(triangles, num.int)
        vertex_coordinates = ensure_absolute(vertex_coordinates,
                                             geo_reference=mesh_origin)

        if verbose:
            log.critical('_fit_to_mesh: Building mesh')
        mesh = Mesh(vertex_coordinates, triangles)
        # Integrity check skipped: the mesh was just built here.

    fitter = Fit(mesh=mesh,
                 verbose=verbose,
                 alpha=alpha,
                 cg_precon=cg_precon,
                 use_c_cg=use_c_cg)

    # TODO: value checking as done in least squares could be pushed
    # down into Fit (e.g. recording max/min as attributes).
    return fitter.fit(point_coordinates,
                      point_attributes,
                      point_origin=data_origin,
                      max_read_lines=max_read_lines,
                      attribute_name=attribute_name,
                      verbose=verbose)
def sts2sww_mesh(basename_in, basename_out=None,
                 spatial_thinning=1, verbose=False):
    """Convert an sts gauge file of wind/pressure fields to an sww file.

    Reads the gridded gauge data from <basename_in>.sts, optionally
    thins the points spatially, triangulates them and writes
    wind_speed, wind_angle and barometric_pressure time slices to
    <basename_out>.sww (defaults to <basename_in>.sww).

    Bug fixes relative to the previous version:
      * extension checks used `name[:-4] == '.sts'`/'.sww' (comparing
        everything EXCEPT the extension) — now `name[-4:]`, so an input
        already ending in '.sts'/'.sww' is no longer double-suffixed;
      * a basename_out without the '.sww' extension was silently
        ignored in favour of basename_in — it is now used;
      * the verbose 'Output to ...' message was a no-op bare string
        expression — it is now printed.
    """
    from anuga.mesh_engine.mesh_engine import NoTrianglesError
    from anuga.pmesh.mesh import Mesh
    if verbose:
        print("Starting sts2sww_mesh")

    mean_stage = 0.
    zscale = 1.

    # BUGFIX: was basename_in[:-4] == '.sts'
    if basename_in[-4:] == '.sts':
        stsname = basename_in
    else:
        stsname = basename_in + '.sts'

    if verbose:
        print("Reading sts NetCDF file: %s" % stsname)
    infile = NetCDFFile(stsname, netcdf_mode_r)
    cellsize = infile.cellsize
    ncols = infile.ncols
    nrows = infile.nrows
    no_data = infile.no_data
    refzone = infile.zone
    x_origin = infile.xllcorner
    y_origin = infile.yllcorner
    origin = num.array([x_origin, y_origin])
    x = infile.variables['x'][:]
    y = infile.variables['y'][:]
    times = infile.variables['time'][:]
    wind_speed_full = infile.variables['wind_speed'][:]
    wind_angle_full = infile.variables['wind_angle'][:]
    pressure_full = infile.variables['barometric_pressure'][:]
    infile.close()

    number_of_points = nrows * ncols
    points_utm = num.zeros((number_of_points, 2), num.float)
    points_utm[:, 0] = x + x_origin
    points_utm[:, 1] = y + y_origin

    # Select every spatial_thinning-th point but always keep the grid
    # edges.
    # NOTE(review): the first test compares old_div(i, ncols) (a row
    # index) against ncols-1 while the second uses i % nrows — the
    # mixed use of nrows/ncols looks suspicious for non-square grids,
    # but is preserved as-is; confirm against callers before changing.
    thinned_indices = []
    for i in range(number_of_points):
        if (old_div(i, ncols) == 0 or old_div(i, ncols) == ncols - 1
                or (old_div(i, ncols)) % (spatial_thinning) == 0):
            if (i % (spatial_thinning) == 0 or i % nrows == 0
                    or i % nrows == nrows - 1):
                thinned_indices.append(i)

    # Spatial thinning
    points_utm = points_utm[thinned_indices]
    number_of_points = points_utm.shape[0]
    number_of_timesteps = wind_speed_full.shape[0]
    wind_speed = num.empty((number_of_timesteps, number_of_points),
                           dtype=float)
    wind_angle = num.empty((number_of_timesteps, number_of_points),
                           dtype=float)
    barometric_pressure = num.empty((number_of_timesteps, number_of_points),
                                    dtype=float)
    if verbose:
        print("Total number of points: ", nrows * ncols)
        print("Number of thinned points: ", number_of_points)
    for i in range(number_of_timesteps):
        wind_speed[i] = wind_speed_full[i, thinned_indices]
        wind_angle[i] = wind_angle_full[i, thinned_indices]
        barometric_pressure[i] = pressure_full[i, thinned_indices]

    #P.plot(points_utm[:,0],points_utm[:,1],'ro')
    #P.show()

    if verbose:
        print("Generating sww triangulation of gems data")

    mesh = Mesh()
    mesh.add_vertices(points_utm)
    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
    mesh.auto_segment(mesh.shape.get_alpha() * 1.1)
    try:
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
    except NoTrianglesError:
        # This is a bit of a hack, going in and changing the data structure.
        mesh.holes = []
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)

    mesh_dic = mesh.Mesh2MeshList()

    points_utm = ensure_numeric(points_utm)
    assert num.alltrue(ensure_numeric(mesh_dic['generatedpointlist'])
                       == ensure_numeric(points_utm))

    volumes = mesh_dic['generatedtrianglelist']

    # Write sww intro and grid stuff.
    # BUGFIX: honour basename_out even when it lacks the extension;
    # was basename_out[:-4] == '.sww' with basename_in as fallback.
    if basename_out is None:
        swwname = basename_in + '.sww'
    elif basename_out[-4:] == '.sww':
        swwname = basename_out
    else:
        swwname = basename_out + '.sww'

    if verbose:
        # BUGFIX: was a bare string expression that printed nothing
        print('Output to %s' % swwname)

    if verbose:
        print("Writing sww wind and pressure field file")
    outfile = NetCDFFile(swwname, netcdf_mode_w)
    sww = Write_sww([], ['wind_speed', 'wind_angle', 'barometric_pressure'])
    sww.store_header(outfile, times, len(volumes), len(points_utm),
                     verbose=verbose, sww_precision='d')
    outfile.mean_stage = mean_stage
    outfile.zscale = zscale
    sww.store_triangulation(outfile,
                            points_utm,
                            volumes,
                            refzone,
                            new_origin=origin,  # check effect of this line
                            verbose=verbose)

    if verbose:
        print('Converting quantities')

    # Read in a time slice from the sts file and write it to the SWW file
    for i in range(len(times)):
        sww.store_quantities(outfile,
                             slice_index=i,
                             verbose=verbose,
                             wind_speed=wind_speed[i, :],
                             wind_angle=wind_angle[i, :],
                             barometric_pressure=barometric_pressure[i, :],
                             sww_precision=num.float)
        if verbose:
            sww.verbose_quantities(outfile)

    outfile.close()
def get_min_max_indices(latitudes_ref, longitudes_ref,
                        minlat=None, maxlat=None,
                        minlon=None, maxlon=None):
    """Return slicing indices covering the requested lat/long area.

    Think of the latitudes and longitudes as describing a 2d surface.
    The returned indices (lat_min, lat_max, lon_min, lon_max) are, if
    possible, just big enough that slicing with them covers the
    requested min/max area.  (This is not possible when part of the
    requested area lies outside the latitude/longitude extent.)

    Longitudes must be sorted ascending (west to east, e.g. 148 - 151).
    Latitudes may be sorted either ascending or descending.
    """
    # Work on copies so the caller's sequences are left untouched.
    lats = ensure_numeric(latitudes_ref[:])
    lons = ensure_numeric(longitudes_ref[:])

    assert num.allclose(num.sort(lons), lons)

    ascending = True
    if not num.allclose(num.sort(lats), lats):
        # Latitudes run north-to-south: flip them so searchsorted works,
        # and remember to map the indices back at the end.
        ascending = False
        lats = lats[::-1]
        assert num.allclose(num.sort(lats), lats)

    top = len(lats) - 1

    # Lower bounds step one index outward (then clamp at 0) so the
    # returned slice fully contains the requested region.
    if minlat is None:
        lat_lo = 0
    else:
        lat_lo = max(num.searchsorted(lats, minlat) - 1, 0)

    if maxlat is None:
        lat_hi = top
    else:
        lat_hi = min(num.searchsorted(lats, maxlat), top)

    if minlon is None:
        lon_lo = 0
    else:
        lon_lo = max(num.searchsorted(lons, minlon) - 1, 0)

    if maxlon is None:
        lon_hi = len(lons)
    else:
        lon_hi = num.searchsorted(lons, maxlon)

    if not ascending:
        # Translate indices from the flipped array back to the original
        # (descending) ordering.
        lat_lo, lat_hi = top - lat_hi, top - lat_lo

    # The +1 turns inclusive upper indices into Python slice bounds.
    return lat_lo, lat_hi + 1, lon_lo, lon_hi + 1
def lon_lat2grid(long_lat_dep): """ given a list of points that are assumed to be an a grid, return the long's and lat's of the grid. long_lat_dep is an array where each row is a position. The first column is longitudes. The second column is latitudes. The latitude is the fastest varying dimension - in mux files """ LONG = 0 LAT = 1 QUANTITY = 2 long_lat_dep = ensure_numeric(long_lat_dep, num.float) num_points = long_lat_dep.shape[0] this_rows_long = long_lat_dep[0,LONG] # Count the length of unique latitudes i = 0 while long_lat_dep[i,LONG] == this_rows_long and i < num_points: i += 1 # determine the lats and longsfrom the grid lat = long_lat_dep[:i, LAT] long = long_lat_dep[::i, LONG] lenlong = len(long) lenlat = len(lat) msg = 'Input data is not gridded' assert num_points % lenlat == 0, msg assert num_points % lenlong == 0, msg # Test that data is gridded for i in range(lenlong): msg = 'Data is not gridded. It must be for this operation' first = i * lenlat last = first + lenlat assert num.allclose(long_lat_dep[first:last,LAT], lat), msg assert num.allclose(long_lat_dep[first:last,LONG], long[i]), msg msg = 'Out of range latitudes/longitudes' for l in lat:assert -90 < l < 90 , msg for l in long:assert -180 < l < 180 , msg # Changing quantity from lat being the fastest varying dimension to # long being the fastest varying dimension # FIXME - make this faster/do this a better way # use numeric transpose, after reshaping the quantity vector quantity = num.zeros(num_points, num.float) for lat_i, _ in enumerate(lat): for long_i, _ in enumerate(long): q_index = lat_i*lenlong + long_i lld_index = long_i*lenlat + lat_i temp = long_lat_dep[lld_index, QUANTITY] quantity[q_index] = temp return long, lat, quantity
def __init__(self,
             domain,
             end_points=None,
             exchange_lines=None,
             enquiry_points=None,
             invert_elevations=None,
             width=None,
             height=None,
             diameter=None,
             z1=None,  # added by PM 4/10/2013
             z2=None,  # added by PM 4/10/2013
             blockage=None,  # added by PM 24/7/2016
             apron=None,
             manning=None,
             enquiry_gap=None,
             use_momentum_jet=False,
             zero_outflow_momentum=True,
             use_old_momentum_method=True,
             always_use_Q_wetdry_adjustment=True,
             force_constant_inlet_elevations=False,
             description=None,
             label=None,
             structure_type=None,
             logging=None,
             verbose=None):
    """Set up a two-inlet culvert/structure operator on the domain.

    exchange_lines define the input lines for each inlet.

    If end_points is None, the culvert_vector is calculated in the
    direction from the centre of exchange_lines[0] to the centre of
    exchange_lines[1].

    If end_points is not None, the culvert_vector is the unit vector in
    the direction end_points[1] - end_points[0].

    Exactly one of end_points / exchange_lines must be supplied.
    """
    anuga.Operator.__init__(self, domain)

    self.master_proc = 0
    self.end_points = ensure_numeric(end_points)
    self.exchange_lines = ensure_numeric(exchange_lines)
    self.enquiry_points = ensure_numeric(enquiry_points)
    self.invert_elevations = ensure_numeric(invert_elevations)

    # BUG FIX: was '== None', which on a numpy array performs an
    # elementwise comparison and makes the assert ambiguous/fragile.
    # Identity tests are the correct (and intended) check here.
    assert self.end_points is None or self.exchange_lines is None

    # Fall back through the geometric parameters so that at least a
    # width is defined.
    if height is None:
        height = width
    if width is None:
        width = diameter
    if apron is None:
        apron = width

    assert width is not None

    self.width = width
    self.height = height
    self.diameter = diameter
    self.z1 = z1  # added by PM 4/10/2013
    self.z2 = z2  # added by PM 4/10/2013
    self.blockage = blockage  # added by PM 24/7/2016
    self.apron = apron
    self.manning = manning
    self.enquiry_gap = enquiry_gap
    self.use_momentum_jet = use_momentum_jet
    self.zero_outflow_momentum = zero_outflow_momentum
    if use_momentum_jet and zero_outflow_momentum:
        msg = "Can't have use_momentum_jet and zero_outflow_momentum both True"
        raise Exception(msg)
    self.use_old_momentum_method = use_old_momentum_method
    self.always_use_Q_wetdry_adjustment = always_use_Q_wetdry_adjustment

    if description is None:
        self.description = ' '
    else:
        self.description = description

    if label is None:
        self.label = "structure_%g" % Structure_operator.counter
    else:
        self.label = label + '_%g' % Structure_operator.counter

    if structure_type is None:
        self.structure_type = 'generic structure'
    else:
        self.structure_type = structure_type

    self.verbose = verbose

    # Keep count of structures
    Structure_operator.counter += 1

    # Slots for recording current statistics
    self.accumulated_flow = 0.0
    self.discharge = 0.0
    self.discharge_abs_timemean = 0.0
    self.velocity = 0.0
    self.outlet_depth = 0.0
    self.delta_total_energy = 0.0
    self.driving_energy = 0.0

    # Derive exchange lines / enquiry points from whichever geometry
    # specification was given.
    if exchange_lines is not None:
        self.__process_skew_culvert()
    elif end_points is not None:
        self.__process_non_skew_culvert()
    else:
        # BUG FIX: was Py2-only 'raise Exception, msg' syntax; the call
        # form is identical in Py2 and valid in Py3.
        raise Exception('Define either exchange_lines or end_points')

    self.inlets = []

    # --- First inlet -----------------------------------------------------
    line0 = self.exchange_lines[0]  # self.inlet_lines[0]
    if self.apron is None:
        poly0 = line0
    else:
        # Extend the exchange line outward by the apron length to form
        # the inlet polygon.
        offset = -self.apron * self.outward_vector_0
        poly0 = num.array([line0[0], line0[1],
                           line0[1] + offset, line0[0] + offset])

    if self.invert_elevations is None:
        invert_elevation0 = None
    else:
        invert_elevation0 = self.invert_elevations[0]

    enquiry_point0 = self.enquiry_points[0]

    self.inlets.append(inlet_enquiry.Inlet_enquiry(
        self.domain,
        poly0,
        enquiry_point0,
        invert_elevation=invert_elevation0,
        outward_culvert_vector=self.outward_vector_0,
        verbose=self.verbose))

    if force_constant_inlet_elevations:
        # Try to enforce a constant inlet elevation
        inlet_global_elevation = self.inlets[-1].get_average_elevation()
        self.inlets[-1].set_elevations(inlet_global_elevation)

    tris_0 = self.inlets[0].triangle_indices

    # --- Second inlet ----------------------------------------------------
    line1 = self.exchange_lines[1]
    if self.apron is None:
        poly1 = line1
    else:
        offset = -self.apron * self.outward_vector_1
        poly1 = num.array([line1[0], line1[1],
                           line1[1] + offset, line1[0] + offset])

    if self.invert_elevations is None:
        invert_elevation1 = None
    else:
        invert_elevation1 = self.invert_elevations[1]

    enquiry_point1 = self.enquiry_points[1]

    self.inlets.append(inlet_enquiry.Inlet_enquiry(
        self.domain,
        poly1,
        enquiry_point1,
        invert_elevation=invert_elevation1,
        outward_culvert_vector=self.outward_vector_1,
        verbose=self.verbose))

    if force_constant_inlet_elevations:
        # Try to enforce a constant inlet elevation
        inlet_global_elevation = self.inlets[-1].get_average_elevation()
        self.inlets[-1].set_elevations(inlet_global_elevation)

    tris_1 = self.inlets[1].triangle_indices

    self.set_logging(logging)
def Weir_orifice_trapezoid_operator(domain,
                                    losses,
                                    width,
                                    blockage=0.0,
                                    barrels=1.0,
                                    z1=None,
                                    z2=None,
                                    height=None,
                                    end_points=None,
                                    exchange_lines=None,
                                    enquiry_points=None,
                                    invert_elevations=None,
                                    #culvert_slope=None,
                                    apron=0.1,
                                    manning=0.013,
                                    enquiry_gap=0.0,
                                    smoothing_timescale=0.0,
                                    use_momentum_jet=True,
                                    use_velocity_head=True,
                                    description=None,
                                    label=None,
                                    structure_type='weir_orifice_trapezoid',
                                    logging=False,
                                    verbose=False,
                                    master_proc=0,
                                    procs=None):
    """Factory for a (possibly parallel) weir orifice trapezoid operator.

    On a serial domain this simply constructs the sequential operator.
    On a Parallel_domain the master process computes the exchange lines
    and enquiry points, broadcasts them via pypar, determines which
    processes hold each inlet, and returns a
    Parallel_Weir_orifice_trapezoid_operator on the participating
    processes (None on all others).

    NOTE(review): Py2-only 'print' statements and 'raise' syntax in the
    original have been converted to the call forms, which behave
    identically under Py2 and are valid Py3 -- consistent with the
    print() calls used elsewhere in this file.
    """
    # If not parallel domain then allocate serial Weir orifice trapezoid
    # operator.
    if isinstance(domain, Parallel_domain) is False:
        if verbose:
            print("Allocating non parallel weir orifice trapzezoid operator .....")
        return anuga.structures.weir_orifice_trapezoid_operator.Weir_orifice_trapezoid_operator(
            domain=domain,
            losses=losses,
            width=width,
            height=height,
            blockage=blockage,
            barrels=barrels,
            z1=z1,
            z2=z2,
            #culvert_slope=culvert_slope,
            end_points=end_points,
            exchange_lines=exchange_lines,
            enquiry_points=enquiry_points,
            invert_elevations=invert_elevations,
            apron=apron,
            manning=manning,
            enquiry_gap=enquiry_gap,
            smoothing_timescale=smoothing_timescale,
            use_momentum_jet=use_momentum_jet,
            use_velocity_head=use_velocity_head,
            description=description,
            label=label,
            structure_type=structure_type,
            logging=logging,
            verbose=verbose)

    import pypar
    if procs is None:
        procs = range(0, pypar.size())
    myid = pypar.rank()

    end_points = ensure_numeric(end_points)
    exchange_lines = ensure_numeric(exchange_lines)
    enquiry_points = ensure_numeric(enquiry_points)

    if height is None:
        height = width

    diameter = None

    if apron is None:
        apron = width

    # Calculate location of inlet enquiry points and exchange lines.
    # The master computes them and sends them to every other process.
    if myid == master_proc:
        if exchange_lines is not None:
            exchange_lines_tmp = exchange_lines
            enquiry_points_tmp = __process_skew_culvert(
                exchange_lines, end_points, enquiry_points, apron, enquiry_gap)
            for i in procs:
                if i == master_proc:
                    continue
                pypar.send(enquiry_points_tmp, i)
        elif end_points is not None:
            exchange_lines_tmp, enquiry_points_tmp = __process_non_skew_culvert(
                end_points, width, enquiry_points, apron, enquiry_gap)
            for i in procs:
                if i == master_proc:
                    continue
                pypar.send(exchange_lines_tmp, i)
                pypar.send(enquiry_points_tmp, i)
        else:
            raise Exception('Define either exchange_lines or end_points')
    else:
        if exchange_lines is not None:
            exchange_lines_tmp = exchange_lines
            enquiry_points_tmp = pypar.receive(master_proc)
        elif end_points is not None:
            exchange_lines_tmp = pypar.receive(master_proc)
            enquiry_points_tmp = pypar.receive(master_proc)

    # Determine processors associated with first inlet
    line0 = exchange_lines_tmp[0]
    enquiry_point0 = enquiry_points_tmp[0]

    alloc0, inlet0_master_proc, inlet0_procs, enquiry0_proc = \
        allocate_inlet_procs(domain, line0,
                             enquiry_point=enquiry_point0,
                             master_proc=master_proc,
                             procs=procs, verbose=verbose)

    # Determine processors associated with second inlet
    line1 = exchange_lines_tmp[1]
    enquiry_point1 = enquiry_points_tmp[1]

    alloc1, inlet1_master_proc, inlet1_procs, enquiry1_proc = \
        allocate_inlet_procs(domain, line1,
                             enquiry_point=enquiry_point1,
                             master_proc=master_proc,
                             procs=procs, verbose=verbose)

    structure_procs = list(set(inlet0_procs + inlet1_procs))
    inlet_master_proc = [inlet0_master_proc, inlet1_master_proc]
    inlet_procs = [inlet0_procs, inlet1_procs]
    enquiry_proc = [enquiry0_proc, enquiry1_proc]

    if myid == master_proc and verbose:
        print("Parallel Weir Orifice Trapezoid Operator =============================")
        print("Structure Master Proc is P" + str(inlet0_master_proc))
        print("Structure Procs are P" + str(structure_procs))
        print("Inlet Master Procs are P" + str(inlet_master_proc))
        print("Inlet Procs are P" + str(inlet_procs[0]) + " and " + str(inlet_procs[1]))
        print("Inlet Enquiry Procs are P" + str(enquiry_proc))
        print("Enquiry Points are " + str(enquiry_point0) + " and " + str(enquiry_point1))
        print("Inlet Exchange Lines are " + str(line0) + " and " + str(line1))
        print("========================================================")

    # Only processes that hold part of either inlet build the operator.
    if alloc0 or alloc1:
        return Parallel_Weir_orifice_trapezoid_operator(
            domain=domain,
            losses=losses,
            width=width,
            height=height,
            blockage=blockage,
            barrels=barrels,
            z1=z1,
            z2=z2,
            #culvert_slope=culvert_slope,
            end_points=end_points,
            exchange_lines=exchange_lines,
            enquiry_points=enquiry_points,
            invert_elevations=invert_elevations,
            apron=apron,
            manning=manning,
            enquiry_gap=enquiry_gap,
            smoothing_timescale=smoothing_timescale,
            use_momentum_jet=use_momentum_jet,
            use_velocity_head=use_velocity_head,
            description=description,
            label=label,
            structure_type=structure_type,
            logging=logging,
            verbose=verbose,
            master_proc=inlet0_master_proc,
            procs=structure_procs,
            inlet_master_proc=inlet_master_proc,
            inlet_procs=inlet_procs,
            enquiry_proc=enquiry_proc)
    else:
        return None