def test_fit_to_mesh_w_georef(self):
    """Simple check that georef works at the fit_to_mesh level."""
    from anuga.coordinate_transforms.geo_reference import Geo_reference

    # Mesh expressed in its own (shifted) coordinate frame.
    mesh_verts = [[0.76, 0.76], [0.76, 5.76], [5.76, 0.76]]
    mesh_tris = [[0, 2, 1]]
    mesh_geo = Geo_reference(56, -0.76, -0.76)

    # Data points expressed in a different frame.
    pts = [[201.0, 401.0], [201.0, 403.0], [203.0, 401.0]]
    attrs = [2, 4, 4]
    data_geo = Geo_reference(56, -200, -400)

    # Fit: both origins are supplied, so the two frames should be
    # reconciled internally before fitting.
    fitted = fit_to_mesh(pts,
                         vertex_coordinates=mesh_verts,
                         triangles=mesh_tris,
                         point_attributes=attrs,
                         data_origin=data_geo.get_origin(),
                         mesh_origin=mesh_geo.get_origin(),
                         alpha=0)

    assert num.allclose(fitted, [0, 5, 5])
def concept_create_mesh_from_regions_with_ungenerate(self):
    """Concept test: build a mesh from georeferenced regions, then merge
    in segments from an 'ungenerate' file before triangulating."""
    x = 0
    y = 0
    mesh_geo = geo_reference = Geo_reference(56, x, y)

    # These are the absolute values
    polygon_absolute = [[0, 0], [100, 0], [100, 100], [0, 100]]
    x_p = -10
    y_p = -40
    geo_ref_poly = Geo_reference(56, x_p, y_p)
    # Polygon expressed relative to geo_ref_poly.
    polygon = geo_ref_poly.change_points_geo_ref(polygon_absolute)

    boundary_tags = {'walls': [0, 1], 'bom': [2]}

    # Two interior regions, also made relative to geo_ref_poly.
    inner1_polygon_absolute = [[10, 10], [20, 10], [20, 20], [10, 20]]
    inner1_polygon = geo_ref_poly.\
        change_points_geo_ref(inner1_polygon_absolute)

    inner2_polygon_absolute = [[30, 30], [40, 30], [40, 40], [30, 40]]
    inner2_polygon = geo_ref_poly.\
        change_points_geo_ref(inner2_polygon_absolute)

    max_area = 10000000
    interior_regions = [(inner1_polygon, 5), (inner2_polygon, 10)]
    m = create_mesh_from_regions(polygon,
                                 boundary_tags,
                                 max_area,
                                 interior_regions=interior_regions,
                                 poly_geo_reference=geo_ref_poly,
                                 mesh_geo_reference=mesh_geo)

    m.export_mesh_file('a_test_mesh_iknterface.tsh')

    # Write an ungenerate-format file: two polyline blocks terminated
    # by END, then a final END.
    fileName = tempfile.mktemp('.txt')
    file = open(fileName, 'w')
    file.write(' 1 ?? ??\n\
 90.0 90.0\n\
 81.0 90.0\n\
 81.0 81.0\n\
 90.0 81.0\n\
 90.0 90.0\n\
 END\n\
 2 ?? ??\n\
 10.0 80.0\n\
 10.0 90.0\n\
 20.0 90.0\n\
 10.0 80.0\n\
 END\n\
 END\n')
    file.close()

    # Merge the ungenerate segments into the mesh, tagged 'wall'.
    m.import_ungenerate_file(fileName, tag='wall')
    os.remove(fileName)
    m.generate_mesh(maximum_triangle_area=max_area, verbose=False)
    m.export_mesh_file('b_test_mesh_iknterface.tsh')
def test_triangulation_2_geo_refs(self):
    """Store a triangulation whose points and mesh carry two different
    georeferences, then check the sww file round-trips correctly."""
    filename = tempfile.mktemp("_data_manager.sww")
    outfile = NetCDFFile(filename, netcdf_mode_w)
    points_utm = num.array([[0., 0.], [1., 1.], [0., 1.]])
    volumes = [[0, 1, 2]]
    elevation = [0, 1, 2]
    new_origin = Geo_reference(56, 1, 1)
    points_georeference = Geo_reference(56, 0, 0)
    # Make the stored points relative to points_georeference.
    points_utm = points_georeference.change_points_geo_ref(points_utm)
    times = [0, 10]
    number_of_volumes = len(volumes)
    number_of_points = len(points_utm)

    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile, times, number_of_volumes, number_of_points,
                     description='fully sick testing',
                     verbose=self.verbose,
                     sww_precision=netcdf_float)
    sww.store_triangulation(outfile, points_utm, volumes, elevation,
                            new_origin=new_origin,
                            points_georeference=points_georeference,
                            verbose=self.verbose)
    outfile.close()

    # Read back: the file's georeference should be new_origin and the
    # coordinates should map back to the original absolute points.
    fid = NetCDFFile(filename)
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    results_georef = Geo_reference()
    results_georef.read_NetCDF(fid)
    assert results_georef == new_origin
    fid.close()

    absolute = Geo_reference(56, 0, 0)
    # Py3 fix: map(None, x, y) is Python-2-only; pair up with zip instead.
    assert num.allclose(
        num.array(
            absolute.change_points_geo_ref(list(zip(x, y)), new_origin)),
        points_utm)

    os.remove(filename)
def convert_from_latlon_to_utm(points=None,
                               latitudes=None,
                               longitudes=None,
                               false_easting=None,
                               false_northing=None):
    """Convert latitude and longitude data to UTM as a list of coordinates.

    Input
    points: list of points given in decimal degrees (latitude, longitude) or
    latitudes: list of latitudes and
    longitudes: list of longitudes
    false_easting (optional)
    false_northing (optional)

    Output
    points: List of converted points
    zone:   Common UTM zone for converted points

    Notes
    Assume the false_easting and false_northing are the same for each list.
    If points end up in different UTM zones, an ANUGAerror is thrown.
    """
    # Accumulates the common zone; reconcile_zones raises if points
    # fall in different zones.
    reference_geo = Geo_reference()

    if points is None:
        assert len(latitudes) == len(longitudes)
        points = list(zip(latitudes, longitudes))

    converted = []
    for point in points:
        zone, easting, northing = redfearn(float(point[0]),
                                           float(point[1]),
                                           false_easting=false_easting,
                                           false_northing=false_northing)
        reference_geo.reconcile_zones(Geo_reference(zone))
        converted.append([easting, northing])

    return converted, reference_geo.get_zone()
def test_triangulationII(self):
    """Round-trip a triangulation with no explicit origin: the sww file
    should record a default georeference of (zone=None, 0, 0)."""
    filename = tempfile.mktemp("_data_manager.sww")
    outfile = NetCDFFile(filename, netcdf_mode_w)

    points_utm = num.array([[0., 0.], [1., 1.], [0., 1.]])
    volumes = [[0, 1, 2]]
    elevation = [0, 1, 2]
    new_origin = None
    times = [0, 10]

    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile, times, len(volumes), len(points_utm),
                     description='fully sick testing',
                     verbose=self.verbose,
                     sww_precision=netcdf_float)
    sww.store_triangulation(outfile, points_utm, volumes,
                            new_origin=new_origin,
                            verbose=self.verbose)
    sww.store_static_quantities(outfile, elevation=elevation)
    outfile.close()

    # Read the file back and verify both georeference and coordinates.
    fid = NetCDFFile(filename)
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    results_georef = Geo_reference()
    results_georef.read_NetCDF(fid)
    assert results_georef == Geo_reference(zone=None,
                                           xllcorner=0,
                                           yllcorner=0)
    fid.close()

    assert num.allclose(num.array(list(zip(x, y))), points_utm)

    os.remove(filename)
def points_needed(seg, ll_lat, ll_long, grid_spacing,
                  lat_amount, long_amount, zone,
                  isSouthHemisphere):
    """Return the grid points (lat, long) needed to interpolate a segment.

    seg is two points, in UTM.
    Returns a list of (lat, long) tuples covering the segment's bounding
    box (plus a buffer), filtered by keep_point to drop points too far
    from the segment.

    lat_amount and long_amount are accepted for interface compatibility
    but are not used here.
    """
    # (Removed unused 'from math import sqrt' import.)
    geo_reference = Geo_reference(zone=zone)
    geo = Geospatial_data(seg, geo_reference=geo_reference)
    seg_lat_long = geo.get_data_points(as_lat_long=True,
                                       isSouthHemisphere=isSouthHemisphere)

    # 1.415 = 2^0.5, rounded up - worst-case diagonal grid step.
    sqrt_2_rounded_up = 1.415
    buffer = sqrt_2_rounded_up * grid_spacing

    # Buffered bounding box of the segment, in lat/long.
    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer

    # Grid-row indices covering the box: round min up, max down.
    first_row = old_div((min_long - ll_long), grid_spacing)
    first_row_long = int(round(first_row + 0.5))     # round up
    last_row = old_div((max_long - ll_long), grid_spacing)
    last_row_long = int(round(last_row))             # round down

    first_row = old_div((min_lat - ll_lat), grid_spacing)
    first_row_lat = int(round(first_row + 0.5))      # round up
    last_row = old_div((max_lat - ll_lat), grid_spacing)
    last_row_lat = int(round(last_row))              # round down

    # Distance cutoff used by keep_point; presumably converts the
    # grid spacing (degrees) to metres - TODO confirm the constant.
    max_distance = 157147.4112 * grid_spacing
    points_lat_long = []

    # Create a list of the lat long points to include, dropping any that
    # keep_point judges too far from the segment.
    for index_lat in range(first_row_lat, last_row_lat + 1):
        for index_long in range(first_row_long, last_row_long + 1):
            lat = ll_lat + index_lat * grid_spacing
            long = ll_long + index_long * grid_spacing
            if keep_point(lat, long, seg, max_distance):
                points_lat_long.append((lat, long))  # must be hashable

    return points_lat_long
def test_fit_to_mesh_file3(self):
    """Fit point attributes to a mesh via files: write a .tsh and a points
    .csv, run fit_to_mesh_file, and check the fitted vertex attributes."""
    from anuga.load_mesh.loadASCII import import_mesh_file, \
         export_mesh_file
    import tempfile
    import os

    # create a .tsh file, no user outline
    mesh_dic = {}
    mesh_dic['vertices'] = [[0.76, 0.76],
                            [0.76, 5.76],
                            [5.76, 0.76]]
    mesh_dic['triangles'] = [[0, 2, 1]]
    mesh_dic['segments'] = [[0, 1], [2, 0], [1, 2]]
    mesh_dic['triangle_tags'] = ['']
    mesh_dic['vertex_attributes'] = [[], [], []]
    # NOTE(review): key is misspelled ('vertiex'); the assertion below reads
    # the correctly spelled key from the re-imported mesh, so this may be
    # deliberate or simply ignored by export - confirm before fixing.
    mesh_dic['vertiex_attribute_titles'] = []
    mesh_dic['triangle_neighbors'] = [[-1, -1, -1]]
    mesh_dic['segment_tags'] = ['external', 'external', 'external']
    # Georeference shifts the mesh so data points land inside it.
    mesh_dic['geo_reference'] = Geo_reference(56,-0.76,-0.76)
    mesh_file = tempfile.mktemp(".tsh")
    export_mesh_file(mesh_file,mesh_dic)

    # create a points .csv file
    point_file = tempfile.mktemp(".csv")
    fd = open(point_file,'w')
    fd.write("x,y, elevation, stage \n\
1.0, 1.0,2.,4 \n\
1.0, 3.0,4,8 \n\
3.0,1.0,4.,8 \n")
    fd.close()

    mesh_output_file = tempfile.mktemp(".tsh")
    # alpha=0 disables smoothing so the fit is exact.
    fit_to_mesh_file(mesh_file,
                     point_file,
                     mesh_output_file,
                     alpha = 0.0)

    # load in the .tsh file we just wrote
    mesh_dic = import_mesh_file(mesh_output_file)

    ans =[[0.0, 0.0],
          [5.0, 10.0],
          [5.0,10.0]]
    assert num.allclose(mesh_dic['vertex_attributes'],ans)

    self.assertTrue(mesh_dic['vertex_attribute_titles'] ==
                    ['elevation','stage'],
                    'test_fit_to_mesh_file failed')

    #clean up
    os.remove(mesh_file)
    os.remove(point_file)
    os.remove(mesh_output_file)
def test_get_vertex_coordinates_with_geo_ref(self):
    """Check get_vertex_coordinates against a non-trivial georeference,
    in both relative and absolute form, and that repeat calls agree."""
    x0 = 314036.58727982
    y0 = 6224951.2960092
    geo = Geo_reference(56, x0, y0)

    # Node coordinates relative to the georeference origin.
    a = [0.0, 0.0]
    b = [0.0, 2.0]
    c = [2.0, 0.0]
    d = [0.0, 4.0]
    e = [2.0, 2.0]
    f = [4.0, 0.0]
    nodes = num.array([a, b, c, d, e, f])
    nodes_absolute = geo.get_absolute(nodes)

    # bac, bce, ecf, dbe
    triangles = num.array([[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]],
                          num.int)
    domain = General_mesh(nodes, triangles, geo_reference=geo)

    # Relative coordinates of triangle 0 (bac).
    verts = domain.get_vertex_coordinates(triangle_id=0)    # bac
    msg = ("num.array([b,a,c])=\n%s\nshould be close to 'verts'=\n%s"
           % (str(num.array([b, a, c])), str(verts)))
    self.assertTrue(num.allclose(num.array([b, a, c]), verts), msg)

    # Ask a second time - result should be identical.
    verts = domain.get_vertex_coordinates(triangle_id=0)
    msg = ("num.array([b,a,c])=\n%s\nshould be close to 'verts'=\n%s"
           % (str(num.array([b, a, c])), str(verts)))
    self.assertTrue(num.allclose(num.array([b, a, c]), verts), msg)

    # Same triangle in absolute coordinates.
    verts = domain.get_vertex_coordinates(triangle_id=0, absolute=True)
    msg = ("num.array([...])=\n%s\nshould be close to 'verts'=\n%s"
           % (str(
               num.array([
                   nodes_absolute[1], nodes_absolute[0], nodes_absolute[2]
               ])), str(verts)))
    self.assertTrue(
        num.allclose(
            num.array(
                [nodes_absolute[1], nodes_absolute[0], nodes_absolute[2]]),
            verts), msg)

    # Absolute query repeated - must also be stable.
    verts = domain.get_vertex_coordinates(triangle_id=0, absolute=True)
    msg = ("num.array([...])=\n%s\nshould be close to 'verts'=\n%s"
           % (str(
               num.array([
                   nodes_absolute[1], nodes_absolute[0], nodes_absolute[2]
               ])), str(verts)))
    self.assertTrue(
        num.allclose(
            num.array(
                [nodes_absolute[1], nodes_absolute[0], nodes_absolute[2]]),
            verts), msg)
def test_fit_and_interpolation_with_different_origins(self):
    """Fit a surface to one set of points. Then interpolate that surface
    using another set of points.
    This test tests situtaion where points and mesh belong to a different
    coordinate system as defined by origin.
    """
    # Mesh used to represent the fitted function.
    a = [0.0, 0.0]
    b = [0.0, 2.0]
    c = [2.0, 0.0]
    d = [0.0, 4.0]
    e = [2.0, 2.0]
    f = [4.0, 0.0]
    points = [a, b, c, d, e, f]

    # bac, bce, ecf, dbe, daf, dae
    triangles = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]

    # Datapoints to fit from.
    data_points1 = [[0.66666667, 0.66666667],
                    [1.33333333, 1.33333333],
                    [2.66666667, 0.66666667],
                    [0.66666667, 2.66666667],
                    [0.0, 1.0],
                    [0.0, 3.0],
                    [1.0, 0.0],
                    [1.0, 1.0],
                    [1.0, 2.0],
                    [1.0, 3.0],
                    [2.0, 1.0],
                    [3.0, 0.0],
                    [3.0, 1.0]]

    # First check that things are OK when using the same origin.
    mesh_origin = (56, 290000, 618000)    # zone, easting, northing
    data_origin = (56, 290000, 618000)

    # Fit surface to the georeferenced mesh.
    interp = Fit(points, triangles, alpha=0.0, mesh_origin=mesh_origin)
    data_geo_spatial = Geospatial_data(
        data_points1,
        geo_reference=Geo_reference(56, 290000, 618000))
    z = linear_function(data_points1)      # example z-values
    f = interp.fit(data_geo_spatial, z)    # fitted values at vertices

    # Shift the datapoints in place according to the origin difference
    # (zero here, so values are unchanged).
    for point in data_points1:
        point[0] += mesh_origin[1] - data_origin[1]
        point[1] += mesh_origin[2] - data_origin[2]

    # Fit again without any origin; result must match.
    interp = Fit(points, triangles, alpha=0.0)
    f1 = interp.fit(data_points1, z)       # same z as before

    assert num.allclose(f, f1), 'Fit should have been unaltered'
def test_assert_index_in_nodes(self):
    """test_assert_index_in_nodes - Test that node indices in triangles
    are within nodes array.
    """
    x0 = 314036.58727982
    y0 = 6224951.2960092
    geo = Geo_reference(56, x0, y0)

    nodes = num.array([[0.0, 0.0],     # a
                       [0.0, 2.0],     # b
                       [2.0, 0.0],     # c
                       [0.0, 4.0],     # d
                       [2.0, 2.0],     # e
                       [4.0, 0.0]])    # f
    nodes_absolute = geo.get_absolute(nodes)

    # Max node index is 5; using 5 should succeed.
    General_mesh(nodes,
                 num.array([[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]),
                 geo_reference=geo)

    # Reversed first triangle gives a negative area - should fail.
    self.assertRaises(AssertionError, General_mesh,
                      nodes,
                      num.array([[0, 1, 2], [1, 2, 4], [4, 2, 5],
                                 [3, 1, 4]]),
                      geo_reference=geo)

    # Index 6 is just out of bounds - should fail.
    self.assertRaises(AssertionError, General_mesh,
                      nodes,
                      num.array([[1, 6, 2], [1, 2, 4], [4, 2, 5],
                                 [3, 1, 4]]),
                      geo_reference=geo)

    # Index 10 is far out of bounds - should also fail.
    self.assertRaises(AssertionError, General_mesh,
                      nodes,
                      num.array([[1, 10, 2], [1, 2, 4], [4, 2, 5],
                                 [3, 1, 4]]),
                      geo_reference=geo)
def test_get_node(self):
    """test_get_triangles_and_vertices_per_node -
    Test that tuples of triangle, vertex can be extracted
    from inverted triangles structure
    """
    x0 = 314036.58727982
    y0 = 6224951.2960092
    geo = Geo_reference(56, x0, y0)

    a = [0.0, 0.0]
    b = [0.0, 2.0]
    c = [2.0, 0.0]
    d = [0.0, 4.0]
    e = [2.0, 2.0]
    f = [4.0, 0.0]
    nodes = num.array([a, b, c, d, e, f])
    nodes_absolute = geo.get_absolute(nodes)

    # bac, bce, ecf, dbe
    triangles = num.array([[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]])
    domain = General_mesh(nodes, triangles, geo_reference=geo)

    # Relative coordinates: query twice to check the result is stable.
    for _ in range(2):
        node = domain.get_node(2)
        msg = ('\nc=%s\nnode=%s' % (str(c), str(node)))
        self.assertTrue(num.alltrue(c == node), msg)

    # Absolute coordinates: again query twice.
    for _ in range(2):
        node = domain.get_node(2, absolute=True)
        msg = ('\nnodes_absolute[2]=%s\nnode=%s'
               % (str(nodes_absolute[2]), str(node)))
        self.assertTrue(num.alltrue(nodes_absolute[2] == node), msg)
def __init__(self, regions, default=0.0, geo_reference=None):
    """Create instance of a polygon function.

    regions        a list of (polygon, value) pairs, where each polygon is
                   a list of (x, y) tuples
    default        value or function returning value for points outside
                   all polygons
    geo_reference  optional georeference; polygon points are made relative
                   to it (a default Geo_reference is used if not given)
    """
    # regions must be a sequence.
    try:
        len(regions)
    except TypeError:
        msg = ('Polygon_function takes a list of pairs (polygon, value).'
               'Got %s' % str(regions))
        raise_(Exception, msg)

    first_region = regions[0]

    if isinstance(first_region, basestring):
        msg = ('You passed in a list of text values into polygon_function '
               'instead of a list of pairs (polygon, value): "%s"'
               % str(first_region))
        raise_(Exception, msg)

    # Each entry must itself be a (polygon, value) pair.
    try:
        num_region_components = len(first_region)
    except TypeError:
        # Bug fix: the old message formatted num_region_components, which
        # is unbound when len() fails, so the handler itself raised a
        # NameError instead of the intended error. Report the offending
        # entry instead.
        msg = ('Polygon_function takes a list of pairs (polygon, value). '
               'Got %s' % str(first_region))
        raise_(Exception, msg)

    msg = ('Each entry in regions have two components: (polygon, value). '
           'I got %s' % str(num_region_components))
    assert num_region_components == 2, msg

    if geo_reference is None:
        from anuga.coordinate_transforms.geo_reference import Geo_reference
        geo_reference = Geo_reference()

    self.default = default

    # Make points in polygons relative to geo_reference
    self.regions = []
    for polygon, value in regions:
        georeffed_poly = geo_reference.change_points_geo_ref(polygon)
        self.regions.append((georeffed_poly, value))
def test_get_edge_midpoint_coordinates_with_geo_ref(self):
    """Check get_edge_midpoint_coordinates with a non-trivial
    georeference, in both relative and absolute form."""
    x0 = 314036.58727982
    y0 = 6224951.2960092
    geo = Geo_reference(56, x0, y0)

    # Node coordinates relative to the georeference origin.
    a = num.array([0.0, 0.0])
    b = num.array([0.0, 2.0])
    c = num.array([2.0, 0.0])
    d = num.array([0.0, 4.0])
    e = num.array([2.0, 2.0])
    f = num.array([4.0, 0.0])
    nodes = num.array([a, b, c, d, e, f])
    nodes_absolute = geo.get_absolute(nodes)

    # bac, bce, ecf, dbe
    triangles = num.array([[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]],
                          num.int)
    domain = General_mesh(nodes, triangles, geo_reference=geo)

    # Relative midpoints of triangle 0 (bac): expected order is
    # 1/2(a+c), 1/2(b+c), 1/2(a+b).
    verts = domain.get_edge_midpoint_coordinates(triangle_id=0)    # bac
    msg = ("num.array(1/2[a+c,b+c,a+b])=\n%s\nshould be close to"
           " 'verts'=\n%s"
           % (str(num.array([0.5 * (a + c), 0.5 * (b + c),
                             0.5 * (a + b)])),
              str(verts)))
    self.assertTrue(
        num.allclose(
            num.array([0.5 * (a + c), 0.5 * (b + c), 0.5 * (a + b)]),
            verts), msg)

    # Same midpoints in absolute coordinates.
    verts = domain.get_edge_midpoint_coordinates(triangle_id=0,
                                                 absolute=True)
    msg = ("num.array([...])=\n%s\nshould be close to 'verts'=\n%s"
           % (str(0.5 * num.array([
                  nodes_absolute[0] + nodes_absolute[2],
                  nodes_absolute[1] + nodes_absolute[2],
                  nodes_absolute[1] + nodes_absolute[0]
              ])), str(verts)))
    self.assertTrue(
        num.allclose(
            0.5 * num.array([
                nodes_absolute[0] + nodes_absolute[2],
                nodes_absolute[1] + nodes_absolute[2],
                nodes_absolute[1] + nodes_absolute[0]
            ]), verts), msg)
def test_urs_ungridded2sww_mint_maxtII(self):
    """Convert ungridded URS mux data to sww with a mint/maxt window wide
    enough to keep all timesteps, then verify times and first stage."""
    # Zone:   50
    # Easting:  240992.578  Northing: 7620442.472
    # Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
    lat_long = [[-21.5, 114.5], [-21, 114.5], [-21, 115]]
    time_step_count = 6
    time_step = 100
    tide = 9000000
    base_name, files = self.write_mux(lat_long, time_step_count, time_step)
    # mint=0, maxt=100000 covers all six 100 s timesteps (0..500).
    urs_ungridded2sww(base_name, mean_stage=tide,
                      origin=(50, 23432, 4343),
                      mint=0, maxt=100000)

    # now I want to check the sww file ...
    sww_file = base_name + '.sww'

    # Let's interigate the sww file
    # Note, the sww info is not gridded.  It is point data.
    fid = NetCDFFile(sww_file)

    # Make x and y absolute
    geo_reference = Geo_reference(NetCDFObject=fid)
    points = geo_reference.get_absolute(
        list(zip(fid.variables['x'][:], fid.variables['y'][:])))
    points = ensure_numeric(points)
    x = points[:, 0]

    # Check the time vector
    times = fid.variables['time'][:]
    times_actual = [0, 100, 200, 300, 400, 500]
    assert num.allclose(ensure_numeric(times),
                        ensure_numeric(times_actual))

    # Check first value
    stage = fid.variables['stage'][:]
    assert num.allclose(stage[0], x + tide)

    fid.close()
    self.delete_mux(files)
    os.remove(sww_file)
def test_create_mesh_from_regions2(self):
    """Create a mesh from a georeferenced polygon with interior regions
    and check tags, counts and the resulting mesh georeference."""
    # Absolute corners of the bounding polygon.
    min_x = -10
    min_y = -88
    polygon_absolute = [[min_x, min_y], [1000, 100],
                        [1000, 1000], [100, 1000]]

    # Express the polygon relative to a point georeference.
    x_p = -10
    y_p = -40
    zone = 808
    geo_ref_poly = Geo_reference(zone, x_p, y_p)
    polygon = geo_ref_poly.change_points_geo_ref(polygon_absolute)

    boundary_tags = {'walls': [0, 1], 'bom': [2, 3]}

    # Two interior regions, in the same relative frame.
    inner1_polygon = geo_ref_poly.change_points_geo_ref(
        [[10, 10], [20, 10], [20, 20], [10, 20]])
    inner2_polygon = geo_ref_poly.change_points_geo_ref(
        [[30, 30], [40, 30], [40, 40], [30, 40]])
    interior_regions = [(inner1_polygon, 5), (inner2_polygon, 10)]

    m = create_mesh_from_regions(polygon,
                                 boundary_tags,
                                 10000000,
                                 interior_regions=interior_regions,
                                 poly_geo_reference=geo_ref_poly)

    # Test the mesh instance.
    self.assertTrue(len(m.regions) == 3, 'FAILED!')
    segs = m.getUserSegments()
    self.assertTrue(len(segs) == 12, 'FAILED!')
    self.assertTrue(len(m.userVertices) == 12, 'FAILED!')

    # First four segments carry the user-supplied tags in order.
    for seg, expected_tag in zip(segs[:4],
                                 ['walls', 'walls', 'bom', 'bom']):
        self.assertTrue(seg.tag == expected_tag, 'FAILED!')

    # Mesh georeference is derived from the absolute lower-left corner.
    self.assertTrue(m.geo_reference.get_zone() == zone, 'FAILED!')
    self.assertTrue(m.geo_reference.get_xllcorner() == min_x, 'FAILED!')
    self.assertTrue(m.geo_reference.get_yllcorner() == min_y, 'FAILED!')
def test_create_mesh_from_regions_with_duplicate_verts(self):
    """Mesh creation should tolerate a duplicated vertex in the bounding
    polygon (last point repeated)."""
    # These are the absolute values; note the duplicated final vertex.
    polygon_absolute = [[0.0, 0.0], [0, 4.0], [4.0, 4.0], [4.0, 0.0],
                        [4.0, 0.0]]
    x_p = -10
    y_p = -40
    zone = 808
    geo_ref_poly = Geo_reference(zone, x_p, y_p)
    polygon = geo_ref_poly.change_points_geo_ref(polygon_absolute)
    # One tag per segment, including the degenerate one.
    boundary_tags = {'50': [0],
                     '40': [1],
                     '30': [2],
                     'no where seg': [3],
                     '20': [4]}
    m = create_mesh_from_regions(polygon,
                                 boundary_tags,
                                 10000000,
                                 poly_geo_reference=geo_ref_poly,
                                 verbose=False)

    # NOTE(review): fileName is assigned but never used here - possibly
    # the remainder of this test was removed or lives elsewhere.
    fileName = 'badmesh.tsh'
def _create_mesh_from_regions(bounding_polygon,
                              boundary_tags,
                              maximum_triangle_area=None,
                              filename=None,
                              interior_regions=None,
                              interior_holes=None,
                              hole_tags=None,
                              poly_geo_reference=None,
                              mesh_geo_reference=None,
                              minimum_triangle_angle=28.0,
                              fail_if_polygons_outside=True,
                              breaklines=None,
                              verbose=True,
                              regionPtArea=None):
    """_create_mesh_from_regions - internal function.

    See create_mesh_from_regions for documentation.

    Builds a Mesh from the bounding polygon, optional interior regions,
    holes, breaklines and point/area specifications, then either returns
    the Mesh object (filename is None) or generates and exports it.
    """

    # Check the segment indexes - throw an error if they are out of bounds
    if boundary_tags is not None:
        max_points = len(bounding_polygon)
        for key in boundary_tags.keys():
            if len([x for x in boundary_tags[key] if x > max_points - 1]) >= 1:
                msg = 'Boundary tag %s has segment out of bounds. '\
                      %(str(key))
                msg += 'Number of points in bounding polygon = %d' % max_points
                raise SegmentError(msg)

        # Warn about (rather than reject) segments with no tag.
        for i in range(max_points):
            found = False
            for tag in boundary_tags:
                if i in boundary_tags[tag]:
                    found = True
            if found is False:
                msg = 'Segment %d was not assigned a boundary_tag.' % i
                msg += 'Default tag "exterior" will be assigned to missing segment'
                # Fixme: Use proper Python warning
                if verbose:
                    log.critical('WARNING: %s' % msg)

    # In addition I reckon the polygons could be of class Geospatial_data
    # (DSG) If polygons were classes caching would break in places.

    # Simple check
    # (Py3/NumPy fix: use builtin float; the num.float alias was removed
    # in NumPy 1.20 and was identical to float anyway.)
    bounding_polygon = ensure_numeric(bounding_polygon, float)
    msg = 'Bounding polygon must be a list of points or an Nx2 array'
    assert len(bounding_polygon.shape) == 2, msg
    assert bounding_polygon.shape[1] == 2, msg

    if interior_regions is not None:
        # Test that all the interior polygons are inside the
        # bounding_poly and throw out those that aren't fully included.
        # Note, both polys have the same geo_ref,
        # therefore don't take geo_ref into account.
        polygons_inside_boundary = []
        for interior_polygon, res in interior_regions:
            indices = inside_polygon(interior_polygon,
                                     bounding_polygon,
                                     closed=True, verbose=False)

            # Py3 fix: '<>' is a SyntaxError in Python 3; use '!='.
            if len(indices) != len(interior_polygon):
                msg = 'Interior polygon %s is not fully inside'\
                      %(str(interior_polygon))
                msg += ' bounding polygon: %s.' % (str(bounding_polygon))

                if fail_if_polygons_outside is True:
                    raise PolygonError(msg)
                else:
                    msg += ' I will ignore it.'
                    log.critical(msg)
            else:
                polygons_inside_boundary.append([interior_polygon, res])

        # Record only those that were fully contained
        interior_regions = polygons_inside_boundary

    if interior_holes is not None:
        # Test that all the interior holes are inside the bounding_poly
        for interior_polygon in interior_holes:

            # Test that we have a polygon (at least 3 points = 6 floats)
            if len(num.array(interior_polygon).flat) < 6:
                msg = 'Interior hole polygon %s has too few (<3) points.\n' \
                      %(str(interior_polygon))
                msg = msg + '(Insure that you have specified a LIST of interior hole polygons)'
                raise PolygonError(msg)

            indices = inside_polygon(interior_polygon,
                                     bounding_polygon,
                                     closed=True, verbose=False)

            # Py3 fix: '<>' is a SyntaxError in Python 3; use '!='.
            if len(indices) != len(interior_polygon):
                msg = 'Interior polygon %s is outside bounding polygon: %s'\
                      %(str(interior_polygon), str(bounding_polygon))
                raise PolygonError(msg)

    # Resolve geo referencing
    if mesh_geo_reference is None:
        xllcorner = min(bounding_polygon[:, 0])
        yllcorner = min(bounding_polygon[:, 1])

        if poly_geo_reference is None:
            zone = DEFAULT_ZONE
        else:
            zone = poly_geo_reference.get_zone()
            [(xllcorner, yllcorner)] = poly_geo_reference.get_absolute(
                [(xllcorner, yllcorner)])

        # Create a geo_ref based on the llc of the bounding_polygon
        mesh_geo_reference = Geo_reference(xllcorner=xllcorner,
                                           yllcorner=yllcorner,
                                           zone=zone)

    m = Mesh(geo_reference=mesh_geo_reference)

    # Build a list of discrete segments from the breakline polygons
    if breaklines is not None:
        points, verts = polylist2points_verts(breaklines)
        m.add_points_and_segments(points, verts)

    # Do bounding polygon
    m.add_region_from_polygon(bounding_polygon,
                              segment_tags=boundary_tags,
                              geo_reference=poly_geo_reference)

    # Find one point inside region automatically
    if interior_regions is not None:
        excluded_polygons = []
        for polygon, res in interior_regions:
            excluded_polygons.append(polygon)
    else:
        excluded_polygons = None

    # Convert bounding poly to absolute values
    # this sort of thing can be fixed with the geo_points class
    if poly_geo_reference is not None:
        bounding_polygon_absolute = \
            poly_geo_reference.get_absolute(bounding_polygon)
    else:
        bounding_polygon_absolute = bounding_polygon

    inner_point = point_in_polygon(bounding_polygon_absolute)
    inner = m.add_region(inner_point[0], inner_point[1])
    inner.setMaxArea(maximum_triangle_area)

    # Do interior regions
    if interior_regions is not None:
        for polygon, res in interior_regions:
            m.add_region_from_polygon(polygon,
                                      max_triangle_area=res,
                                      geo_reference=poly_geo_reference)

    # Do interior holes
    if interior_holes is not None:
        for n, polygon in enumerate(interior_holes):
            # hole_tags may be None, too short, or missing the key.
            try:
                tags = hole_tags[n]
            except (TypeError, IndexError, KeyError):
                tags = {}
            m.add_hole_from_polygon(polygon,
                                    segment_tags=tags,
                                    geo_reference=poly_geo_reference)

    # 22/04/2014
    # Add user-specified point-based regions with max area
    if regionPtArea is not None:
        for i in range(len(regionPtArea)):
            inner = m.add_region(regionPtArea[i][0], regionPtArea[i][1])
            inner.setMaxArea(regionPtArea[i][2])

    # NOTE (Ole): This was moved here as it is annoying if mesh is always
    # stored irrespective of whether the computation was cached or not.
    # This caused Domain to recompute as it has meshfile as a dependency

    # Decide whether to store this mesh or return it
    if filename is None:
        return m
    else:
        if verbose:
            log.critical("Generating mesh to file '%s'" % filename)

        m.generate_mesh(minimum_triangle_angle=minimum_triangle_angle,
                        verbose=verbose)
        m.export_mesh_file(filename)
        return m
def get_maximum_inundation_data(filename, polygon=None,
                                time_interval=None,
                                use_centroid_values=True,
                                return_time=False,
                                verbose=False):
    """Compute maximum run up height from sww file.

    filename             path to SWW file to read
    polygon              if specified restrict to points inside this polygon;
                         assumed absolute coordinates and in same zone as
                         domain
    time_interval        if specified restrict to within the period specified
    use_centroid_values  average nodal values onto triangle centroids first
    return_time          also return the time of the maximal runup
    verbose              True if this function is to be verbose

    Returns (maximal_runup, maximal_runup_location)
    (plus maximal_time if return_time is True).

    Algorithm is as in get_maximum_inundation_elevation from
    shallow_water_domain except that this function works with the SWW file
    and computes the maximal runup height over multiple timesteps.

    If no inundation is found within polygon and time_interval the return
    value is None signifying "No Runup" or "Everything is dry".
    """

    # We are using nodal values here as that is what is stored in sww files.

    # Water depth below which it is considered to be 0 in the model
    # FIXME (Ole): Allow this to be specified as a keyword argument as well

    from anuga.geometry.polygon import inside_polygon
    from anuga.config import minimum_allowed_height
    from anuga.file.netcdf import NetCDFFile

    # Just find max inundation over one file
    dir, base = os.path.split(filename)
    iterate_over = [filename[:-4]]
    if verbose:
        # Py3 fix: was a Python 2 print statement.
        print(iterate_over)

    # Read sww file
    if verbose:
        log.critical('Reading from %s' % filename)
        # FIXME: Use general swwstats (when done)

    maximal_runup = None
    maximal_runup_location = None
    maximal_time = None

    for _, swwfile in enumerate(iterate_over):
        # Read sww file
        filename = os.path.join(dir, swwfile + '.sww')

        if verbose:
            log.critical('Reading from %s' % filename)
            # FIXME: Use general swwstats (when done)

        fid = NetCDFFile(filename)

        # Get geo_reference; sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError:
            geo_reference = Geo_reference()    # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()

        # Get extent
        volumes = fid.variables['volumes'][:]
        x = fid.variables['x'][:] + xllcorner
        y = fid.variables['y'][:] + yllcorner

        # Get the relevant quantities (convert from single precision);
        # prefer centroid values if present, else fall back to nodal.
        # (Py3/NumPy fix: builtin float instead of removed num.float alias.)
        try:
            elevation = num.array(fid.variables['elevation_c'][:], float)
            stage = num.array(fid.variables['stage_c'][:], float)
            found_c_values = True
        except Exception:
            elevation = num.array(fid.variables['elevation'][:], float)
            stage = num.array(fid.variables['stage'][:], float)
            found_c_values = False

        if verbose:
            print('found c values ', found_c_values)
            print('stage.shape ', stage.shape)
            print('elevation.shape ', elevation.shape)

        # Here's where one could convert nodal information to centroid
        # information but is probably something we need to write in C.
        # Here's a Python thought which is NOT finished!!!
        if use_centroid_values is True:
            vols0 = volumes[:, 0]
            vols1 = volumes[:, 1]
            vols2 = volumes[:, 2]

            # Then use these to compute centroid location
            x = (x[vols0] + x[vols1] + x[vols2]) / 3.0
            y = (y[vols0] + y[vols1] + y[vols2]) / 3.0

            if found_c_values:
                # Already centroid values; nothing to do.
                pass
            else:
                elevation = (elevation[vols0] + elevation[vols1]
                             + elevation[vols2]) / 3.0
                stage = (stage[:, vols0] + stage[:, vols1]
                         + stage[:, vols2]) / 3.0

        # Spatial restriction
        if polygon is not None:
            msg = 'polygon must be a sequence of points.'
            assert len(polygon[0]) == 2, msg

            # FIXME (Ole): Make a generic polygon input check in polygon.py
            # and call it here
            points = num.ascontiguousarray(
                num.concatenate((x[:, num.newaxis], y[:, num.newaxis]),
                                axis=1))
            point_indices = inside_polygon(points, polygon)

            # Restrict quantities to polygon
            elevation = num.take(elevation, point_indices, axis=0)
            stage = num.take(stage, point_indices, axis=1)

            # Get info for location of maximal runup
            points_in_polygon = num.take(points, point_indices, axis=0)

            x = points_in_polygon[:, 0]
            y = points_in_polygon[:, 1]
        else:
            # Take all points
            point_indices = num.arange(len(x))

        # Temporal restriction
        time = fid.variables['time'][:]
        if verbose:
            print(time)
        all_timeindices = num.arange(len(time))

        if time_interval is not None:
            msg = 'time_interval must be a sequence of length 2.'
            assert len(time_interval) == 2, msg
            msg = 'time_interval %s must not be decreasing.' % time_interval
            assert time_interval[1] >= time_interval[0], msg

            msg = 'Specified time interval [%.8f:%.8f] ' % tuple(time_interval)
            # (Fixed garbled wording 'must does not match'.)
            msg += 'does not match model time interval: [%.8f, %.8f]\n' \
                   % (time[0], time[-1])
            if time_interval[1] < time[0]:
                fid.close()
                raise ValueError(msg)
            if time_interval[0] > time[-1]:
                fid.close()
                raise ValueError(msg)

            # Take time indices corresponding to interval (& is bitwise AND)
            timesteps = num.compress((time_interval[0] <= time)
                                     & (time <= time_interval[1]),
                                     all_timeindices)

            msg = ('time_interval %s did not include any model timesteps.'
                   % time_interval)
            assert not num.alltrue(timesteps == 0), msg
        else:
            # Take them all
            timesteps = all_timeindices

        fid.close()

        # Compute maximal runup for each timestep
        for i in timesteps:
            stage_i = stage[i, :]
            depth = stage_i - elevation

            if verbose:
                print('++++++++')

            # Get wet nodes, i.e. nodes with depth > 0 within given region
            # and timesteps
            wet_nodes = num.where(depth > 0.0)[0]

            if verbose:
                print(stage_i.shape)
                print(num.max(stage_i))

            # Bug fix: the old test num.alltrue(wet_nodes == 0) also fired
            # when node 0 was the ONLY wet node; test emptiness explicitly.
            if len(wet_nodes) == 0:
                runup = None
            else:
                # Find maximum elevation among wet nodes
                wet_elevation = num.take(elevation, wet_nodes, axis=0)
                runup_index = num.argmax(wet_elevation)
                runup = max(wet_elevation)
                if verbose:
                    print('max(wet_elevation) ', max(wet_elevation))
                assert wet_elevation[runup_index] == runup    # Must be True

            # Py3 fix: 'runup > None' raises TypeError, so compare
            # explicitly (a dry timestep never updates the maximum).
            if runup is not None and (maximal_runup is None
                                      or runup > maximal_runup):
                maximal_runup = runup
                maximal_time = time[i]

                # Record location
                wet_x = num.take(x, wet_nodes, axis=0)
                wet_y = num.take(y, wet_nodes, axis=0)
                maximal_runup_location = [wet_x[runup_index],
                                          wet_y[runup_index]]

            if verbose:
                print(i, runup)

    if return_time:
        return maximal_runup, maximal_runup_location, maximal_time
    else:
        return maximal_runup, maximal_runup_location
def test_create_mesh_from_regions_with_caching(self):
    """Check that create_mesh_from_regions works with use_cache=True.

    The cache entry is explicitly cleared first, the mesh is built once
    (populating the cache), its structure is verified, and then the same
    call is repeated so the result comes from the cache.
    """
    # Mesh geo-reference (offset of the mesh coordinate system).
    x = -500
    y = -1000
    mesh_geo = geo_reference = Geo_reference(56, x, y)

    # These are the absolute values
    polygon_absolute = [[0, 0], [100, 0], [100, 100], [0, 100]]

    # Polygon geo-reference: the polygons handed to
    # create_mesh_from_regions are relative to this origin.
    x_p = -10
    y_p = -40
    geo_ref_poly = Geo_reference(56, x_p, y_p)
    polygon = geo_ref_poly.change_points_geo_ref(polygon_absolute)

    boundary_tags = {'walls': [0, 1], 'bom': [2, 3]}

    # Two interior regions (also expressed relative to geo_ref_poly).
    inner1_polygon_absolute = [[10, 10], [20, 10], [20, 20], [10, 20]]
    inner1_polygon = geo_ref_poly.\
        change_points_geo_ref(inner1_polygon_absolute)

    inner2_polygon_absolute = [[30, 30], [40, 30], [40, 40], [30, 40]]
    inner2_polygon = geo_ref_poly.\
        change_points_geo_ref(inner2_polygon_absolute)

    interior_regions = [(inner1_polygon, 5), (inner2_polygon, 10)]
    interior_holes = None

    # Clear cache first
    from anuga.caching import cache

    # NOTE: this argument tuple/dict must mirror exactly how
    # create_mesh_from_regions invokes _create_mesh_from_regions,
    # otherwise the wrong cache entry is cleared.
    cache(_create_mesh_from_regions,
          (polygon, boundary_tags),
          {'minimum_triangle_angle': 28.0,
           'maximum_triangle_area': 10000000,
           'interior_regions': interior_regions,
           'interior_holes': interior_holes,
           'poly_geo_reference': geo_ref_poly,
           'mesh_geo_reference': mesh_geo,
           'verbose': False},
          verbose=False,
          clear=1)

    # First call: computes the mesh and stores it in the cache.
    m = create_mesh_from_regions(polygon,
                                 boundary_tags,
                                 maximum_triangle_area=10000000,
                                 interior_regions=interior_regions,
                                 poly_geo_reference=geo_ref_poly,
                                 mesh_geo_reference=mesh_geo,
                                 verbose=False,
                                 use_cache=True)

    # Test the mesh instance
    # 3 region points: one for the outer polygon and one per interior
    # region.  12 segments/vertices: 4 per polygon (outer + 2 inner).
    self.assertTrue(len(m.regions) == 3, 'FAILED!')
    segs = m.getUserSegments()
    self.assertTrue(len(segs) == 12, 'FAILED!')
    self.assertTrue(len(m.userVertices) == 12, 'FAILED!')
    self.assertTrue(segs[0].tag == 'walls', 'FAILED!')
    self.assertTrue(segs[1].tag == 'walls', 'FAILED!')
    self.assertTrue(segs[2].tag == 'bom', 'FAILED!')
    self.assertTrue(segs[3].tag == 'bom', 'FAILED!')

    # Assuming the order of the region points is known.
    # (This isn't true, if you consider create_mesh_from_regions
    # a black box)
    poly_point = m.getRegions()[0]

    # poly_point values are relative to the mesh geo-ref
    # make them absolute
    self.assertTrue(
        is_inside_polygon([poly_point.x + x, poly_point.y + y],
                          polygon_absolute,
                          closed=False),
        'FAILED!')

    # Assuming the order of the region points is known.
    # (This isn't true, if you consider create_mesh_from_regions
    # a black box)
    poly_point = m.getRegions()[1]

    # poly_point values are relative to the mesh geo-ref
    # make them absolute
    self.assertTrue(
        is_inside_polygon([poly_point.x + x, poly_point.y + y],
                          inner1_polygon_absolute,
                          closed=False),
        'FAILED!')

    # Assuming the order of the region points is known.
    # (This isn't true, if you consider create_mesh_from_regions
    # a black box)
    poly_point = m.getRegions()[2]

    # poly_point values are relative to the mesh geo-ref
    # make them absolute
    self.assertTrue(
        is_inside_polygon([poly_point.x + x, poly_point.y + y],
                          inner2_polygon_absolute,
                          closed=False),
        'FAILED!')

    # Now create m using cached values
    m_cache = create_mesh_from_regions(polygon,
                                       boundary_tags,
                                       10000000,
                                       interior_regions=interior_regions,
                                       poly_geo_reference=geo_ref_poly,
                                       mesh_geo_reference=mesh_geo,
                                       verbose=False,
                                       use_cache=True)
def _read_msh_file(file_name): """ Read in an msh file.""" #Check contents. Get NetCDF fd = open(file_name, 'r') fd.close() # throws prints to screen if file not present fid = NetCDFFile(file_name, netcdf_mode_r) mesh = {} # Get the variables - the triangulation try: mesh['vertices'] = fid.variables['vertices'][:] except KeyError: mesh['vertices'] = num.array([], num.int) #array default# try: mesh['vertex_attributes'] = fid.variables['vertex_attributes'][:] except KeyError: mesh['vertex_attributes'] = None mesh['vertex_attribute_titles'] = [] try: titles = fid.variables['vertex_attribute_titles'][:] mesh['vertex_attribute_titles'] = [ x.tostring().strip() for x in titles ] except KeyError: pass try: mesh['segments'] = fid.variables['segments'][:] except KeyError: mesh['segments'] = num.array([], num.int) #array default# mesh['segment_tags'] = [] try: tags = fid.variables['segment_tags'][:] mesh['segment_tags'] = [x.tostring().strip() for x in tags] except KeyError: for ob in mesh['segments']: mesh['segment_tags'].append('') try: mesh['triangles'] = fid.variables['triangles'][:] mesh['triangle_neighbors'] = fid.variables['triangle_neighbors'][:] except KeyError: mesh['triangles'] = num.array([], num.int) #array default# mesh['triangle_neighbors'] = num.array([], num.int) #array default# mesh['triangle_tags'] = [] try: tags = fid.variables['triangle_tags'][:] mesh['triangle_tags'] = [x.tostring().strip() for x in tags] except KeyError: for ob in mesh['triangles']: mesh['triangle_tags'].append('') #the outline try: mesh['points'] = fid.variables['points'][:] except KeyError: mesh['points'] = [] try: mesh['point_attributes'] = fid.variables['point_attributes'][:] except KeyError: mesh['point_attributes'] = [] for point in mesh['points']: mesh['point_attributes'].append([]) try: mesh['outline_segments'] = fid.variables['outline_segments'][:] except KeyError: mesh['outline_segments'] = num.array([], num.int) #array default# mesh['outline_segment_tags'] = [] try: tags = 
fid.variables['outline_segment_tags'][:] for i, tag in enumerate(tags): mesh['outline_segment_tags'].append(tags[i].tostring().strip()) except KeyError: for ob in mesh['outline_segments']: mesh['outline_segment_tags'].append('') try: mesh['holes'] = fid.variables['holes'][:] except KeyError: mesh['holes'] = num.array([], num.int) #array default# try: mesh['regions'] = fid.variables['regions'][:] except KeyError: mesh['regions'] = num.array([], num.int) #array default# mesh['region_tags'] = [] try: tags = fid.variables['region_tags'][:] for i, tag in enumerate(tags): mesh['region_tags'].append(tags[i].tostring().strip()) except KeyError: for ob in mesh['regions']: mesh['region_tags'].append('') try: mesh['region_max_areas'] = fid.variables['region_max_areas'][:] except KeyError: mesh['region_max_areas'] = num.array([], num.int) #array default# try: geo_reference = Geo_reference(NetCDFObject=fid) mesh['geo_reference'] = geo_reference except AttributeError, e: #geo_ref not compulsory mesh['geo_reference'] = None
def test_get_flow_through_cross_section_with_geo(self): """test_get_flow_through_cross_section(self): Test that the total flow through a cross section can be correctly obtained at run-time from the ANUGA domain. This test creates a flat bed with a known flow through it and tests that the function correctly returns the expected flow. The specifics are e = -1 m u = 2 m/s h = 2 m w = 3 m (width of channel) q = u*h*w = 12 m^3/s This run tries it with georeferencing and with elevation = -1 """ # Create basic mesh (20m x 3m) width = 3 length = 20 t_end = 1 points, vertices, boundary = rectangular(length, width, length, width) # Create shallow water domain domain = Domain(points, vertices, boundary, geo_reference=Geo_reference(56, 308500, 6189000)) domain.default_order = 2 domain.set_quantities_to_be_stored(None) e = -1.0 w = 1.0 h = w - e u = 2.0 uh = u * h Br = Reflective_boundary(domain) # Side walls Bd = Dirichlet_boundary([w, uh, 0]) # 2 m/s across the 3 m inlet: # Initial conditions domain.set_quantity('elevation', e) domain.set_quantity('stage', w) domain.set_quantity('xmomentum', uh) domain.set_boundary({'left': Bd, 'right': Bd, 'top': Br, 'bottom': Br}) # Interpolation points down the middle I = [[0, width / 2.], [length / 2., width / 2.], [length, width / 2.]] interpolation_points = domain.geo_reference.get_absolute(I) for t in domain.evolve(yieldstep=0.1, finaltime=0.5): # Shortcuts to quantites stage = domain.get_quantity('stage') xmomentum = domain.get_quantity('xmomentum') ymomentum = domain.get_quantity('ymomentum') # Check that quantities are they should be in the interior w_t = stage.get_values(interpolation_points) uh_t = xmomentum.get_values(interpolation_points) vh_t = ymomentum.get_values(interpolation_points) assert num.allclose(w_t, w) assert num.allclose(uh_t, uh) assert num.allclose(vh_t, 0.0, atol=1.0e-6) # Check flows through the middle for i in range(5): x = length / 2. 
+ i * 0.23674563 # Arbitrary cross_section = [[x, 0], [x, width]] cross_section = domain.geo_reference.get_absolute( cross_section) Q = domain.get_flow_through_cross_section(cross_section, verbose=False) assert num.allclose(Q, uh * width) import cPickle cPickle.dump(domain, open('domain_pickle.pickle', 'w')) domain_restored = cPickle.load(open('domain_pickle.pickle')) for t in domain_restored.evolve(yieldstep=0.1, finaltime=1.0): # Shortcuts to quantites stage = domain_restored.get_quantity('stage') xmomentum = domain_restored.get_quantity('xmomentum') ymomentum = domain_restored.get_quantity('ymomentum') # Check that quantities are they should be in the interior w_t = stage.get_values(interpolation_points) uh_t = xmomentum.get_values(interpolation_points) vh_t = ymomentum.get_values(interpolation_points) assert num.allclose(w_t, w) assert num.allclose(uh_t, uh) assert num.allclose(vh_t, 0.0, atol=1.0e-6) # Check flows through the middle for i in range(5): x = length / 2. + i * 0.23674563 # Arbitrary cross_section = [[x, 0], [x, width]] cross_section = domain_restored.geo_reference.get_absolute( cross_section) Q = domain_restored.get_flow_through_cross_section( cross_section, verbose=False) assert num.allclose(Q, uh * width)
line = fd.readline() for index in range(int(numOfRegions)): # Read in the Max area info line = fd.readline() fragments = line.split() # The try is here for format compatibility try: fragments.pop(0) # pop off the index if len(fragments) == 0: # no max area regionmaxareas.append(None) else: regionmaxareas.append(float(fragments[0])) except (ValueError, IndexError), e: regionmaxareas.append(None) try: geo_reference = Geo_reference(ASCIIFile=fd) except: #geo_ref not compulsory geo_reference = None meshDict = {} meshDict['points'] = points meshDict['point_attributes'] = pointattributes meshDict['outline_segments'] = segments meshDict['outline_segment_tags'] = segmenttags meshDict['holes'] = holes meshDict['regions'] = regions meshDict['region_tags'] = regionattributes meshDict['region_max_areas'] = regionmaxareas meshDict['geo_reference'] = geo_reference
def test_get_energy_through_cross_section(self):
    """test_get_energy_through_cross_section(self):

    Test that the specific and total energy through a cross section can be
    correctly obtained from an sww file.

    This test creates a flat bed with a known flow through it and tests
    that the function correctly returns the expected energies.

    The specifics are
    e = -1 m
    u = 2 m/s
    h = w - e = 2 m
    w = 3 m (width of channel)

    q = u*h*w = 12 m^3/s
    Es = h + 0.5*u*u/g    # Specific energy head [m]
    Et = w + 0.5*u*u/g    # Total energy head [m]

    This test uses georeferencing
    """
    import time, os
    from anuga.file.netcdf import NetCDFFile

    # Setup
    #from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular

    # Create basic mesh (20m x 3m)
    width = 3
    length = 20
    t_end = 1
    points, vertices, boundary = rectangular(length, width, length, width)

    # Create shallow water domain with a non-trivial geo-reference.
    domain = Domain(points, vertices, boundary,
                    geo_reference=Geo_reference(56, 308500, 6189000))
    domain.default_order = 2
    domain.set_minimum_storable_height(0.01)

    domain.set_name('flowtest')
    swwfile = domain.get_name() + '.sww'

    domain.set_datadir('.')
    domain.format = 'sww'
    domain.smooth = True

    e = -1.0            # Bed elevation [m]
    w = 1.0             # Stage (water surface) [m]
    h = w - e           # Water depth = 2 m
    u = 2.0             # Flow speed [m/s]
    uh = u * h          # x-momentum

    Br = Reflective_boundary(domain)        # Side walls
    Bd = Dirichlet_boundary([w, uh, 0])     # 2 m/s across the 3 m inlet:

    domain.set_quantity('elevation', e)
    domain.set_quantity('stage', w)
    domain.set_quantity('xmomentum', uh)
    domain.set_boundary({'left': Bd, 'right': Bd, 'top': Br, 'bottom': Br})

    # Evolve so the steady state gets written to the sww file.
    for t in domain.evolve(yieldstep=1, finaltime=t_end):
        pass

    # Check that momentum is as it should be in the interior
    I = [[0, width / 2.],
         [length / 2., width / 2.],
         [length, width / 2.]]

    # Interpolation points must be in absolute coordinates.
    I = domain.geo_reference.get_absolute(I)
    f = file_function(swwfile,
                      quantities=['stage', 'xmomentum', 'ymomentum'],
                      interpolation_points=I,
                      verbose=False)

    for t in range(t_end + 1):
        for i in range(3):
            #print i, t, f(t, i)
            assert num.allclose(f(t, i), [w, uh, 0], atol=1.0e-6)

    # Check energies through the middle
    for i in range(5):
        x = length / 2. + i * 0.23674563    # Arbitrary
        cross_section = [[x, 0], [x, width]]

        cross_section = domain.geo_reference.get_absolute(cross_section)

        # Specific energy head: h + u^2/(2g)
        time, Es = get_energy_through_cross_section(swwfile,
                                                    cross_section,
                                                    kind='specific',
                                                    verbose=False)
        assert num.allclose(Es, h + 0.5 * u * u / g)

        # Total energy head: w + u^2/(2g)
        time, Et = get_energy_through_cross_section(swwfile,
                                                    cross_section,
                                                    kind='total',
                                                    verbose=False)
        assert num.allclose(Et, w + 0.5 * u * u / g)
def sww2dem( name_in, name_out, quantity=None, # defaults to elevation reduction=None, cellsize=10, number_of_decimal_places=None, NODATA_value=-9999.0, easting_min=None, easting_max=None, northing_min=None, northing_max=None, verbose=False, origin=None, datum='WGS84', block_size=None): """Read SWW file and convert to Digitial Elevation model format (.asc or .ers) Example (ASC): ncols 3121 nrows 1800 xllcorner 722000 yllcorner 5893000 cellsize 25 NODATA_value -9999 138.3698 137.4194 136.5062 135.5558 .......... The number of decimal places can be specified by the user to save on disk space requirements by specifying in the call to sww2dem. Also write accompanying file with same basename_in but extension .prj used to fix the UTM zone, datum, false northings and eastings. The prj format is assumed to be as Projection UTM Zone 56 Datum WGS84 Zunits NO Units METERS Spheroid WGS84 Xshift 0.0000000000 Yshift 10000000.0000000000 Parameters The parameter quantity must be the name of an existing quantity or an expression involving existing quantities. The default is 'elevation'. Quantity is not a list of quantities. If reduction is given and it's an index, sww2dem will output the quantity at that time-step. If reduction is given and it's a built in function (eg max, min, mean), then that function is used to reduce the quantity over all time-steps. If reduction is not given, reduction is set to "max" by default. datum format can be either 'asc' or 'ers' block_size - sets the number of slices along the non-time axis to process in one block. 
""" import sys import types from anuga.geometry.polygon import inside_polygon, outside_polygon from anuga.abstract_2d_finite_volumes.util import \ apply_expression_to_dictionary basename_in, in_ext = os.path.splitext(name_in) basename_out, out_ext = os.path.splitext(name_out) out_ext = out_ext.lower() if in_ext != '.sww': raise IOError('Input format for %s must be .sww' % name_in) if out_ext not in ['.asc', '.ers']: raise IOError('Format for %s must be either asc or ers.' % name_out) false_easting = 500000 false_northing = 10000000 if quantity is None: quantity = 'elevation' if reduction is None: reduction = max if quantity_formula.has_key(quantity): quantity = quantity_formula[quantity] if number_of_decimal_places is None: number_of_decimal_places = 3 if block_size is None: block_size = DEFAULT_BLOCK_SIZE assert (isinstance(block_size, (int, long, float))) # Read sww file if verbose: log.critical('Reading from %s' % name_in) log.critical('Output directory is %s' % name_out) from anuga.file.netcdf import NetCDFFile fid = NetCDFFile(name_in) #Get extent and reference x = num.array(fid.variables['x'][:], num.float) y = num.array(fid.variables['y'][:], num.float) volumes = num.array(fid.variables['volumes'][:], num.int) if type(reduction) is not types.BuiltinFunctionType: times = fid.variables['time'][reduction] else: times = fid.variables['time'][:] try: # works with netcdf4 number_of_timesteps = len(fid.dimensions['number_of_timesteps']) number_of_points = len(fid.dimensions['number_of_points']) except: #works with scientific.io.netcdf number_of_timesteps = fid.dimensions['number_of_timesteps'] number_of_points = fid.dimensions['number_of_points'] if origin is None: # Get geo_reference # sww files don't have to have a geo_ref try: geo_reference = Geo_reference(NetCDFObject=fid) except AttributeError, e: geo_reference = Geo_reference() # Default georef object xllcorner = geo_reference.get_xllcorner() yllcorner = geo_reference.get_yllcorner() zone = 
geo_reference.get_zone()
def test_create_mesh_from_regions(self):
    """Build a mesh where the polygon geo-reference differs from the
    mesh geo-reference, then verify segment tags and that each region
    point falls inside its corresponding (absolute) polygon.
    """
    # Mesh coordinate-system origin.
    x = -500
    y = -1000
    mesh_geo = geo_reference = Geo_reference(56, x, y)

    # Outer boundary in absolute coordinates.
    polygon_absolute = [[0, 0], [100, 0], [100, 100], [0, 100]]

    # Polygon coordinate-system origin; all polygons passed in are
    # relative to this reference.
    x_p = -10
    y_p = -40
    geo_ref_poly = Geo_reference(56, x_p, y_p)
    polygon = geo_ref_poly.change_points_geo_ref(polygon_absolute)

    boundary_tags = {'walls': [0, 1], 'bom': [2, 3]}

    # Two interior regions, shifted into the polygon geo-reference.
    inner1_polygon_absolute = [[10, 10], [20, 10], [20, 20], [10, 20]]
    inner2_polygon_absolute = [[30, 30], [40, 30], [40, 40], [30, 40]]
    inner1_polygon = geo_ref_poly.change_points_geo_ref(
        inner1_polygon_absolute)
    inner2_polygon = geo_ref_poly.change_points_geo_ref(
        inner2_polygon_absolute)

    interior_regions = [(inner1_polygon, 5), (inner2_polygon, 10)]
    m = create_mesh_from_regions(polygon,
                                 boundary_tags,
                                 10000000,
                                 interior_regions=interior_regions,
                                 poly_geo_reference=geo_ref_poly,
                                 mesh_geo_reference=mesh_geo)

    # One region point per polygon: outer boundary + two interior.
    self.assertTrue(len(m.regions) == 3, 'FAILED!')

    # 4 segments/vertices per polygon, three polygons in total.
    segs = m.getUserSegments()
    self.assertTrue(len(segs) == 12, 'FAILED!')
    self.assertTrue(len(m.userVertices) == 12, 'FAILED!')
    for seg_index, expected_tag in enumerate(['walls', 'walls',
                                              'bom', 'bom']):
        self.assertTrue(segs[seg_index].tag == expected_tag, 'FAILED!')

    # Assuming the order of the region points is known.  (This isn't
    # guaranteed if create_mesh_from_regions is treated as a black box.)
    # Region points come back relative to the mesh geo-ref, so add the
    # mesh origin to recover absolute coordinates before testing.
    poly_point = m.getRegions()[0]
    msg = ('Expected point (%s,%s) to be inside polygon %s'
           % (str(poly_point.x + x),
              str(poly_point.y + y),
              str(polygon_absolute)))
    self.assertTrue(
        is_inside_polygon([poly_point.x + x, poly_point.y + y],
                          polygon_absolute,
                          closed=False),
        msg)

    # Same check for the two interior-region points.
    for region_index, inner_absolute in [(1, inner1_polygon_absolute),
                                         (2, inner2_polygon_absolute)]:
        poly_point = m.getRegions()[region_index]
        self.assertTrue(
            is_inside_polygon([poly_point.x + x, poly_point.y + y],
                              inner_absolute,
                              closed=False),
            'FAILED!')
def sww2array( name_in, quantity=None, # defaults to elevation reduction=None, cellsize=10, number_of_decimal_places=None, NODATA_value=-9999.0, easting_min=None, easting_max=None, northing_min=None, northing_max=None, verbose=False, origin=None, datum='WGS84', block_size=None): """Read SWW file and convert to a numpy array (can be stored to a png file later) The parameter quantity must be the name of an existing quantity or an expression involving existing quantities. The default is 'elevation'. Quantity is not a list of quantities. If reduction is given and it's an index, sww2array will output the quantity at that time-step. If reduction is given and it's a built in function (eg max, min, mean), then that function is used to reduce the quantity over all time-steps. If reduction is not given, reduction is set to "max" by default. datum block_size - sets the number of slices along the non-time axis to process in one block. """ import sys import types from anuga.geometry.polygon import inside_polygon, outside_polygon from anuga.abstract_2d_finite_volumes.util import \ apply_expression_to_dictionary basename_in, in_ext = os.path.splitext(name_in) if in_ext != '.sww': raise IOError('Input format for %s must be .sww' % name_in) false_easting = 500000 false_northing = 10000000 if quantity is None: quantity = 'elevation' if reduction is None: reduction = max if quantity_formula.has_key(quantity): quantity = quantity_formula[quantity] if number_of_decimal_places is None: number_of_decimal_places = 3 if block_size is None: block_size = DEFAULT_BLOCK_SIZE assert (isinstance(block_size, (int, long, float))) # Read sww file if verbose: log.critical('Reading from %s' % name_in) from anuga.file.netcdf import NetCDFFile fid = NetCDFFile(name_in) #Get extent and reference x = num.array(fid.variables['x'], num.float) y = num.array(fid.variables['y'], num.float) volumes = num.array(fid.variables['volumes'], num.int) if type(reduction) is not types.BuiltinFunctionType: times = 
fid.variables['time'][reduction] else: times = fid.variables['time'][:] number_of_timesteps = fid.dimensions['number_of_timesteps'] number_of_points = fid.dimensions['number_of_points'] if origin is None: # Get geo_reference # sww files don't have to have a geo_ref try: geo_reference = Geo_reference(NetCDFObject=fid) except AttributeError, e: geo_reference = Geo_reference() # Default georef object xllcorner = geo_reference.get_xllcorner() yllcorner = geo_reference.get_yllcorner() zone = geo_reference.get_zone()
def test_sww2pts_centroids_de0(self):
    """Test that sww information can be converted correctly to pts data
    at specified coordinates - in this case, the centroids.

    Elevation is sampled first at the vertex coordinates (must match the
    stored elevation exactly) and then at the triangle centroids
    (compared against precomputed reference values for the DE0
    algorithm).
    """
    import time, os
    from anuga.file.netcdf import NetCDFFile

    # Used for points that lie outside mesh
    NODATA_value = 1758323

    # Setup
    from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular

    # Create shallow water domain
    domain = Domain(*rectangular(2, 2))

    B = Transmissive_boundary(domain)
    domain.set_boundary({'left': B, 'right': B, 'top': B, 'bottom': B})

    domain.set_name('datatest_de0')

    ptsfile = domain.get_name() + '_elevation.pts'
    swwfile = domain.get_name() + '.sww'

    domain.set_datadir('.')
    domain.format = 'sww'
    # Plane z = -x - y
    domain.set_quantity('elevation', lambda x, y: -x - y)

    domain.geo_reference = Geo_reference(56, 308500, 6189000)

    sww = SWW_file(domain)
    sww.store_connectivity()
    sww.store_timestep()

    #self.domain.tight_slope_limiters = 1
    domain.evolve_to_end(finaltime=0.01)
    sww.store_timestep()

    # Check contents in NetCDF
    fid = NetCDFFile(sww.filename, netcdf_mode_r)

    # Get the variables
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    elevation = fid.variables['elevation'][:]
    time = fid.variables['time'][:]
    stage = fid.variables['stage'][:]
    volumes = fid.variables['volumes'][:]

    # Invoke interpolation for vertex points
    # Build an (N, 2) coordinate array from the sww x/y vectors.
    points = num.concatenate((x[:, num.newaxis], y[:, num.newaxis]),
                             axis=1)
    points = num.ascontiguousarray(points)
    sww2pts(domain.get_name() + '.sww',
            quantity='elevation',
            data_points=points,
            NODATA_value=NODATA_value)
    # At the vertices the interpolated elevation must reproduce the
    # stored values exactly.
    ref_point_values = elevation
    point_values = Geospatial_data(ptsfile).get_attributes()
    #print 'P', point_values
    #print 'Ref', ref_point_values
    assert num.allclose(point_values, ref_point_values)

    # Invoke interpolation for centroids
    points = domain.get_centroid_coordinates()
    #print points
    sww2pts(domain.get_name() + '.sww',
            quantity='elevation',
            data_points=points,
            NODATA_value=NODATA_value)
    #ref_point_values = [-0.5, -0.5, -1, -1, -1, -1, -1.5, -1.5]    #At centroids

    # Reference values at centroids for the DE0 flow algorithm.
    ref_point_values = [-0.77777777, -0.77777777,
                        -0.99999998, -0.99999998,
                        -0.99999998, -0.99999998,
                        -1.22222221, -1.22222221]
    point_values = Geospatial_data(ptsfile).get_attributes()
    #print 'P', point_values
    #print 'Ref', ref_point_values
    assert num.allclose(point_values, ref_point_values)

    fid.close()

    #Cleanup
    os.remove(sww.filename)
    os.remove(ptsfile)
def setUp(self):
    """Build the mesh-dictionary fixtures used by the tests.

    Each fixture is a dict in the generic mesh format with both the
    triangulation (vertices/triangles/segments) and the outline
    (points/outline_segments/holes/regions) parts:

    - self.dict:               full mesh, two vertex attributes
    - self.dict_1:             as self.dict but one vertex attribute
    - self.sparse_dict:        outline points only, no triangulation
    - self.blank_dict:         everything empty
    - self.tri_dict:           a single triangle
    - self.seg_dict:           a single segment plus a region
    - self.reg_dict:           as seg_dict but no region max area
    - self.triangle_tags_dict: as self.dict with mixed triangle tags
    """
    # Full mesh with two vertex attributes ('bed elevation', 'height').
    self.dict = {}
    self.dict['outline_segments'] = [(0, 1), (1, 2), (0, 2), (0, 3)]
    self.dict['outline_segment_tags'] = ['50', '40', '30', '20']
    self.dict['holes'] = [(0.2, 0.6)]
    self.dict['point_attributes'] = [[5, 2], [4, 2], [3, 2], [2, 2]]
    self.dict['regions'] = [(0.3, 0.3), (0.3, 0.4)]
    self.dict['region_tags'] = ['1.3', 'yeah']
    self.dict['region_max_areas'] = [36.0, -7.1]
    self.dict['points'] = [(0.0, 0.0), (0.0, 4.0), (4.0, 0.0),
                           (1.0, 1.0)]
    self.dict['vertices'] = [(0.0, 0.0), (0.0, 4.0), (4.0, 0.0),
                             (1.0, 1.0), (2.0, 2.0)]
    self.dict['triangles'] = [(3, 2, 4), (1, 0, 3), (3, 4, 1),
                              (2, 3, 0)]
    self.dict['segments'] = [(0, 1), (1, 4), (2, 0), (0, 3), (4, 2)]
    self.dict['triangle_tags'] = ['1.3', '1.3', '1.3', '1.3']
    self.dict['vertex_attributes'] = [[1.2, 2.], [1.2, 2.], [1.2, 2.],
                                      [1.2, 2.], [1.2, 3.]]
    self.dict['triangle_neighbors'] = [[-1, 2, 3], [3, 2, -1],
                                       [-1, 1, 0], [1, -1, 0]]
    self.dict['segment_tags'] = ['50', '40', '30', '20', '40']
    self.dict['vertex_attribute_titles'] = ['bed elevation', 'height']
    self.dict['geo_reference'] = Geo_reference(56, 1.9, 1.9)

    # Same mesh, but a single attribute per point/vertex.
    self.dict_1 = {}
    self.dict_1['outline_segments'] = [(0, 1), (1, 2), (0, 2), (0, 3)]
    self.dict_1['outline_segment_tags'] = ['50', '40', '30', '20']
    self.dict_1['holes'] = [(0.2, 0.6)]
    self.dict_1['point_attributes'] = [[5], [4], [3], [2]]
    self.dict_1['regions'] = [(0.3, 0.3), (0.3, 0.4)]
    self.dict_1['region_tags'] = ['1.3', 'yeah']
    self.dict_1['region_max_areas'] = [36.0, -7.1]
    self.dict_1['points'] = [(0.0, 0.0), (0.0, 4.0), (4.0, 0.0),
                             (1.0, 1.0)]
    self.dict_1['vertices'] = [(0.0, 0.0), (0.0, 4.0), (4.0, 0.0),
                               (1.0, 1.0), (2.0, 2.0)]
    self.dict_1['triangles'] = [(3, 2, 4), (1, 0, 3), (3, 4, 1),
                                (2, 3, 0)]
    self.dict_1['segments'] = [(0, 1), (1, 4), (2, 0), (0, 3), (4, 2)]
    self.dict_1['triangle_tags'] = ['1.3', '1.3', '1.3', '1.3']
    self.dict_1['vertex_attributes'] = [[1.2], [1.2], [1.2], [1.2],
                                        [1.2]]
    self.dict_1['triangle_neighbors'] = [[-1, 2, 3], [3, 2, -1],
                                         [-1, 1, 0], [1, -1, 0]]
    self.dict_1['segment_tags'] = ['50', '40', '30', '20', '40']
    self.dict_1['vertex_attribute_titles'] = ['height']
    self.dict_1['geo_reference'] = Geo_reference(56, 1.9, 1.9)

    # Outline points only; no segments, triangles or regions.
    self.sparse_dict = {}
    self.sparse_dict['outline_segments'] = []
    self.sparse_dict['outline_segment_tags'] = []
    self.sparse_dict['holes'] = []
    self.sparse_dict['points'] = [(0.0, 0.0), (9, 8)]
    self.sparse_dict['point_attributes'] = [[], []]  # points don't have to
                                                     # have attributes
    self.sparse_dict['regions'] = []
    self.sparse_dict['region_tags'] = []
    self.sparse_dict['region_max_areas'] = []

    self.sparse_dict['vertices'] = []
    self.sparse_dict['triangles'] = []
    self.sparse_dict['segments'] = []
    self.sparse_dict['triangle_tags'] = []
    self.sparse_dict['vertex_attributes'] = []
    self.sparse_dict['triangle_neighbors'] = []
    self.sparse_dict['segment_tags'] = []
    self.sparse_dict['vertex_attribute_titles'] = []

    # Completely empty mesh dictionary.
    self.blank_dict = {}
    self.blank_dict['outline_segments'] = []
    self.blank_dict['outline_segment_tags'] = []
    self.blank_dict['holes'] = []
    self.blank_dict['points'] = []
    self.blank_dict['point_attributes'] = []
    self.blank_dict['regions'] = []
    self.blank_dict['region_tags'] = []
    self.blank_dict['region_max_areas'] = []
    self.blank_dict['vertices'] = []
    self.blank_dict['triangles'] = []
    self.blank_dict['segments'] = []
    self.blank_dict['triangle_tags'] = []
    self.blank_dict['vertex_attributes'] = []
    self.blank_dict['triangle_neighbors'] = []
    self.blank_dict['segment_tags'] = []
    self.blank_dict['vertex_attribute_titles'] = []

    # Minimal mesh containing a single triangle.
    self.tri_dict = {}
    self.tri_dict['outline_segments'] = [[0, 1]]
    self.tri_dict['outline_segment_tags'] = ['']
    self.tri_dict['holes'] = []
    self.tri_dict['points'] = [(9, 8), (7, 8)]
    self.tri_dict['point_attributes'] = [[], []]
    self.tri_dict['regions'] = []
    self.tri_dict['region_tags'] = []
    self.tri_dict['region_max_areas'] = []
    self.tri_dict['vertices'] = [[9, 8], [7, 8], [4, 5]]
    self.tri_dict['triangles'] = [[0, 1, 2]]
    self.tri_dict['segments'] = [[0, 1]]
    self.tri_dict['triangle_tags'] = ['']
    self.tri_dict['vertex_attributes'] = None
    self.tri_dict['triangle_neighbors'] = [[0, 0, 0]]
    self.tri_dict['segment_tags'] = ['']
    self.tri_dict['vertex_attribute_titles'] = []

    # Single segment plus one region (with a placeholder max area).
    self.seg_dict = {}
    self.seg_dict['outline_segments'] = [[0, 1]]
    self.seg_dict['outline_segment_tags'] = ['']
    self.seg_dict['holes'] = []
    self.seg_dict['points'] = [(9, 8), (7, 8)]
    self.seg_dict['point_attributes'] = [[], []]
    self.seg_dict['regions'] = [(5, 4)]
    self.seg_dict['region_tags'] = ['']
    self.seg_dict['region_max_areas'] = [-999]
    self.seg_dict['vertices'] = [(9, 8), (7, 8)]
    self.seg_dict['triangles'] = []
    self.seg_dict['segments'] = [[0, 1]]
    self.seg_dict['triangle_tags'] = []
    self.seg_dict['vertex_attributes'] = None
    self.seg_dict['triangle_neighbors'] = []
    self.seg_dict['segment_tags'] = ['']
    self.seg_dict['vertex_attribute_titles'] = []

    # As seg_dict, but with an empty region_max_areas list.
    self.reg_dict = {}
    self.reg_dict['outline_segments'] = [[0, 1]]
    self.reg_dict['outline_segment_tags'] = ['']
    self.reg_dict['holes'] = []
    self.reg_dict['points'] = [(9, 8), (7, 8)]
    self.reg_dict['point_attributes'] = [[], []]
    self.reg_dict['regions'] = [(5, 4)]
    self.reg_dict['region_tags'] = ['']
    self.reg_dict['region_max_areas'] = []
    self.reg_dict['vertices'] = [(9, 8), (7, 8)]
    self.reg_dict['triangles'] = []
    self.reg_dict['segments'] = [[0, 1]]
    self.reg_dict['triangle_tags'] = []
    self.reg_dict['vertex_attributes'] = [[], []]
    self.reg_dict['triangle_neighbors'] = []
    self.reg_dict['segment_tags'] = ['']
    self.reg_dict['vertex_attribute_titles'] = []

    # As self.dict, but with non-uniform triangle tags (incl. empty).
    self.triangle_tags_dict = {}
    self.triangle_tags_dict['outline_segments'] = [(0, 1), (1, 2),
                                                   (0, 2), (0, 3)]
    self.triangle_tags_dict['outline_segment_tags'] = ['50', '40',
                                                       '30', '20']
    self.triangle_tags_dict['holes'] = [(0.2, 0.6)]
    self.triangle_tags_dict['point_attributes'] = [[5, 2], [4, 2],
                                                   [3, 2], [2, 2]]
    self.triangle_tags_dict['regions'] = [(0.3, 0.3), (0.3, 0.4)]
    self.triangle_tags_dict['region_tags'] = ['1.3', 'yeah']
    self.triangle_tags_dict['region_max_areas'] = [36.0, -7.1]
    self.triangle_tags_dict['points'] = [(0.0, 0.0), (0.0, 4.0),
                                         (4.0, 0.0), (1.0, 1.0)]
    self.triangle_tags_dict['vertices'] = [(0.0, 0.0), (0.0, 4.0),
                                           (4.0, 0.0), (1.0, 1.0),
                                           (2.0, 2.0)]
    self.triangle_tags_dict['triangles'] = [(3, 2, 4), (1, 0, 3),
                                            (3, 4, 1), (2, 3, 0)]
    self.triangle_tags_dict['segments'] = [(0, 1), (1, 4), (2, 0),
                                           (0, 3), (4, 2)]
    self.triangle_tags_dict['triangle_tags'] = ['yeah', '1.3',
                                                '1.3', '']
    self.triangle_tags_dict['vertex_attributes'] = [[1.2, 2.],
                                                    [1.2, 2.],
                                                    [1.2, 2.],
                                                    [1.2, 2.],
                                                    [1.2, 3.]]
    self.triangle_tags_dict['triangle_neighbors'] = [[-1, 2, 3],
                                                     [3, 2, -1],
                                                     [-1, 1, 0],
                                                     [1, -1, 0]]
    self.triangle_tags_dict['segment_tags'] = ['50', '40', '30',
                                               '20', '40']
    self.triangle_tags_dict['vertex_attribute_titles'] = \
        ['bed elevation', 'height']
    self.triangle_tags_dict['geo_reference'] = Geo_reference(56, 1.9,
                                                             1.9)
def concept_ungenerateII(self):
    """Concept run: build a mesh with interior regions, merge in an
    ungenerate (MapInfo-style) outline file, then evolve a shallow
    water domain on the resulting mesh with reflective walls and a
    Dirichlet wave boundary.
    """
    from anuga import Domain, Reflective_boundary, Dirichlet_boundary

    # Mesh geo-reference (origin at 0, 0 here).
    x = 0
    y = 0
    mesh_geo = geo_reference = Geo_reference(56, x, y)

    # These are the absolute values
    polygon_absolute = [[0, 0], [100, 0], [100, 100], [0, 100]]

    # Polygon geo-reference; input polygons are relative to this.
    x_p = -10
    y_p = -40
    geo_ref_poly = Geo_reference(56, x_p, y_p)
    polygon = geo_ref_poly.change_points_geo_ref(polygon_absolute)

    boundary_tags = {'wall': [0, 1, 3], 'wave': [2]}

    # Two interior regions with their own resolutions.
    inner1_polygon_absolute = [[10, 10], [20, 10], [20, 20], [10, 20]]
    inner1_polygon = geo_ref_poly.\
        change_points_geo_ref(inner1_polygon_absolute)

    inner2_polygon_absolute = [[30, 30], [40, 30], [40, 40], [30, 40]]
    inner2_polygon = geo_ref_poly.\
        change_points_geo_ref(inner2_polygon_absolute)

    max_area = 1
    interior_regions = [(inner1_polygon, 5), (inner2_polygon, 10)]
    m = create_mesh_from_regions(polygon,
                                 boundary_tags,
                                 max_area,
                                 interior_regions=interior_regions,
                                 poly_geo_reference=geo_ref_poly,
                                 mesh_geo_reference=mesh_geo)

    m.export_mesh_file('a_test_mesh_iknterface.tsh')

    # Write a temporary ungenerate file: two polylines terminated by
    # END, with a final END closing the file.
    fileName = tempfile.mktemp('.txt')
    file = open(fileName, 'w')
    file.write(' 1 ?? ??\n\
 90.0 90.0\n\
 81.0 90.0\n\
 81.0 81.0\n\
 90.0 81.0\n\
 90.0 90.0\n\
END\n\
 2 ?? ??\n\
 10.0 80.0\n\
 10.0 90.0\n\
 20.0 90.0\n\
 10.0 80.0\n\
END\n\
END\n')
    file.close()

    m.import_ungenerate_file(fileName)    #, tag='wall')
    os.remove(fileName)
    m.generate_mesh(maximum_triangle_area=max_area, verbose=False)
    mesh_filename = 'bento_b.tsh'
    m.export_mesh_file(mesh_filename)

    domain = Domain(mesh_filename, use_cache=False)

    Br = Reflective_boundary(domain)
    Bd = Dirichlet_boundary([3, 0, 0])
    domain.set_boundary({'wall': Br, 'wave': Bd})
    yieldstep = 0.1
    finaltime = 10
    for t in domain.evolve(yieldstep, finaltime):
        domain.write_time()
def test_get_maximum_inundation_de0(self): """Test that sww information can be converted correctly to maximum runup elevation and location (without and with georeferencing) This test creates a slope and a runup which is maximal (~11m) at around 10s and levels out to the boundary condition (1m) at about 30s. """ import time, os from anuga.file.netcdf import NetCDFFile verbose = False #Setup #from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular # Create basic mesh (100m x 100m) points, vertices, boundary = rectangular(20, 5, 100, 50) # Create shallow water domain domain = Domain(points, vertices, boundary) domain.set_flow_algorithm('DE0') domain.set_low_froude(0) domain.set_minimum_storable_height(0.01) filename = 'runup_test_3' domain.set_name(filename) swwfile = domain.get_name() + '.sww' domain.set_datadir('.') domain.format = 'sww' domain.smooth = True # FIXME (Ole): Backwards compatibility # Look at sww file and see what happens when # domain.tight_slope_limiters = 1 domain.tight_slope_limiters = 0 domain.use_centroid_velocities = 0 # Backwards compatibility (7/5/8) Br = Reflective_boundary(domain) Bd = Dirichlet_boundary([1.0, 0, 0]) #---------- First run without geo referencing domain.set_quantity('elevation', lambda x, y: -0.2 * x + 14) # Slope domain.set_quantity('stage', -6) domain.set_boundary({'left': Br, 'right': Bd, 'top': Br, 'bottom': Br}) for t in domain.evolve(yieldstep=1, finaltime=50): pass # Check maximal runup runup, location, max_time = get_maximum_inundation_data( swwfile, return_time=True) if verbose: print('Runup, location', runup, location, max_time) assert num.allclose(runup, 3.33333325386) assert num.allclose(location, [53.333332, 43.333332]) assert num.allclose(max_time, 10.0) # Check runup in restricted time interval runup, location, max_time = get_maximum_inundation_data( swwfile, time_interval=[0, 9], return_time=True) if verbose: print('Runup, location:', runup, location, max_time) assert num.allclose(runup, 
2.66666674614) assert num.allclose(location, [56.666668, 16.666666]) assert num.allclose(max_time, 9.0) # Check final runup runup, location = get_maximum_inundation_data(swwfile, time_interval=[45, 50]) if verbose: print('Runup, location:', runup, location, max_time) assert num.allclose(runup, 3.33333325386) assert num.allclose(location, [53.333332, 33.333332]) #assert num.allclose(max_time, 45.0) # Check runup restricted to a polygon p = [[50, 1], [99, 1], [99, 40], [50, 40]] runup, location = get_maximum_inundation_data(swwfile, polygon=p) #runup = get_maximum_inundation_elevation(swwfile, polygon=p) #location = get_maximum_inundation_location(swwfile, polygon=p) #print runup, location, max_time assert num.allclose(runup, 3.33333325386) assert num.allclose(location, [53.333332, 33.333332]) #assert num.allclose(max_time, 11.0) # Check that mimimum_storable_height works fid = NetCDFFile(swwfile, netcdf_mode_r) # Open existing file stage = fid.variables['stage_c'][:] z = fid.variables['elevation_c'][:] xmomentum = fid.variables['xmomentum_c'][:] ymomentum = fid.variables['ymomentum_c'][:] for i in range(stage.shape[0]): h = stage[i] - z # depth vector at time step i # Check every node location for j in range(stage.shape[1]): # Depth being either exactly zero implies # momentum being zero. 
# Or else depth must be greater than or equal to # the minimal storable height if h[j] == 0.0: assert xmomentum[i, j] == 0.0 assert ymomentum[i, j] == 0.0 else: assert h[j] >= 0.0 fid.close() # Cleanup os.remove(swwfile) #------------- Now the same with georeferencing domain.time = 0.0 E = 308500 N = 6189000 #E = N = 0 domain.geo_reference = Geo_reference(56, E, N) domain.set_quantity('elevation', lambda x, y: -0.2 * x + 14) # Slope domain.set_quantity('stage', -6) domain.set_boundary({'left': Br, 'right': Bd, 'top': Br, 'bottom': Br}) for t in domain.evolve(yieldstep=1, finaltime=50): pass # Check maximal runup runup, location = get_maximum_inundation_data(swwfile) #print 'Runup, location', runup, location, max_time assert num.allclose(runup, 3.33333325386) assert num.allclose(location, [53.333332 + E, 43.333332 + N]) #assert num.allclose(max_time, 10.0) # Check runup in restricted time interval runup, location = get_maximum_inundation_data(swwfile, time_interval=[0, 9]) #print 'Runup, location:',runup, location, max_time assert num.allclose(runup, 2.66666674614) assert num.allclose(location, [56.666668 + E, 16.666666 + N]) #assert num.allclose(max_time, 9.0) # Check final runup runup, location = get_maximum_inundation_data(swwfile, time_interval=[45, 50]) #print 'Runup, location:',runup, location, max_time assert num.allclose(runup, 3.33333325386) assert num.allclose(location, [53.333332 + E, 33.333332 + N]) #assert num.allclose(max_time, 45.0) # Check runup restricted to a polygon p = num.array([[50, 1], [99, 1], [99, 40], [50, 40]], num.int) + num.array([E, N], num.int) runup, location = get_maximum_inundation_data(swwfile, polygon=p) #print runup, location, max_time assert num.allclose(runup, 3.33333325386) assert num.allclose(location, [53.333332 + E, 33.333332 + N]) #assert num.allclose(max_time, 11.0) # Cleanup os.remove(swwfile)