def set_values_NNeigh(self, name, filename):
    """Set quantity 'name' at unique vertices from raster 'filename'.

    Uses nearest-neighbour lookup: the exact raster value at each mesh
    node is taken; no interpolation between raster cells is performed.
    Do not set at vertices or edges - not used in veg calculations.
    """
    # BUG FIX: 'coord' was referenced below but its assignment had been
    # commented out, which makes this method raise NameError.  Restore
    # the node coordinates in absolute (georeferenced) form.
    coord = self.domain.get_nodes(absolute=True)

    # extract the data from the pts file
    G_data = Geospatial_data(filename)
    points = G_data.get_data_points(absolute=True)
    z = G_data.get_attributes(attribute_name=None)

    # create interpolator
    interp = NearestNDInterpolator(points, z)

    # sample the raster at the mesh nodes and reshape to a column vector
    # as expected by set_values_from_array
    z = interp(coord)
    z = z[num.newaxis, :].transpose()

    self.domain.quantities[name].set_values_from_array(
        z, location="unique vertices")
def points_needed(seg, ll_lat, ll_long, grid_spacing,
                  lat_amount, long_amount, zone, isSouthHemisphere):
    """
    seg is two points, in UTM
    return a list of the points, in lats and longs that are needed to
    interpolate any point on the segment.
    """
    # Convert the UTM segment end points to latitude/longitude.
    geo_reference = Geo_reference(zone=zone)
    geo = Geospatial_data(seg, geo_reference=geo_reference)
    seg_lat_long = geo.get_data_points(as_lat_long=True,
                                       isSouthHemisphere=isSouthHemisphere)

    # Pad the segment's bounding box by one grid diagonal
    # (1.415 = 2^0.5, rounded up).
    pad = 1.415 * grid_spacing
    lat_hi = max(seg_lat_long[0][0], seg_lat_long[1][0]) + pad
    long_hi = max(seg_lat_long[0][1], seg_lat_long[1][1]) + pad
    lat_lo = min(seg_lat_long[0][0], seg_lat_long[1][0]) - pad
    long_lo = min(seg_lat_long[0][1], seg_lat_long[1][1]) - pad

    # Convert the padded box to inclusive grid-index ranges:
    # lower bounds round up, upper bounds round down.
    first_long = int(round((long_lo - ll_long) / grid_spacing + 0.5))
    last_long = int(round((long_hi - ll_long) / grid_spacing))
    first_lat = int(round((lat_lo - ll_lat) / grid_spacing + 0.5))
    last_lat = int(round((lat_hi - ll_lat) / grid_spacing))

    max_distance = 157147.4112 * grid_spacing

    # Collect the grid points close enough to the segment.
    # Tuples are used so the points are hashable.
    points_lat_long = [
        (ll_lat + i_lat * grid_spacing, ll_long + i_long * grid_spacing)
        for i_lat in range(first_lat, last_lat + 1)
        for i_long in range(first_long, last_long + 1)
        if keep_point(ll_lat + i_lat * grid_spacing,
                      ll_long + i_long * grid_spacing,
                      seg, max_distance)
    ]
    return points_lat_long
def points_needed(seg, ll_lat, ll_long, grid_spacing, lat_amount, long_amount, zone, isSouthHemisphere):
    """
    seg is two points, in UTM
    return a list of the points, in lats and longs that are needed to
    interpolate any point on the segment.
    """
    # NOTE(review): 'lat_amount' and 'long_amount' are accepted but never
    # used in this body - presumably kept for interface compatibility;
    # confirm against callers before removing.
    from math import sqrt

    # Convert the UTM segment end points to latitude/longitude.
    geo_reference = Geo_reference(zone=zone)
    geo = Geospatial_data(seg, geo_reference=geo_reference)
    seg_lat_long = geo.get_data_points(as_lat_long=True, isSouthHemisphere=isSouthHemisphere)

    # 1.415 = 2^0.5, rounded up....
    sqrt_2_rounded_up = 1.415
    # Pad the segment's bounding box by one grid diagonal.
    buffer = sqrt_2_rounded_up * grid_spacing

    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer

    # old_div keeps Python-2 division semantics (floor division when both
    # operands are integers) - do not replace with '/' blindly.
    first_row = old_div((min_long - ll_long), grid_spacing)
    # To round up
    first_row_long = int(round(first_row + 0.5))

    last_row = old_div((max_long - ll_long), grid_spacing)  # round down
    last_row_long = int(round(last_row))

    first_row = old_div((min_lat - ll_lat), grid_spacing)
    # To round up
    first_row_lat = int(round(first_row + 0.5))

    last_row = old_div((max_lat - ll_lat), grid_spacing)  # round down
    last_row_lat = int(round(last_row))

    max_distance = 157147.4112 * grid_spacing
    points_lat_long = []

    # Create a list of the lat long points to include.
    for index_lat in range(first_row_lat, last_row_lat + 1):
        for index_long in range(first_row_long, last_row_long + 1):
            lat = ll_lat + index_lat * grid_spacing
            long = ll_long + index_long * grid_spacing

            #filter here to keep good points
            if keep_point(lat, long, seg, max_distance):
                points_lat_long.append((lat, long))  #must be hashable

    # Now that we have these points, lets throw ones out that are too far away
    return points_lat_long
def test_calc_max_depth_and_momentum(self):
    """Check the maximum depths returned at three lat/long sample points."""
    sww_file = "tid"  # self.domain.get_name() + "." + self.domain.format

    sample_lat_long = [[-34, 151.5], [-35.5, 151.5], [-50, 151]]
    geo = Geospatial_data(data_points=sample_lat_long,
                          points_are_lats_longs=True)
    sample_utm = geo.get_data_points(absolute=True)

    deps, _ = calc_max_depth_and_momentum(sww_file,
                                          sample_utm,
                                          verbose=self.verbose,
                                          use_cache=False)

    # Test values based on returned results, so not an excellent test.
    # The last point lies outside both sww files, hence zero depth.
    expected_depths = [0.113204555211, 11.3215, 0.0]
    for i, want in enumerate(expected_depths):
        assert num.allclose(deps[i], want)
def alpha_shape_via_files(point_file, boundary_file, alpha=None):
    """Compute the alpha-shape boundary of a point file and write it out.

    point_file:    input points file (.csv or .pts)
    boundary_file: destination file for the generated boundary
    alpha:         optional alpha value; when omitted the optimum value
                   is determined automatically.
    """
    # absolute=False: keep the points in the file's relative coordinates.
    source = Geospatial_data(point_file)
    shape = Alpha_Shape(source.get_data_points(absolute=False), alpha)
    shape.write_boundary(boundary_file)
def alpha_shape_via_files(point_file, boundary_file, alpha= None):
    """
    Load a point file and return the alpha shape boundary as a boundary file.

    Inputs:
    point_file: File location of the input file, points format (.csv or .pts)
    boundary_file: File location of the generated output file
    alpha: The alpha value can be optionally specified.  If it is not
           specified the optimum alpha value will be used.
    """
    # absolute=False: the points stay in the file's relative coordinates.
    geospatial = Geospatial_data(point_file)
    points = geospatial.get_data_points(absolute=False)

    AS = Alpha_Shape(points, alpha)
    AS.write_boundary(boundary_file)
def test_calc_max_depth_and_momentum(self): sww_file = "tid" # self.domain.get_name() + "." + self.domain.format points_lat_long = [[-34, 151.5],[-35.5, 151.5],[-50, 151]] spat = Geospatial_data(data_points=points_lat_long, points_are_lats_longs=True) points_ab = spat.get_data_points( absolute = True) deps, _ = calc_max_depth_and_momentum(sww_file, points_ab, verbose=self.verbose, use_cache = False) # Test values based on returned results, so not an excellent test assert num.allclose(deps[0],0.113204555211) assert num.allclose(deps[1],11.3215) assert num.allclose(deps[2],0.0) # this value is outside both sww files
def test_fit_to_mesh_UTM_file(self):
    """fit_to_mesh should reproduce a linear function from a UTM .txt file."""
    #Get (enough) datapoints
    data_points = [[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
                   [-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6]]
    data_geo_spatial = Geospatial_data(data_points,
                                       points_are_lats_longs=True)
    points_UTM = data_geo_spatial.get_data_points(absolute=True)
    attributes = linear_function(points_UTM)
    att = 'elevation'

    # Create .txt file.  Use a context manager so the handle is closed
    # even if a write fails, and avoid shadowing the builtin 'file'.
    txt_file = tempfile.mktemp(".txt")
    with open(txt_file, "w") as fd:
        fd.write(" x,y," + att + " \n")
        for data_point, attribute in zip(points_UTM, attributes):
            row = (str(data_point[0]) + ',' + str(data_point[1])
                   + ',' + str(attribute))
            fd.write(row + "\n")

    # setting up the mesh
    a = [240000, 7620000]
    b = [240000, 7680000]
    c = [300000, 7620000]
    points = [a, b, c]
    elements = [[0, 2, 1]]
    f = fit_to_mesh(txt_file, points, elements,
                    alpha=0.0, max_read_lines=2)
    answer = linear_function(points)
    assert num.allclose(f, answer)

    # Delete file!
    os.remove(txt_file)
def test_fit_to_mesh_UTM_file(self):
    """fit_to_mesh should reproduce a linear function from a UTM .txt file."""
    #Get (enough) datapoints
    data_points = [[-21.5, 114.5],[-21.4, 114.6],[-21.45,114.65],
                   [-21.35, 114.65],[-21.45, 114.55],[-21.45,114.6]]
    data_geo_spatial = Geospatial_data(data_points,
                                       points_are_lats_longs=True)
    points_UTM = data_geo_spatial.get_data_points(absolute=True)
    attributes = linear_function(points_UTM)
    att = 'elevation'

    #Create .txt file
    txt_file = tempfile.mktemp(".txt")
    # BUG FIX: map(None, a, b) is a Python-2 idiom (zip padded with None)
    # and raises TypeError on Python 3; zip() is equivalent here since
    # both sequences have the same length.  Also use a context manager
    # instead of shadowing the builtin 'file'.
    with open(txt_file, "w") as fd:
        fd.write(" x,y," + att + " \n")
        for data_point, attribute in zip(points_UTM, attributes):
            row = str(data_point[0]) + ',' + str(data_point[1]) \
                  + ',' + str(attribute)
            fd.write(row + "\n")

    # setting up the mesh
    a = [240000, 7620000]
    b = [240000, 7680000]
    c = [300000, 7620000]
    points = [a, b, c]
    elements = [[0,2,1]]
    f = fit_to_mesh(txt_file, points, elements,
                    alpha=0.0, max_read_lines=2)
    answer = linear_function(points)
    assert num.allclose(f, answer)

    # Delete file!
    os.remove(txt_file)
def set_values_NNeigh(self, name, filename):
    """Set quantity 'name' at unique vertices from raster 'filename'.

    Uses nearest-neighbour lookup: the exact raster value at each mesh
    node is taken; no interpolation between raster cells is performed.
    Do not set at vertices or edges - not used in veg calculations.
    """
    # BUG FIX: 'coord' was referenced below but its assignment had been
    # commented out, which makes this method raise NameError.  Restore
    # the node coordinates in absolute (georeferenced) form.
    coord = self.get_nodes(absolute=True)

    # extract the data from the pts file
    G_data = Geospatial_data(filename)
    points = G_data.get_data_points(absolute=True)
    z = G_data.get_attributes(attribute_name=None)

    # create interpolator
    interp = NearestNDInterpolator(points, z)

    # sample the raster at the mesh nodes and reshape to a column vector
    # as expected by set_values_from_array
    z = interp(coord)
    z = z[np.newaxis, :].transpose()

    self.quantities[name].set_values_from_array(z,
                                                location='unique vertices')
old_title_list = mesh_dict['vertex_attribute_titles'].tolist() else: old_title_list = mesh_dict['vertex_attribute_titles'] if verbose: log.critical('tsh file %s loaded' % mesh_file) # load in the points file try: geo = Geospatial_data(point_file, verbose=verbose) except IOError, e: if display_errors: log.critical("Could not load bad file: %s" % str(e)) raise IOError #Re-raise exception point_coordinates = geo.get_data_points(absolute=True) title_list, point_attributes = concatinate_attributelist( \ geo.get_all_attributes()) if mesh_dict.has_key('geo_reference') and \ not mesh_dict['geo_reference'] is None: mesh_origin = mesh_dict['geo_reference'].get_origin() else: mesh_origin = None if verbose: log.critical("points file loaded") if verbose: log.critical("fitting to mesh") f = fit_to_mesh(point_coordinates, vertex_coordinates,
def test_URS_points_northern_hemisphere(self):
    """calculate_boundary_points must cover known grid points (northern hemisphere)."""
    LL_LAT = 8.0
    LL_LONG = 97.0
    GRID_SPACING = 2.0/60.0
    LAT_AMOUNT = 2
    LONG_AMOUNT = 2
    ZONE = 47

    # Build a small polygon of lat/long points.
    # NOTE(review): 'j' is unused in this loop, so each (lat, long) pair is
    # appended twice - presumably the longitude term was meant to vary with
    # 'j'; confirm before changing.
    points = []
    for i in range(2):
        for j in range(2):
            points.append((degminsec2decimal_degrees(8,1+i*2,0),
                           degminsec2decimal_degrees(97,1+i*2,0)))
    #print "points", points
    geo_poly = Geospatial_data(data_points=points,
                               points_are_lats_longs=True)
    poly_lat_long = geo_poly.get_data_points(as_lat_long=False,
                                             isSouthHemisphere=False)
    #print "seg_lat_long", poly_lat_long
    #  geo=URS_points_needed_to_file('test_example_poly3', poly_lat_long,
    #                                 ZONE,
    #                                 LL_LAT, LL_LONG,
    #                                 GRID_SPACING,
    #                                 LAT_AMOUNT, LONG_AMOUNT,
    #                                 isSouthernHemisphere=False,
    #                                 export_csv=True,
    #                                 verbose=self.verbose)
    geo=calculate_boundary_points(poly_lat_long, ZONE,
                                  LL_LAT, LL_LONG,
                                  GRID_SPACING,
                                  LAT_AMOUNT, LONG_AMOUNT,
                                  isSouthHemisphere=False,
                                  verbose=self.verbose)
    # frozenset gives cheap duplicate removal; membership is still checked
    # with num.allclose below because the values are floats.
    results = frozenset(geo.get_data_points(as_lat_long=True,
                                            isSouthHemisphere=False))
    #print 'results',results

    # These are a set of points that have to be in results
    points = []
    for i in range(2):
        for j in range(2):
            points.append((degminsec2decimal_degrees(8,i*2,0),
                           degminsec2decimal_degrees(97,i*2,0)))
    #print "answer points", points
    # NOTE(review): 'answer' is never used - the loop below checks
    # 'points' directly.
    answer = frozenset(points)

    # Every expected point must be numerically close to some result.
    for point in points:
        found = False
        for result in results:
            if num.allclose(point, result):
                found = True
                break
        if not found:
            assert False
def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
                     alpha=DEFAULT_ALPHA, verbose=False,
                     expand_search=False, precrop=False,
                     display_errors=True):
    """
    Given a mesh file (tsh) and a point attribute file, fit
    point attributes to the mesh and write a mesh file with the
    results.

    Note: the points file needs titles.  If you want anuga to use the
    tsh file, make sure the title is elevation.

    NOTE: Throws IOErrors, for a variety of file problems.

    NOTE(review): 'expand_search' and 'precrop' are accepted but not used
    in this body - presumably kept for interface compatibility.
    """
    from anuga.load_mesh.loadASCII import import_mesh_file, \
         export_mesh_file, concatinate_attributelist

    try:
        mesh_dict = import_mesh_file(mesh_file)
    except IOError as e:
        if display_errors:
            log.critical("Could not load bad file: %s" % str(e))
        # BUG FIX: 'raise IOError' replaced the caught exception with an
        # empty one, discarding the file name and error details.  A bare
        # 'raise' re-raises the original IOError (same type for callers).
        raise

    vertex_coordinates = mesh_dict['vertices']
    triangles = mesh_dict['triangles']

    # Attribute data may arrive as numpy arrays; normalise to lists so
    # the extend() calls below work uniformly.
    if isinstance(mesh_dict['vertex_attributes'], num.ndarray):
        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
    else:
        old_point_attributes = mesh_dict['vertex_attributes']

    if isinstance(mesh_dict['vertex_attribute_titles'], num.ndarray):
        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
    else:
        old_title_list = mesh_dict['vertex_attribute_titles']

    if verbose:
        log.critical('tsh file %s loaded' % mesh_file)

    # load in the points file
    try:
        geo = Geospatial_data(point_file, verbose=verbose)
    except IOError as e:
        if display_errors:
            log.critical("Could not load bad file: %s" % str(e))
        raise  # Re-raise, keeping the original message and traceback

    point_coordinates = geo.get_data_points(absolute=True)
    title_list, point_attributes = concatinate_attributelist(
        geo.get_all_attributes())

    if 'geo_reference' in mesh_dict and \
           not mesh_dict['geo_reference'] is None:
        mesh_origin = mesh_dict['geo_reference'].get_origin()
    else:
        mesh_origin = None

    if verbose:
        log.critical("points file loaded")
    if verbose:
        log.critical("fitting to mesh")

    f = fit_to_mesh(point_coordinates,
                    vertex_coordinates,
                    triangles,
                    None,
                    point_attributes,
                    alpha=alpha,
                    verbose=verbose,
                    data_origin=None,
                    mesh_origin=mesh_origin)
    if verbose:
        log.critical("finished fitting to mesh")

    # convert array to list of lists
    new_point_attributes = f.tolist()

    # FIXME have this overwrite attributes with the same title - DSG
    # Put the newer attributes last
    if old_title_list != []:
        old_title_list.extend(title_list)
        # FIXME can this be done a faster way? - DSG
        for i in range(len(old_point_attributes)):
            old_point_attributes[i].extend(new_point_attributes[i])
        mesh_dict['vertex_attributes'] = old_point_attributes
        mesh_dict['vertex_attribute_titles'] = old_title_list
    else:
        mesh_dict['vertex_attributes'] = new_point_attributes
        mesh_dict['vertex_attribute_titles'] = title_list

    if verbose:
        log.critical("exporting to file %s" % mesh_output_file)

    try:
        export_mesh_file(mesh_output_file, mesh_dict)
    except IOError as e:
        if display_errors:
            log.critical("Could not write file %s", str(e))
        # BUG FIX: same as above - re-raise the original exception.
        raise
def setUp(self):
    """Build two overlapping sww files and two csv exposure files.

    Creates self.domain/self.domain2 (shallow-water domains over lat/long
    boxes near Wollongong), writes them to 'tid_P0'/'tid_P1' sww files,
    and writes two identical csv exposure files (self.csv_file,
    self.csv_fileII).
    """
    # Create an sww file
    # Set up an sww that has a geo ref.
    # have it cover an area in Australia. 'gong maybe
    # Don't have many triangles though!

    # Site Name: GDA-MGA: (UTM with GRS80 ellipsoid)
    # Zone: 56
    # Easting: 222908.705  Northing: 6233785.284
    # Latitude: -34 0' 0.00000''  Longitude: 150 0' 0.00000''
    # Grid Convergence: -1 40' 43.13''  Point Scale: 1.00054660

    # geo-ref
    # Zone: 56
    # Easting: 220000  Northing: 6230000

    # have a big area covered.
    mesh_file = tempfile.mktemp(".tsh")
    points_lat_long = [[-33, 152], [-35, 152], [-35, 150], [-33, 150]]
    spat = Geospatial_data(data_points=points_lat_long,
                           points_are_lats_longs=True)
    points_ab = spat.get_data_points(absolute=True)

    geo = Geo_reference(56, 400000, 6000000)
    spat.set_geo_reference(geo)

    m = Mesh()
    m.add_vertices(spat)
    m.auto_segment()
    m.generate_mesh(verbose=False)
    m.export_mesh_file(mesh_file)

    # Create shallow water domain
    domain = Domain(mesh_file)
    os.remove(mesh_file)

    domain.default_order = 2

    # Set some field values
    # domain.set_quantity('stage', 1.0)
    domain.set_quantity("elevation", -0.5)
    domain.set_quantity("friction", 0.03)

    ######################
    # Boundary conditions
    B = Transmissive_boundary(domain)
    domain.set_boundary({"exterior": B})

    ######################
    # Initial condition - with jumps
    bed = domain.quantities["elevation"].vertex_values
    # BUG FIX: num.float was removed in NumPy 1.20+; the builtin float
    # is equivalent (float64).
    stage = num.zeros(bed.shape, float)

    h = 0.3
    for i in range(stage.shape[0]):
        if i % 2 == 0:
            stage[i, :] = bed[i, :] + h
        else:
            stage[i, :] = bed[i, :]

    domain.set_quantity("stage", stage)
    domain.set_quantity("xmomentum", stage * 22.0)
    domain.set_quantity("ymomentum", stage * 55.0)

    domain.distribute_to_vertices_and_edges()

    self.domain = domain

    C = domain.get_vertex_coordinates()
    self.X = C[:, 0:6:2].copy()
    self.Y = C[:, 1:6:2].copy()
    self.F = bed

    # sww_file = tempfile.mktemp("")
    self.domain.set_name("tid_P0")
    self.domain.format = "sww"
    self.domain.smooth = True
    self.domain.reduction = mean

    sww = SWW_file(self.domain)
    sww.store_connectivity()
    sww.store_timestep()
    self.domain.time = 2.0
    sww.store_timestep()
    self.sww = sww  # so it can be deleted

    # Create another sww file
    mesh_file = tempfile.mktemp(".tsh")
    points_lat_long = [[-35, 152], [-36, 152], [-36, 150], [-35, 150]]
    spat = Geospatial_data(data_points=points_lat_long,
                           points_are_lats_longs=True)
    points_ab = spat.get_data_points(absolute=True)

    geo = Geo_reference(56, 400000, 6000000)
    spat.set_geo_reference(geo)

    m = Mesh()
    m.add_vertices(spat)
    m.auto_segment()
    m.generate_mesh(verbose=False)
    m.export_mesh_file(mesh_file)

    # Create shallow water domain
    domain = Domain(mesh_file)
    os.remove(mesh_file)

    domain.default_order = 2

    # Set some field values
    # domain.set_quantity('stage', 1.0)
    domain.set_quantity("elevation", -40)
    domain.set_quantity("friction", 0.03)

    ######################
    # Boundary conditions
    B = Transmissive_boundary(domain)
    domain.set_boundary({"exterior": B})

    ######################
    # Initial condition - with jumps
    bed = domain.quantities["elevation"].vertex_values
    stage = num.zeros(bed.shape, float)

    h = 30.0
    for i in range(stage.shape[0]):
        if i % 2 == 0:
            stage[i, :] = bed[i, :] + h
        else:
            stage[i, :] = bed[i, :]

    domain.set_quantity("stage", stage)
    domain.set_quantity("xmomentum", stage * 22.0)
    domain.set_quantity("ymomentum", stage * 55.0)

    domain.distribute_to_vertices_and_edges()

    self.domain2 = domain

    C = domain.get_vertex_coordinates()
    self.X2 = C[:, 0:6:2].copy()
    self.Y2 = C[:, 1:6:2].copy()
    self.F2 = bed

    # sww_file = tempfile.mktemp("")
    domain.set_name("tid_P1")
    domain.format = "sww"
    domain.smooth = True
    domain.reduction = mean

    sww = SWW_file(domain)
    sww.store_connectivity()
    sww.store_timestep()
    domain.time = 2.0
    sww.store_timestep()
    self.swwII = sww  # so it can be deleted
    # print "sww.filename", sww.filename

    # Create a csv file.
    # BUG FIX: csv.writer needs a text-mode file on Python 3 ("wb" makes
    # writerow raise TypeError); newline="" prevents blank rows on Windows.
    self.csv_file = tempfile.mktemp(".csv")
    fd = open(self.csv_file, "w", newline="")
    writer = csv.writer(fd)
    writer.writerow(
        ["LONGITUDE", "LATITUDE", STR_VALUE_LABEL, CONT_VALUE_LABEL,
         "ROOF_TYPE", WALL_TYPE_LABEL, SHORE_DIST_LABEL]
    )
    writer.writerow(["151.5", "-34", "199770", "130000",
                     "Metal", "Timber", 20.0])
    writer.writerow(["151", "-34.5", "150000", "76000",
                     "Metal", "Double Brick", 200.0])
    writer.writerow(["151", "-34.25", "150000", "76000",
                     "Metal", "Brick Veneer", 200.0])
    fd.close()

    # Create a second csv file with the same contents.
    self.csv_fileII = tempfile.mktemp(".csv")
    fd = open(self.csv_fileII, "w", newline="")
    writer = csv.writer(fd)
    writer.writerow(
        ["LONGITUDE", "LATITUDE", STR_VALUE_LABEL, CONT_VALUE_LABEL,
         "ROOF_TYPE", WALL_TYPE_LABEL, SHORE_DIST_LABEL]
    )
    writer.writerow(["151.5", "-34", "199770", "130000",
                     "Metal", "Timber", 20.0])
    writer.writerow(["151", "-34.5", "150000", "76000",
                     "Metal", "Double Brick", 200.0])
    writer.writerow(["151", "-34.25", "150000", "76000",
                     "Metal", "Brick Veneer", 200.0])
    fd.close()
old_title_list = mesh_dict['vertex_attribute_titles'].tolist() else: old_title_list = mesh_dict['vertex_attribute_titles'] if verbose: log.critical('tsh file %s loaded' % mesh_file) # load in the points file try: geo = Geospatial_data(point_file, verbose=verbose) except IOError, e: if display_errors: log.critical("Could not load bad file: %s" % str(e)) raise IOError #Re-raise exception point_coordinates = geo.get_data_points(absolute=True) title_list, point_attributes = concatinate_attributelist( \ geo.get_all_attributes()) if mesh_dict.has_key('geo_reference') and \ not mesh_dict['geo_reference'] is None: mesh_origin = mesh_dict['geo_reference'].get_origin() else: mesh_origin = None if verbose: log.critical("points file loaded") if verbose: log.critical("fitting to mesh") f = fit_to_mesh(point_coordinates, vertex_coordinates,
def setUp(self):
    """Build two overlapping sww files and two csv exposure files.

    NOTE(review): this is Python-2 era code - the csv files are opened
    in 'wb' mode and num.float is used; both fail on Python 3 / modern
    NumPy.  Preserved as-is here.
    """
    #print "****set up****"
    # Create an sww file
    # Set up an sww that has a geo ref.
    # have it cover an area in Australia. 'gong maybe
    #Don't have many triangles though!

    #Site Name: GDA-MGA: (UTM with GRS80 ellipsoid)
    #Zone: 56
    #Easting: 222908.705  Northing: 6233785.284
    #Latitude: -34 0 ' 0.00000 ''  Longitude: 150 0 ' 0.00000 ''
    #Grid Convergence: -1 40 ' 43.13 ''  Point Scale: 1.00054660

    #geo-ref
    #Zone: 56
    #Easting: 220000  Northing: 6230000

    #have a big area covered.
    mesh_file = tempfile.mktemp(".tsh")
    points_lat_long = [[-33,152],[-35,152],[-35,150],[-33,150]]
    spat = Geospatial_data(data_points=points_lat_long,
                           points_are_lats_longs=True)
    points_ab = spat.get_data_points( absolute = True)

    geo = Geo_reference(56,400000,6000000)
    spat.set_geo_reference(geo)

    m = Mesh()
    m.add_vertices(spat)
    m.auto_segment()
    m.generate_mesh(verbose=False)
    m.export_mesh_file(mesh_file)

    #Create shallow water domain
    domain = Domain(mesh_file)
    os.remove(mesh_file)

    domain.default_order=2

    #Set some field values
    #domain.set_quantity('stage', 1.0)
    domain.set_quantity('elevation', -0.5)
    domain.set_quantity('friction', 0.03)

    ######################
    # Boundary conditions
    B = Transmissive_boundary(domain)
    domain.set_boundary( {'exterior': B})

    ######################
    #Initial condition - with jumps
    bed = domain.quantities['elevation'].vertex_values
    stage = num.zeros(bed.shape, num.float)

    # Raise the stage above the bed on alternate triangles only.
    h = 0.3
    for i in range(stage.shape[0]):
        if i % 2 == 0:
            stage[i,:] = bed[i,:] + h
        else:
            stage[i,:] = bed[i,:]

    domain.set_quantity('stage', stage)
    domain.set_quantity('xmomentum', stage*22.0)
    domain.set_quantity('ymomentum', stage*55.0)

    domain.distribute_to_vertices_and_edges()

    self.domain = domain

    C = domain.get_vertex_coordinates()
    self.X = C[:,0:6:2].copy()
    self.Y = C[:,1:6:2].copy()
    self.F = bed

    #sww_file = tempfile.mktemp("")
    self.domain.set_name('tid_P0')
    self.domain.format = 'sww'
    self.domain.smooth = True
    self.domain.reduction = mean

    sww = SWW_file(self.domain)
    sww.store_connectivity()
    sww.store_timestep()
    self.domain.time = 2.
    sww.store_timestep()
    self.sww = sww # so it can be deleted

    #Create another sww file
    mesh_file = tempfile.mktemp(".tsh")
    points_lat_long = [[-35,152],[-36,152],[-36,150],[-35,150]]
    spat = Geospatial_data(data_points=points_lat_long,
                           points_are_lats_longs=True)
    points_ab = spat.get_data_points( absolute = True)

    geo = Geo_reference(56,400000,6000000)
    spat.set_geo_reference(geo)

    m = Mesh()
    m.add_vertices(spat)
    m.auto_segment()
    m.generate_mesh(verbose=False)
    m.export_mesh_file(mesh_file)

    #Create shallow water domain
    domain = Domain(mesh_file)
    os.remove(mesh_file)

    domain.default_order=2

    #Set some field values
    #domain.set_quantity('stage', 1.0)
    domain.set_quantity('elevation', -40)
    domain.set_quantity('friction', 0.03)

    ######################
    # Boundary conditions
    B = Transmissive_boundary(domain)
    domain.set_boundary( {'exterior': B})

    ######################
    #Initial condition - with jumps
    bed = domain.quantities['elevation'].vertex_values
    stage = num.zeros(bed.shape, num.float)

    h = 30.
    for i in range(stage.shape[0]):
        if i % 2 == 0:
            stage[i,:] = bed[i,:] + h
        else:
            stage[i,:] = bed[i,:]

    domain.set_quantity('stage', stage)
    domain.set_quantity('xmomentum', stage*22.0)
    domain.set_quantity('ymomentum', stage*55.0)

    domain.distribute_to_vertices_and_edges()

    self.domain2 = domain

    C = domain.get_vertex_coordinates()
    self.X2 = C[:,0:6:2].copy()
    self.Y2 = C[:,1:6:2].copy()
    self.F2 = bed

    #sww_file = tempfile.mktemp("")
    domain.set_name('tid_P1')
    domain.format = 'sww'
    domain.smooth = True
    domain.reduction = mean

    sww = SWW_file(domain)
    sww.store_connectivity()
    sww.store_timestep()
    domain.time = 2.
    sww.store_timestep()
    self.swwII = sww # so it can be deleted

    # print "sww.filename", sww.filename
    #Create a csv file
    self.csv_file = tempfile.mktemp(".csv")
    fd = open(self.csv_file,'wb')
    writer = csv.writer(fd)
    writer.writerow(['LONGITUDE','LATITUDE',STR_VALUE_LABEL,CONT_VALUE_LABEL,'ROOF_TYPE',WALL_TYPE_LABEL, SHORE_DIST_LABEL])
    writer.writerow(['151.5','-34','199770','130000','Metal','Timber',20.])
    writer.writerow(['151','-34.5','150000','76000','Metal','Double Brick',200.])
    writer.writerow(['151','-34.25','150000','76000','Metal','Brick Veneer',200.])
    fd.close()

    #Create a csv file
    self.csv_fileII = tempfile.mktemp(".csv")
    fd = open(self.csv_fileII,'wb')
    writer = csv.writer(fd)
    writer.writerow(['LONGITUDE','LATITUDE',STR_VALUE_LABEL,CONT_VALUE_LABEL,'ROOF_TYPE',WALL_TYPE_LABEL, SHORE_DIST_LABEL])
    writer.writerow(['151.5','-34','199770','130000','Metal','Timber',20.])
    writer.writerow(['151','-34.5','150000','76000','Metal','Double Brick',200.])
    writer.writerow(['151','-34.25','150000','76000','Metal','Brick Veneer',200.])
    fd.close()
def test_URS_points_northern_hemisphere(self):
    """calculate_boundary_points must cover known grid points (northern hemisphere)."""
    LL_LAT = 8.0
    LL_LONG = 97.0
    GRID_SPACING = 2.0 / 60.0
    LAT_AMOUNT = 2
    LONG_AMOUNT = 2
    ZONE = 47

    # Build a small polygon of lat/long points.
    # NOTE(review): only 'i' varies inside the double loop ('j' is
    # unused), so each pair is appended twice - kept as-is to preserve
    # the original behaviour.
    points = []
    for i in range(2):
        for j in range(2):
            points.append((degminsec2decimal_degrees(8, 1 + i * 2, 0),
                           degminsec2decimal_degrees(97, 1 + i * 2, 0)))

    geo_poly = Geospatial_data(data_points=points,
                               points_are_lats_longs=True)
    poly_lat_long = geo_poly.get_data_points(as_lat_long=False,
                                             isSouthHemisphere=False)

    geo = calculate_boundary_points(poly_lat_long, ZONE,
                                    LL_LAT, LL_LONG,
                                    GRID_SPACING,
                                    LAT_AMOUNT, LONG_AMOUNT,
                                    isSouthHemisphere=False,
                                    verbose=self.verbose)
    results = frozenset(geo.get_data_points(as_lat_long=True,
                                            isSouthHemisphere=False))

    # These points must all appear (numerically close) in the results.
    points = []
    for i in range(2):
        for j in range(2):
            points.append((degminsec2decimal_degrees(8, i * 2, 0),
                           degminsec2decimal_degrees(97, i * 2, 0)))
    answer = frozenset(points)

    for point in points:
        assert any(num.allclose(point, result) for result in results)