def set_values_NNeigh(self, name, filename):
    """
    Sets the values of the quantity 'name' at centroids, from raster
    'filename' using a nearest neighbour interpolation. This extracts
    the exact value of the raster at those coordinates and does not
    interpolate the values.
    Do not set at vertices or edges - not used in veg calculations
    """

    # index = self.get_unique_vertices()
    # volume_id = [i / 3 for i in index]
    # vertex_id = [i % 3 for i in index]
    # print volume_id
    # print vertex_id

    coord = self.domain.get_nodes(absolute=True)

    # extract the data from the pts file
    G_data = Geospatial_data(filename)
    points = G_data.get_data_points(absolute=True)
    z = G_data.get_attributes(attribute_name=None)

    # create interpolator
    interp = NearestNDInterpolator(points, z)

    # sample the raster values at the mesh coordinates
    z = interp(coord)
    z = z[num.newaxis, :].transpose()
    # z_c = np.concatenate((coord, z), axis=1)

    self.domain.quantities[name].set_values_from_array(z, location="unique vertices")
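# --- A minimal, self-contained sketch of the nearest-neighbour sampling used
# --- above, assuming scipy is available. The sample points and values below
# --- are invented for illustration; in set_values_NNeigh they come from a
# --- Geospatial_data (.pts) file and the mesh node coordinates instead.
import numpy as num
from scipy.interpolate import NearestNDInterpolator

# raster sample points (x, y) and their values (e.g. vegetation height)
sample_points = num.array([[0.0, 0.0], [10.0, 0.0], [0.0, 10.0], [10.0, 10.0]])
sample_values = num.array([1.0, 2.0, 3.0, 4.0])

interp = NearestNDInterpolator(sample_points, sample_values)

# query locations (e.g. mesh node coordinates); each gets the value of the
# closest sample point - no interpolation between samples
query = num.array([[1.0, 1.0], [9.0, 9.0]])
print(interp(query))  # -> [1. 4.]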
def points_needed(seg, ll_lat, ll_long, grid_spacing,
                  lat_amount, long_amount, zone,
                  isSouthHemisphere):
    """
    seg is two points, in UTM.
    Return a list of the points, in lats and longs, that are needed to
    interpolate any point on the segment.
    """
    from math import sqrt

    geo_reference = Geo_reference(zone=zone)
    geo = Geospatial_data(seg, geo_reference=geo_reference)
    seg_lat_long = geo.get_data_points(as_lat_long=True,
                                       isSouthHemisphere=isSouthHemisphere)

    # 1.415 = 2^0.5, rounded up....
    sqrt_2_rounded_up = 1.415
    buffer = sqrt_2_rounded_up * grid_spacing

    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer

    first_row = (min_long - ll_long) / grid_spacing
    # To round up
    first_row_long = int(round(first_row + 0.5))

    last_row = (max_long - ll_long) / grid_spacing  # round down
    last_row_long = int(round(last_row))

    first_row = (min_lat - ll_lat) / grid_spacing
    # To round up
    first_row_lat = int(round(first_row + 0.5))

    last_row = (max_lat - ll_lat) / grid_spacing  # round down
    last_row_lat = int(round(last_row))

    # max_distance is a buffer in metres; grid_spacing is in degrees and
    # 157147.4112 appears to be sqrt(2) * 111120 m per degree.
    max_distance = 157147.4112 * grid_spacing
    points_lat_long = []

    # Create a list of the lat long points to include,
    # throwing out any that are too far from the segment.
    for index_lat in range(first_row_lat, last_row_lat + 1):
        for index_long in range(first_row_long, last_row_long + 1):
            lat = ll_lat + index_lat * grid_spacing
            long = ll_long + index_long * grid_spacing

            # filter here to keep good points
            if keep_point(lat, long, seg, max_distance):
                points_lat_long.append((lat, long))  # must be hashable

    return points_lat_long
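# --- Hedged usage sketch of points_needed as defined above. The segment
# --- endpoints, grid origin and spacing are invented for illustration;
# --- keep_point, Geo_reference and Geospatial_data are assumed to be
# --- importable from the same module as points_needed.
seg = [[250000.0, 7620000.0], [255000.0, 7625000.0]]  # two UTM points
grid_points = points_needed(seg,
                            ll_lat=-34.5, ll_long=150.0,
                            grid_spacing=2.0 / 60.0,   # 2 arc-minutes
                            lat_amount=200, long_amount=200,
                            zone=56,
                            isSouthHemisphere=True)
# grid_points is a list of (lat, long) tuples close enough to the segment
# to be useful for interpolating values along it.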
def test_calc_max_depth_and_momentum(self):
    sww_file = "tid"  # self.domain.get_name() + "." + self.domain.format
    points_lat_long = [[-34, 151.5], [-35.5, 151.5], [-50, 151]]
    spat = Geospatial_data(data_points=points_lat_long,
                           points_are_lats_longs=True)
    points_ab = spat.get_data_points(absolute=True)

    deps, _ = calc_max_depth_and_momentum(sww_file,
                                          points_ab,
                                          verbose=self.verbose,
                                          use_cache=False)

    # Test values based on returned results, so not an excellent test
    assert num.allclose(deps[0], 0.113204555211)
    assert num.allclose(deps[1], 11.3215)
    assert num.allclose(deps[2], 0.0)  # this value is outside both sww files
def alpha_shape_via_files(point_file, boundary_file, alpha=None):
    """
    Load a point file and return the alpha shape boundary as a boundary file.

    Inputs:
    point_file:    File location of the input file, points format (.csv or .pts)
    boundary_file: File location of the generated output file
    alpha:         The alpha value can be optionally specified. If it is not
                   specified the optimum alpha value will be used.
    """
    geospatial = Geospatial_data(point_file)
    points = geospatial.get_data_points(absolute=False)

    AS = Alpha_Shape(points, alpha)
    AS.write_boundary(boundary_file)
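# --- Hedged usage sketch: the file names and extension below are hypothetical.
# --- alpha_shape_via_files reads a points file, computes the alpha shape of
# --- the point cloud and writes the resulting boundary to boundary_file;
# --- omitting alpha lets Alpha_Shape pick an optimal value itself.
alpha_shape_via_files('survey_points.csv', 'survey_boundary.bnd')

# or with an explicit alpha value, e.g. for a tighter boundary:
alpha_shape_via_files('survey_points.csv', 'survey_boundary.bnd', alpha=0.02)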
def test_points2polygon(self):
    att_dict = {}
    pointlist = num.array([[1.0, 0.0], [0.0, 1.0], [0.0, 0.0]])
    att_dict['elevation'] = num.array([10.1, 0.0, 10.4])
    att_dict['brightness'] = num.array([10.0, 1.0, 10.4])

    fileName = tempfile.mktemp(".csv")

    G = Geospatial_data(pointlist, att_dict)
    G.export_points_file(fileName)

    polygon = load_pts_as_polygon(fileName)

    # This test may fail if the order changes
    assert (polygon == [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])
def test_fit_to_mesh_pts_passing_mesh_in(self):
    a = [-1.0, 0.0]
    b = [3.0, 4.0]
    c = [4.0, 1.0]
    d = [-3.0, 2.0]  # 3
    e = [-1.0, -2.0]
    f = [1.0, -2.0]  # 5

    vertices = [a, b, c, d, e, f]
    triangles = [[0, 1, 3], [1, 0, 2], [0, 4, 5], [0, 5, 2]]  # abd bac aef afc

    fileName = tempfile.mktemp(".txt")
    file = open(fileName, "w")
    file.write(" x, y, elevation \n\
-2.0, 2.0, 0.\n\
-1.0, 1.0, 0.\n\
0.0, 2.0 , 2.\n\
1.0, 1.0 , 2.\n\
2.0, 1.0 ,3. \n\
0.0, 0.0 , 0.\n\
1.0, 0.0 , 1.\n\
0.0, -1.0, -1.\n\
-0.2, -0.5, -0.7\n\
-0.9, -1.5, -2.4\n\
0.5, -1.9, -1.4\n\
3.0, 1.0 , 4.\n")
    file.close()

    geo = Geospatial_data(fileName)
    fileName_pts = tempfile.mktemp(".pts")
    geo.export_points_file(fileName_pts)

    f = fit_to_mesh(fileName_pts, vertices, triangles,
                    alpha=0.0, max_read_lines=2)
    answer = linear_function(vertices)
    #print "f\n", f
    #print "answer\n", answer

    assert num.allclose(f, answer)

    os.remove(fileName)
    os.remove(fileName_pts)
def test_fit_to_mesh_UTM_file(self):
    # Get (enough) datapoints
    data_points = [[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
                   [-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6]]

    data_geo_spatial = Geospatial_data(data_points,
                                       points_are_lats_longs=True)
    points_UTM = data_geo_spatial.get_data_points(absolute=True)
    attributes = linear_function(points_UTM)
    att = 'elevation'

    # Create .txt file
    txt_file = tempfile.mktemp(".txt")
    file = open(txt_file, "w")
    file.write(" x,y," + att + " \n")
    for data_point, attribute in zip(points_UTM, attributes):
        row = str(data_point[0]) + ',' + str(data_point[1]) \
              + ',' + str(attribute)
        #print "row", row
        file.write(row + "\n")
    file.close()

    # setting up the mesh
    a = [240000, 7620000]
    b = [240000, 7680000]
    c = [300000, 7620000]
    points = [a, b, c]
    elements = [[0, 2, 1]]
    f = fit_to_mesh(txt_file, points, elements,
                    alpha=0.0, max_read_lines=2)
    answer = linear_function(points)

    #print "f", f
    #print "answer", answer
    assert num.allclose(f, answer)

    # Delete file!
    os.remove(txt_file)
def setUp(self):
    #print "****set up****"
    # Create an sww file
    # Set up an sww that has a geo ref.
    # have it cover an area in Australia. 'gong maybe
    # Don't have many triangles though!

    # Site Name: GDA-MGA: (UTM with GRS80 ellipsoid)
    # Zone: 56
    # Easting: 222908.705  Northing: 6233785.284
    # Latitude: -34 0' 0.00000''  Longitude: 150 0' 0.00000''
    # Grid Convergence: -1 40' 43.13''  Point Scale: 1.00054660

    # geo-ref
    # Zone: 56
    # Easting: 220000  Northing: 6230000

    # have a big area covered.
    mesh_file = tempfile.mktemp(".tsh")
    points_lat_long = [[-33, 152], [-35, 152], [-35, 150], [-33, 150]]
    spat = Geospatial_data(data_points=points_lat_long,
                           points_are_lats_longs=True)
    points_ab = spat.get_data_points(absolute=True)

    geo = Geo_reference(56, 400000, 6000000)
    spat.set_geo_reference(geo)

    m = Mesh()
    m.add_vertices(spat)
    m.auto_segment()
    m.generate_mesh(verbose=False)
    m.export_mesh_file(mesh_file)

    # Create shallow water domain
    domain = Domain(mesh_file)
    os.remove(mesh_file)

    domain.default_order = 2

    # Set some field values
    #domain.set_quantity('stage', 1.0)
    domain.set_quantity('elevation', -0.5)
    domain.set_quantity('friction', 0.03)

    ######################
    # Boundary conditions
    B = Transmissive_boundary(domain)
    domain.set_boundary({'exterior': B})

    ######################
    # Initial condition - with jumps
    bed = domain.quantities['elevation'].vertex_values
    stage = num.zeros(bed.shape, num.float)

    h = 0.3
    for i in range(stage.shape[0]):
        if i % 2 == 0:
            stage[i, :] = bed[i, :] + h
        else:
            stage[i, :] = bed[i, :]

    domain.set_quantity('stage', stage)
    domain.set_quantity('xmomentum', stage * 22.0)
    domain.set_quantity('ymomentum', stage * 55.0)

    domain.distribute_to_vertices_and_edges()

    self.domain = domain

    C = domain.get_vertex_coordinates()
    self.X = C[:, 0:6:2].copy()
    self.Y = C[:, 1:6:2].copy()
    self.F = bed

    #sww_file = tempfile.mktemp("")
    self.domain.set_name('tid_P0')
    self.domain.format = 'sww'
    self.domain.smooth = True
    self.domain.reduction = mean

    sww = SWW_file(self.domain)
    sww.store_connectivity()
    sww.store_timestep()
    self.domain.time = 2.
    sww.store_timestep()
    self.sww = sww  # so it can be deleted

    # Create another sww file
    mesh_file = tempfile.mktemp(".tsh")
    points_lat_long = [[-35, 152], [-36, 152], [-36, 150], [-35, 150]]
    spat = Geospatial_data(data_points=points_lat_long,
                           points_are_lats_longs=True)
    points_ab = spat.get_data_points(absolute=True)

    geo = Geo_reference(56, 400000, 6000000)
    spat.set_geo_reference(geo)

    m = Mesh()
    m.add_vertices(spat)
    m.auto_segment()
    m.generate_mesh(verbose=False)
    m.export_mesh_file(mesh_file)

    # Create shallow water domain
    domain = Domain(mesh_file)
    os.remove(mesh_file)

    domain.default_order = 2

    # Set some field values
    #domain.set_quantity('stage', 1.0)
    domain.set_quantity('elevation', -40)
    domain.set_quantity('friction', 0.03)

    ######################
    # Boundary conditions
    B = Transmissive_boundary(domain)
    domain.set_boundary({'exterior': B})

    ######################
    # Initial condition - with jumps
    bed = domain.quantities['elevation'].vertex_values
    stage = num.zeros(bed.shape, num.float)

    h = 30.
    for i in range(stage.shape[0]):
        if i % 2 == 0:
            stage[i, :] = bed[i, :] + h
        else:
            stage[i, :] = bed[i, :]

    domain.set_quantity('stage', stage)
    domain.set_quantity('xmomentum', stage * 22.0)
    domain.set_quantity('ymomentum', stage * 55.0)

    domain.distribute_to_vertices_and_edges()

    self.domain2 = domain

    C = domain.get_vertex_coordinates()
    self.X2 = C[:, 0:6:2].copy()
    self.Y2 = C[:, 1:6:2].copy()
    self.F2 = bed

    #sww_file = tempfile.mktemp("")
    domain.set_name('tid_P1')
    domain.format = 'sww'
    domain.smooth = True
    domain.reduction = mean

    sww = SWW_file(domain)
    sww.store_connectivity()
    sww.store_timestep()
    domain.time = 2.
    sww.store_timestep()
    self.swwII = sww  # so it can be deleted

    # print "sww.filename", sww.filename

    # Create a csv file
    self.csv_file = tempfile.mktemp(".csv")
    fd = open(self.csv_file, 'w', newline="")
    writer = csv.writer(fd)
    writer.writerow(['LONGITUDE', 'LATITUDE', STR_VALUE_LABEL,
                     CONT_VALUE_LABEL, 'ROOF_TYPE', WALL_TYPE_LABEL,
                     SHORE_DIST_LABEL])
    writer.writerow(['151.5', '-34', '199770', '130000',
                     'Metal', 'Timber', 20.])
    writer.writerow(['151', '-34.5', '150000', '76000',
                     'Metal', 'Double Brick', 200.])
    writer.writerow(['151', '-34.25', '150000', '76000',
                     'Metal', 'Brick Veneer', 200.])
    fd.close()

    # Create a csv file
    self.csv_fileII = tempfile.mktemp(".csv")
    fd = open(self.csv_fileII, 'w', newline="")
    writer = csv.writer(fd)
    writer.writerow(['LONGITUDE', 'LATITUDE', STR_VALUE_LABEL,
                     CONT_VALUE_LABEL, 'ROOF_TYPE', WALL_TYPE_LABEL,
                     SHORE_DIST_LABEL])
    writer.writerow(['151.5', '-34', '199770', '130000',
                     'Metal', 'Timber', 20.])
    writer.writerow(['151', '-34.5', '150000', '76000',
                     'Metal', 'Double Brick', 200.])
    writer.writerow(['151', '-34.25', '150000', '76000',
                     'Metal', 'Brick Veneer', 200.])
    fd.close()
def test_fit_and_interpolation_with_different_origins(self):
    """Fit a surface to one set of points. Then interpolate that surface
    using another set of points.
    This test tests the situation where points and mesh belong to a
    different coordinate system as defined by origin.
    """

    # Setup mesh used to represent fitted function
    a = [0.0, 0.0]
    b = [0.0, 2.0]
    c = [2.0, 0.0]
    d = [0.0, 4.0]
    e = [2.0, 2.0]
    f = [4.0, 0.0]

    points = [a, b, c, d, e, f]
    #bac, bce, ecf, dbe, daf, dae
    triangles = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]

    # Datapoints to fit from
    data_points1 = [[0.66666667, 0.66666667],
                    [1.33333333, 1.33333333],
                    [2.66666667, 0.66666667],
                    [0.66666667, 2.66666667],
                    [0.0, 1.0],
                    [0.0, 3.0],
                    [1.0, 0.0],
                    [1.0, 1.0],
                    [1.0, 2.0],
                    [1.0, 3.0],
                    [2.0, 1.0],
                    [3.0, 0.0],
                    [3.0, 1.0]]

    # First check that things are OK when using same origin
    mesh_origin = (56, 290000, 618000)  # zone, easting, northing
    data_origin = (56, 290000, 618000)  # zone, easting, northing

    # Fit surface to mesh
    interp = Fit(points, triangles, alpha=0.0, mesh_origin=mesh_origin)
    data_geo_spatial = Geospatial_data(data_points1,
                                       geo_reference=Geo_reference(56, 290000, 618000))
    z = linear_function(data_points1)   # Example z-values
    f = interp.fit(data_geo_spatial, z) # Fitted values at vertices

    # Shift datapoints according to new origins
    for k in range(len(data_points1)):
        data_points1[k][0] += mesh_origin[1] - data_origin[1]
        data_points1[k][1] += mesh_origin[2] - data_origin[2]

    # Fit surface to mesh
    interp = Fit(points, triangles, alpha=0.0)

    # Fitted values at vertices (using same z as before)
    f1 = interp.fit(data_points1, z)

    assert num.allclose(f, f1), 'Fit should have been unaltered'
def sdf2pts(name_in, name_out=None, verbose=False):
    """Read HEC-RAS elevation data from the following ASCII format (.sdf)

    name_in   Name of the input file (.sdf).
    name_out  Name of the output file (.pts); derived from name_in if None.
    verbose   True if this function is to be verbose.

    Example:

    # RAS export file created on Mon 15Aug2005 11:42
    # by HEC-RAS Version 3.1.1

    BEGIN HEADER:
      UNITS: METRIC
      DTM TYPE: TIN
      DTM: v:\\1\\cit\\perth_topo\\river_tin
      STREAM LAYER: c:\\local\\hecras\\21_02_03\\up_canning_cent3d.shp
      CROSS-SECTION LAYER: c:\\local\\hecras\\21_02_03\\up_can_xs3d.shp
      MAP PROJECTION: UTM
      PROJECTION ZONE: 50
      DATUM: AGD66
      VERTICAL DATUM:
      NUMBER OF REACHES: 19
      NUMBER OF CROSS-SECTIONS: 14206
    END HEADER:

    Only the SURFACE LINE data of the following form will be utilised:

      CROSS-SECTION:
        STREAM ID:Southern-Wungong
        REACH ID:Southern-Wungong
        STATION:19040.*
        CUT LINE:
          405548.671603161 , 6438142.7594925
          405734.536092045 , 6438326.10404912
          405745.130459356 , 6438331.48627354
          405813.89633823 , 6438368.6272789
        SURFACE LINE:
          405548.67, 6438142.76, 35.37
          405552.24, 6438146.28, 35.41
          405554.78, 6438148.78, 35.44
          405555.80, 6438149.79, 35.44
          405559.37, 6438153.31, 35.45
          405560.88, 6438154.81, 35.44
          405562.93, 6438156.83, 35.42
          405566.50, 6438160.35, 35.38
          405566.99, 6438160.83, 35.37
          ...
      END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:    (Nx2) float array
    elevation: N float array
    """

    import os
    from anuga.file.netcdf import NetCDFFile

    if name_in[-4:] != '.sdf':
        raise IOError('Input file %s should be of type .sdf.' % name_in)

    if name_out is None:
        name_out = name_in[:-4] + '.pts'
    elif name_out[-4:] != '.pts':
        raise IOError('Output file %s should be of type .pts.' % name_out)

    # Get ASCII file
    infile = open(name_in, 'r')

    if verbose:
        log.critical('Reading DEM from %s' % name_in)

    lines = infile.readlines()
    infile.close()

    if verbose:
        log.critical('Converting to pts format')

    # Scan through the header, picking up stuff we need.
    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1

    # Now read all points
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])

    msg = 'Actual #number_of_cross_sections == %d, Reported as %d' \
          % (j + 1, number_of_cross_sections)
    assert j + 1 == number_of_cross_sections, msg

    # Get output file, write PTS data
    if name_out is None:
        ptsname = name_in[:-4] + '.pts'
    else:
        ptsname = name_out

    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
    geo = Geospatial_data(points, {"elevation": elevation},
                          verbose=verbose, geo_reference=geo_ref)
    geo.export_points_file(ptsname)
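# --- Hedged usage sketch: the file names below are hypothetical. sdf2pts
# --- reads the SURFACE LINE sections of a HEC-RAS .sdf export and writes the
# --- (x, y) points and elevations to a NetCDF .pts file that
# --- Geospatial_data can load like any other points file.
sdf2pts('up_canning_cross_sections.sdf',
        name_out='up_canning_cross_sections.pts',
        verbose=True)

# e.g. Geospatial_data('up_canning_cross_sections.pts') afterwards.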
print 'project.bounding_polygon', project.bounding_polygon
print 'project.combined_elevation_basename', project.combined_elevation_basename

# Create Geospatial data from ASCII files
geospatial_data = {}

for filename in project.ascii_grid_filenames:
    absolute_filename = join(project.topographies_folder, filename)
    convert_dem_from_ascii2netcdf(absolute_filename,
                                  basename_out=absolute_filename,
                                  use_cache=True,
                                  verbose=True)
    dem2pts(absolute_filename, use_cache=True, verbose=True)

    G_grid = Geospatial_data(file_name=absolute_filename + '.pts',
                             verbose=True)

    print 'Clip geospatial object'
    geospatial_data[filename] = G_grid.clip(project.bounding_polygon)

# Create Geospatial data from TXT files
for filename in project.point_filenames:
    absolute_filename = join(project.topographies_folder, filename)
    G_points = Geospatial_data(file_name=absolute_filename,
                               verbose=True)

    print 'Clip geospatial object'
    geospatial_data[filename] = G_points.clip(project.bounding_polygon)

#-------------------------------------------------------------------------------
# Combine, clip and export dataset
#-------------------------------------------------------------------------------

extent_polygons = []
def trial(self,
          num_of_points=20000,
          maxArea=1000,
          max_points_per_cell=13,
          is_fit=True,
          use_file_type=None,
          blocking_len=500000,
          segments_in_mesh=True,
          save=False,
          verbose=False,
          run_profile=False,
          gridded=True,
          geo_ref=True):
    '''
    num_of_points
    '''
    if geo_ref is True:
        geo = Geo_reference(xllcorner=2.0, yllcorner=2.0)
    else:
        geo = None

    mesh_dict = self._build_regular_mesh_dict(maxArea=maxArea,
                                              is_segments=segments_in_mesh,
                                              save=save,
                                              geo=geo)
    points_dict = self._build_points_dict(num_of_points=num_of_points,
                                          gridded=gridded,
                                          verbose=verbose)

    if is_fit is True:
        op = "Fit_"
    else:
        op = "Interp_"

    profile_file = op + "P" + str(num_of_points) + \
                   "T" + str(len(mesh_dict['triangles'])) + \
                   "PPC" + str(max_points_per_cell) + \
                   ".txt"

    # Apply the geo_ref to the points, so they are relative
    # Pass in the geo_ref
    domain = Domain(mesh_dict['vertices'], mesh_dict['triangles'],
                    use_cache=False, verbose=verbose,
                    geo_reference=geo)

    # Initial time and memory
    t0 = time.time()
    # m0 = None on windows
    m0 = mem_usage()

    # Apply the geo_ref to the points, so they are relative
    # Pass in the geo_ref
    geospatial = Geospatial_data(points_dict['points'],
                                 points_dict['point_attributes'],
                                 geo_reference=geo)
    del points_dict

    if is_fit is True:
        if use_file_type is None:
            points = geospatial
            filename = None
        else:
            # FIXME (DSG) check the type
            fileName = tempfile.mktemp("." + use_file_type)
            geospatial.export_points_file(fileName, absolute=True)
            points = None
            filename = fileName

        if run_profile is True:
            s = """domain.set_quantity('elevation',points,filename=filename,use_cache=False)"""
            pobject = profile.Profile()
            presult = pobject.runctx(s, vars(sys.modules[__name__]), vars())
            prof_file = tempfile.mktemp(".prof")
            presult.dump_stats(prof_file)

            # Let's process these results
            S = pstats.Stats(prof_file)

            saveout = sys.stdout
            pfile = open(profile_file, "w")
            sys.stdout = pfile
            s = S.sort_stats('cumulative').print_stats(60)
            sys.stdout = saveout
            pfile.close()
            os.remove(prof_file)
        else:
            domain.set_quantity('elevation', points, filename=filename,
                                use_cache=False, verbose=verbose)

        if use_file_type is not None:
            os.remove(fileName)
    else:
        # run an interpolate problem.
        if run_profile:
            # pass in the geospatial points
            # and the mesh origin
            s = """benchmark_interpolate(mesh_dict['vertices'],mesh_dict['vertex_attributes'],mesh_dict['triangles'],geospatial,max_points_per_cell=max_points_per_cell,mesh_origin=geo)"""
            pobject = profile.Profile()
            presult = pobject.runctx(s, vars(sys.modules[__name__]), vars())
            prof_file = tempfile.mktemp(".prof")
            presult.dump_stats(prof_file)

            # Let's process these results
            S = pstats.Stats(prof_file)

            saveout = sys.stdout
            pfile = open(profile_file, "w")
            sys.stdout = pfile
            s = S.sort_stats('cumulative').print_stats(60)
            sys.stdout = saveout
            pfile.close()
            os.remove(prof_file)
        else:
            # pass in the geospatial points
            benchmark_interpolate(mesh_dict['vertices'],
                                  mesh_dict['vertex_attributes'],
                                  mesh_dict['triangles'],
                                  geospatial,
                                  mesh_origin=geo,
                                  max_points_per_cell=max_points_per_cell,
                                  verbose=verbose)

    time_taken_sec = (time.time() - t0)
    m1 = mem_usage()
    if m0 is None or m1 is None:
        memory_used = None
    else:
        memory_used = (m1 - m0)

    #print 'That took %.2f seconds' %time_taken_sec

    # return the times spent in first cell searching and
    # backing up.
    #search_one_cell_time, search_more_cells_time = search_times()
    #reset_search_times()
    #print "bench - build_quadtree_time", get_build_quadtree_time()

    return time_taken_sec, memory_used, len(mesh_dict['triangles']), \
           get_build_quadtree_time()
    assert len(vertex_points.shape) == 2

    # Interpolate
    from anuga.fit_interpolate.interpolate import Interpolate
    interp = Interpolate(vertex_points, volumes, verbose=verbose)

    # Interpolate using quantity values
    if verbose:
        log.critical('Interpolating')
    interpolated_values = interp.interpolate(q, data_points).flatten()

    if verbose:
        log.critical('Interpolated values are in [%f, %f]'
                     % (num.min(interpolated_values),
                        num.max(interpolated_values)))

    # Assign NODATA_value to all points outside bounding polygon
    # (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(data_points, P, closed=True)

    for i in outside_indices:
        interpolated_values[i] = NODATA_value

    # Store results
    G = Geospatial_data(data_points=data_points,
                        attributes=interpolated_values)

    G.export_points_file(name_out, absolute=True)

    fid.close()
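# --- Hedged sketch of the masking step above: points that fall outside the
# --- mesh boundary polygon get NODATA_value instead of an interpolated value.
# --- The polygon, points and NODATA value here are invented for illustration;
# --- outside_polygon is assumed to be importable from ANUGA's polygon
# --- utilities and to return the indices of points outside the polygon, as in
# --- the call above.
import numpy as num

NODATA_value = -9999.0
P = [[0.0, 0.0], [10.0, 0.0], [10.0, 10.0], [0.0, 10.0]]         # boundary polygon
data_points = num.array([[1.0, 1.0], [5.0, 5.0], [20.0, 20.0]])  # last point outside
interpolated_values = num.array([0.3, 0.7, 0.5])

for i in outside_polygon(data_points, P, closed=True):
    interpolated_values[i] = NODATA_value
# interpolated_values -> [0.3, 0.7, -9999.0]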
def test_URS_points_northern_hemisphere(self):
    LL_LAT = 8.0
    LL_LONG = 97.0
    GRID_SPACING = 2.0 / 60.0
    LAT_AMOUNT = 2
    LONG_AMOUNT = 2
    ZONE = 47

    points = []
    for i in range(2):
        for j in range(2):
            points.append((degminsec2decimal_degrees(8, 1 + i * 2, 0),
                           degminsec2decimal_degrees(97, 1 + i * 2, 0)))
    #print "points", points

    geo_poly = Geospatial_data(data_points=points,
                               points_are_lats_longs=True)
    poly_lat_long = geo_poly.get_data_points(as_lat_long=False,
                                             isSouthHemisphere=False)
    #print "seg_lat_long", poly_lat_long

    # geo = URS_points_needed_to_file('test_example_poly3', poly_lat_long,
    #                                 ZONE,
    #                                 LL_LAT, LL_LONG,
    #                                 GRID_SPACING,
    #                                 LAT_AMOUNT, LONG_AMOUNT,
    #                                 isSouthernHemisphere=False,
    #                                 export_csv=True,
    #                                 verbose=self.verbose)

    geo = calculate_boundary_points(poly_lat_long,
                                    ZONE,
                                    LL_LAT, LL_LONG,
                                    GRID_SPACING,
                                    LAT_AMOUNT, LONG_AMOUNT,
                                    isSouthHemisphere=False,
                                    verbose=self.verbose)

    results = frozenset(geo.get_data_points(as_lat_long=True,
                                            isSouthHemisphere=False))
    #print 'results', results

    # These are a set of points that have to be in results
    points = []
    for i in range(2):
        for j in range(2):
            points.append((degminsec2decimal_degrees(8, i * 2, 0),
                           degminsec2decimal_degrees(97, i * 2, 0)))
    #print "answer points", points
    answer = frozenset(points)

    for point in points:
        found = False
        for result in results:
            if num.allclose(point, result):
                found = True
                break
        if not found:
            assert False
    if isinstance(mesh_dict['vertex_attributes'], num.ndarray):
        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
    else:
        old_point_attributes = mesh_dict['vertex_attributes']

    if isinstance(mesh_dict['vertex_attribute_titles'], num.ndarray):
        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
    else:
        old_title_list = mesh_dict['vertex_attribute_titles']

    if verbose:
        log.critical('tsh file %s loaded' % mesh_file)

    # load in the points file
    try:
        geo = Geospatial_data(point_file, verbose=verbose)
    except IOError as e:
        if display_errors:
            log.critical("Could not load bad file: %s" % str(e))
        raise IOError  # Re-raise exception

    point_coordinates = geo.get_data_points(absolute=True)
    title_list, point_attributes = concatinate_attributelist(
        geo.get_all_attributes())

    if 'geo_reference' in mesh_dict and \
            mesh_dict['geo_reference'] is not None:
        mesh_origin = mesh_dict['geo_reference'].get_origin()
    else:
        mesh_origin = None
def fit(self, point_coordinates_or_filename=None, z=None, verbose=False,
        point_origin=None, attribute_name=None, max_read_lines=1e7):
    """Fit a smooth surface to given 1d array of data points z.

    The smooth surface is computed at each vertex in the underlying
    mesh using the formula given in the module doc string.

    Inputs:
    point_coordinates_or_filename: The co-ordinates of the data points.
        A filename of a .pts file, a list of coordinate pairs [x, y] of
        data points, an nx2 numeric array, or a Geospatial_data object.
    z: Single 1d vector or array of data at the point_coordinates.
    """
    if isinstance(point_coordinates_or_filename, basestring):
        if point_coordinates_or_filename[-4:] != ".pts":
            use_blocking_option2 = False

    # NOTE PADARN 29/03/13: File reading from C has been removed. Now
    # the input is either a set of points, or a filename which is then
    # handled by the Geospatial_data object

    if verbose:
        print('Fit.fit: Initializing')

    # Use blocking to load in the point info
    if isinstance(point_coordinates_or_filename, basestring):
        msg = "Don't set a point origin when reading from a file"
        assert point_origin is None, msg

        filename = point_coordinates_or_filename
        G_data = Geospatial_data(filename,
                                 max_read_lines=max_read_lines,
                                 load_file_now=False,
                                 verbose=verbose)

        for i, geo_block in enumerate(G_data):
            # Build the array
            points = geo_block.get_data_points(absolute=True)
            z = geo_block.get_attributes(attribute_name=attribute_name)

            self._build_matrix_AtA_Atz(points, z, attribute_name, verbose)

        point_coordinates = None

        if verbose:
            print('')
    else:
        point_coordinates = point_coordinates_or_filename

    # This condition either means a filename was read or the function
    # received a None as input
    if point_coordinates is None:
        if verbose:
            log.critical('Fit.fit: Warning: no data points in fit')
        msg = 'No interpolation matrix.'
        assert self.AtA is not None, msg
        assert self.Atz is not None
    else:
        point_coordinates = ensure_absolute(point_coordinates,
                                            geo_reference=point_origin)
        # if isinstance(point_coordinates, Geospatial_data) and z is None:
        # z will come from the geo-ref

        self._build_matrix_AtA_Atz(point_coordinates, z, verbose=verbose,
                                   output='counter')

    # Check sanity
    m = self.mesh.number_of_nodes  # Nbr of basis functions (1/vertex)
    n = self.point_count
    if n < m and self.alpha == 0.0:
        msg = 'ERROR (least_squares): Too few data points\n'
        msg += 'There are only %d data points and alpha == 0. ' % n
        msg += 'Need at least %d\n' % m
        msg += 'Alternatively, set smoothing parameter alpha to a small '
        msg += 'positive value,\ne.g. 1.0e-3.'
        raise TooFewPointsError(msg)

    self._build_coefficient_matrix_B(verbose)
    loners = self.mesh.get_lone_vertices()
    # FIXME - make this as error message.
    # test with
    # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    if len(loners) > 0:
        msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
        msg += 'All vertices should be part of a triangle.\n'
        msg += 'In the future this will be enforced.\n'
        msg += 'The following vertices are not part of a triangle;\n'
        msg += str(loners)
        log.critical(msg)
        #raise VertsWithNoTrianglesError(msg)

    return conjugate_gradient(self.B, self.Atz, self.Atz,
                              imax=2 * len(self.Atz) + 1000,
                              use_c_cg=self.use_c_cg,
                              precon=self.cg_precon)
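# --- Hedged usage sketch of Fit.fit, based only on the calls that appear in
# --- the tests above. The vertices, triangles and data values are invented;
# --- the important points are that (a) fit() accepts a .pts filename (read in
# --- blocks of max_read_lines) or in-memory points plus z values, and (b) it
# --- returns one fitted value per mesh vertex.
vertices = [[0.0, 0.0], [0.0, 2.0], [2.0, 0.0]]
triangles = [[0, 1, 2]]

fitter = Fit(vertices, triangles, alpha=0.0)

data_points = [[0.5, 0.5], [1.0, 0.5], [0.5, 1.0]]
z = [1.0, 2.0, 3.0]                          # one data value per data point
vertex_values = fitter.fit(data_points, z)   # one fitted value per mesh vertex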