def parabolic_solve(self, u_in, b, a=None, u_out=None, update_matrix=True,
                    imax=10000, tol=1.0e-8, atol=1.0e-8,
                    iprint=None, output_stats=False):
    """
    Solve for u in the equation

        ( I + dt div a grad ) u = b

        u | boundary = g

    u_in, u_out, b and g are Quantity objects.

    The Dirichlet BC g is encoded in the boundary values of u_in.

    The initial guess for the iterative scheme is given by the
    centroid values of u_in.

    Centroid values of a and b provide the diffusivity and the
    right-hand side.

    The solution u is returned in u_out.
    """

    if u_out is None:
        u_out = Quantity(self.domain)

    if update_matrix:
        self.update_elliptic_matrix(a)

    self.update_elliptic_boundary_term(u_in)

    self.set_parabolic_solve(True)

    # Pull out arrays and a matrix operator
    IdtA = self
    rhs = b.centroid_values + (self.dt * self.boundary_term)
    x0 = u_in.centroid_values

    x, stats = conjugate_gradient(IdtA, rhs, x0, imax=imax, tol=tol,
                                  atol=atol, iprint=iprint,
                                  output_stats=True)

    self.set_parabolic_solve(False)

    u_out.set_values(x, location='centroids')
    u_out.set_boundary_values(u_in.boundary_values)

    if output_stats:
        return u_out, stats
    else:
        return u_out
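# Illustrative sketch (not part of the operator above): the linear algebra
# behind parabolic_solve.  One implicit step solves (I + dt A) u = b + dt * bt
# by conjugate gradient, starting from the centroid values of u_in.  The small
# dense matrix A and the vectors b and bt below are made-up stand-ins for the
# operator's sparse elliptic matrix and boundary term, and scipy's cg is used
# in place of ANUGA's conjugate_gradient.
import numpy as np
from scipy.sparse.linalg import cg

n = 5
dt = 0.1

np.random.seed(0)
L = np.random.randn(n, n)
A = np.dot(L, L.T)                  # symmetric positive semi-definite stand-in

IdtA = np.eye(n) + dt * A           # the operator acting as I + dt A
b = np.random.randn(n)              # stand-in for b.centroid_values
bt = np.random.randn(n)             # stand-in for self.boundary_term
rhs = b + dt * bt

u0 = np.zeros(n)                    # initial guess, as u_in.centroid_values above
u, info = cg(IdtA, rhs, x0=u0, maxiter=10000)
assert info == 0, 'conjugate gradient did not converge'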
def fit(self, point_coordinates_or_filename=None, z=None, verbose=False,
        point_origin=None, attribute_name=None, max_read_lines=1e7):
    """Fit a smooth surface to a given 1d array of data points z.

    The smooth surface is computed at each vertex in the underlying
    mesh using the formula given in the module doc string.

    Inputs:
    point_coordinates_or_filename: The co-ordinates of the data points.
          Either the filename of a .pts file, a list of coordinate
          pairs [x, y], an nx2 numeric array, or a Geospatial_data
          object.
    z: Single 1d vector or array of data at the point_coordinates.
    """
    if isinstance(point_coordinates_or_filename, basestring):
        if point_coordinates_or_filename[-4:] != ".pts":
            use_blocking_option2 = False

    # NOTE PADARN 29/03/13: File reading from C has been removed. Now
    # the input is either a set of points, or a filename which is then
    # handled by the Geospatial_data object

    if verbose:
        print 'Fit.fit: Initializing'

    # Use blocking to load in the point info
    if isinstance(point_coordinates_or_filename, basestring):
        msg = "Don't set a point origin when reading from a file"
        assert point_origin is None, msg

        filename = point_coordinates_or_filename
        G_data = Geospatial_data(filename,
                                 max_read_lines=max_read_lines,
                                 load_file_now=False,
                                 verbose=verbose)

        for i, geo_block in enumerate(G_data):
            # Build the array
            points = geo_block.get_data_points(absolute=True)
            z = geo_block.get_attributes(attribute_name=attribute_name)

            self._build_matrix_AtA_Atz(points, z, attribute_name, verbose)

        point_coordinates = None

        if verbose:
            print ''
    else:
        point_coordinates = point_coordinates_or_filename

    # This condition either means a filename was read or the function
    # received a None as input
    if point_coordinates is None:
        if verbose:
            log.critical('Fit.fit: Warning: no data points in fit')
        msg = 'No interpolation matrix.'
        assert self.AtA is not None, msg
        assert self.Atz is not None
    else:
        point_coordinates = ensure_absolute(point_coordinates,
                                            geo_reference=point_origin)
        # if isinstance(point_coordinates, Geospatial_data) and z is None:
        # z will come from the geo-ref

        self._build_matrix_AtA_Atz(point_coordinates, z, verbose=verbose,
                                   output='counter')

    # Check sanity
    m = self.mesh.number_of_nodes  # Nbr of basis functions (1/vertex)
    n = self.point_count
    if n < m and self.alpha == 0.0:
        msg = 'ERROR (least_squares): Too few data points\n'
        msg += 'There are only %d data points and alpha == 0. ' % n
        msg += 'Need at least %d\n' % m
        msg += 'Alternatively, set smoothing parameter alpha to a small '
        msg += 'positive value,\ne.g. 1.0e-3.'
        raise TooFewPointsError(msg)

    self._build_coefficient_matrix_B(verbose)
    loners = self.mesh.get_lone_vertices()
    # FIXME - make this an error message.
    # test with
    # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    if len(loners) > 0:
        msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
        msg += 'All vertices should be part of a triangle.\n'
        msg += 'In the future this will be enforced.\n'
        msg += 'The following vertices are not part of a triangle;\n'
        msg += str(loners)
        log.critical(msg)
        #raise VertsWithNoTrianglesError(msg)

    return conjugate_gradient(self.B, self.Atz, self.Atz,
                              imax=2 * len(self.Atz) + 1000,
                              use_c_cg=self.use_c_cg,
                              precon=self.cg_precon)
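# Illustrative sketch (not part of Fit): the regularised least-squares system
# that fit ultimately hands to the solver, B w = Atz with B = AtA + alpha * D,
# where A interpolates mesh vertex values to the data points and D is a
# smoothness penalty.  The small dense arrays below are made-up stand-ins for
# ANUGA's sparse AtA, Atz and D, and scipy's cg replaces ANUGA's
# conjugate_gradient (so use_c_cg and precon do not appear).
import numpy as np
from scipy.sparse.linalg import cg

m = 4                       # number of mesh vertices (basis functions)
alpha = 1.0e-3              # smoothing parameter, cf. the alpha == 0.0 check above

np.random.seed(1)
A = np.random.rand(6, m)    # stand-in interpolation matrix for 6 data points
z = np.random.rand(6)       # stand-in data values at those points

AtA = np.dot(A.T, A)        # what _build_matrix_AtA_Atz accumulates
Atz = np.dot(A.T, z)
D = np.eye(m)               # stand-in smoothness matrix

B = AtA + alpha * D         # corresponds to _build_coefficient_matrix_B
w, info = cg(B, Atz, x0=Atz, maxiter=2 * len(Atz) + 1000)
assert info == 0, 'conjugate gradient did not converge'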