def _disperse_step(nodes, rho, fixed_nodes, neighbors, delta):
    '''
    Returns the new position of the free nodes after a dispersal step. This
    does not handle node intersections with the boundary.
    '''
    if nodes.shape[0] == 0:
        # If there are no nodes, avoid errors resulting from zero sized
        # arrays.
        return nodes.copy()

    all_nodes = np.vstack((nodes, fixed_nodes))
    # find index and distance to nearest nodes
    dist, idx = KDTree(all_nodes).query(nodes, neighbors + 1)
    # don't consider a node to be one of its own nearest neighbors
    dist, idx = dist[:, 1:], idx[:, 1:]
    # compute the force proportionality constant between each node based on
    # their charges
    c = 1.0/(rho(all_nodes)[idx, None]*rho(nodes)[:, None, None])
    # calculate forces on each node resulting from the neighboring nodes.
    # This will result in a division by zero warning if there are duplicate
    # nodes. Do not suppress the warning because it is a real problem.
    forces = c*(nodes[:, None, :] - all_nodes[idx, :])/dist[:, :, None]**3
    # sum up all the forces for each node to get the direction that the
    # nodes should move.
    direction = np.sum(forces, axis=1)
    # normalize the direction to one. It is possible that the net force is
    # exactly zero. In that case, the node should not move.
    with np.errstate(invalid='ignore'):
        direction /= np.linalg.norm(direction, axis=1)[:, None]
        direction = np.nan_to_num(direction)

    # move by an amount proportional to the distance to the nearest neighbor
    step = delta*dist[:, 0, None]*direction
    # new node positions
    out = nodes + step
    return out
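# Usage sketch (illustrative addition, not part of the original module): run a
# few dispersal steps on random nodes in the unit square with a uniform
# density and no fixed nodes. Boundary handling is omitted, as the docstring
# above notes. The function name below is hypothetical; `np` and `KDTree` are
# the module-level imports assumed by `_disperse_step`.
def example_disperse_step(iterations=20):
    def rho(x):
        return np.ones(x.shape[0])

    free = np.random.uniform(0.0, 1.0, (300, 2))
    fixed = np.zeros((0, 2), dtype=float)
    for _ in range(iterations):
        free = _disperse_step(free, rho, fixed, neighbors=3, delta=0.1)

    return free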
def _check_spacing(nodes, rho=None):
    '''
    Check if any nodes are unusually close to each other. If so, a warning
    will be printed.
    '''
    n, dim = nodes.shape
    if rho is None:
        def rho(x):
            return np.ones(x.shape[0])

    # distance to the nearest neighbor
    dist = KDTree(nodes).query(nodes, 2)[0][:, 1]
    dist_is_zero = (dist == 0.0)
    if np.any(dist_is_zero):
        indices, = dist_is_zero.nonzero()
        for idx in indices:
            logger.warning(
                'Node %s (%s) is in the same location as another node.'
                % (idx, nodes[idx])
            )

    density = 1.0/dist**dim
    normalized_density = np.log10(density / rho(nodes))
    percs = np.percentile(normalized_density, [10, 50, 90])
    med = percs[1]
    idr = percs[2] - percs[0]
    # a node is unusually close to its neighbor if its normalized density is
    # far above the rest
    is_too_close = normalized_density > (med + 2*idr)
    if np.any(is_too_close):
        indices, = is_too_close.nonzero()
        for idx in indices:
            logger.warning(
                'Node %s (%s) is unusually close to a neighboring node.'
                % (idx, nodes[idx])
            )
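# Usage sketch (illustrative addition, not part of the original module):
# `_check_spacing` logs a warning for the duplicated node added below and for
# any node whose nearest-neighbor distance is anomalously small relative to
# the rest. The function name is hypothetical; `np` and `logger` are the
# module-level imports assumed above.
def example_check_spacing():
    nodes = np.random.uniform(0.0, 1.0, (100, 2))
    nodes = np.vstack((nodes, nodes[:1]))  # duplicate the first node
    _check_spacing(nodes)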
def _disperse(nodes, rho, fixed_nodes, neighbors, delta):
    '''
    Returns the new position of the free nodes after a dispersal step. This
    does not handle node intersections with the boundary.
    '''
    # form collection of all nodes
    all_nodes = np.vstack((nodes, fixed_nodes))
    # find index and distance to nearest nodes
    d, i = KDTree(all_nodes).query(nodes, neighbors)
    # don't consider a node to be one of its own nearest neighbors
    d, i = d[:, 1:], i[:, 1:]
    # compute the force proportionality constant between each node based on
    # their charges
    c = 1.0/(rho(all_nodes)[i, None]*rho(nodes)[:, None, None])
    # calculate forces on each node resulting from the neighboring nodes
    forces = c*(nodes[:, None, :] - all_nodes[i, :])/d[:, :, None]**3
    # sum up all the forces for each node
    direction = np.sum(forces, axis=1)
    # normalize the net forces to one
    direction /= np.linalg.norm(direction, axis=1)[:, None]
    # in the case of a zero vector replace nans with zeros
    direction = np.nan_to_num(direction)
    # move in the direction of the force by an amount proportional to the
    # distance to the nearest neighbor
    step = delta*d[:, 0, None]*direction
    # new node positions
    out = nodes + step
    return out
def __init__(self, y, d, sigma=0.0, k=20, phi='phs3', eps=1.0, order=None):
    y = np.asarray(y, dtype=float)
    assert_shape(y, (None, None), 'y')
    ny, ndim = y.shape

    d = np.asarray(d, dtype=float)
    assert_shape(d, (ny,), 'd')

    if np.isscalar(sigma):
        sigma = np.full(ny, sigma, dtype=float)
    else:
        sigma = np.asarray(sigma, dtype=float)
        assert_shape(sigma, (ny,), 'sigma')

    # make sure the number of nearest neighbors used for interpolation does
    # not exceed the number of observations
    k = min(int(k), ny)

    phi = get_rbf(phi)
    if isinstance(phi, SparseRBF):
        raise ValueError('SparseRBF instances are not supported')

    if not np.isscalar(eps):
        raise ValueError('The shape parameter should be a float')

    min_order = _MIN_ORDER.get(phi, -1)
    if order is None:
        order = max(min_order, 0)
    elif order < min_order:
        logger.warning(
            'The polynomial order should not be below %d for %s in order '
            'for the interpolant to be well-posed' % (min_order, phi))

    order = int(order)
    nmonos = monomial_count(order, ndim)
    if nmonos > k:
        raise ValueError(
            'The polynomial order is too high. The number of monomials, %d, '
            'exceeds the number of neighbors used for interpolation, %d'
            % (nmonos, k))

    tree = KDTree(y)

    self.y = y
    self.d = d
    self.sigma = sigma
    self.k = k
    self.eps = eps
    self.phi = phi
    self.order = order
    self.tree = tree
def neighbor_argsort(nodes, m=None):
    '''
    Returns a permutation array that sorts `nodes` so that each node and its
    `m` nearest neighbors are close together in memory. This is done through
    the use of a KD Tree and the Reverse Cuthill-McKee algorithm.

    Parameters
    ----------
    nodes : (n, d) float array

    m : int, optional

    Returns
    -------
    (n,) int array

    Examples
    --------
    >>> nodes = np.array([[0.0, 1.0],
    ...                   [2.0, 1.0],
    ...                   [1.0, 1.0]])
    >>> idx = neighbor_argsort(nodes, 2)
    >>> nodes[idx]
    array([[ 2.,  1.],
           [ 1.,  1.],
           [ 0.,  1.]])

    '''
    nodes = np.asarray(nodes, dtype=float)
    assert_shape(nodes, (None, None), 'nodes')
    if m is None:
        # this should be roughly equal to the stencil size for the RBF-FD
        # problem
        m = 5**nodes.shape[1]

    m = min(m, nodes.shape[0])
    # find the indices of the nearest m nodes for each node
    _, idx = KDTree(nodes).query(nodes, m)
    # efficiently form an adjacency matrix
    col = idx.ravel()
    row = np.repeat(np.arange(nodes.shape[0]), m)
    data = np.ones(nodes.shape[0]*m, dtype=bool)
    mat = csc_matrix((data, (row, col)), dtype=bool)
    permutation = reverse_cuthill_mckee(mat)
    return permutation
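# Usage sketch (illustrative addition, not part of the original module): sort
# scattered nodes with `neighbor_argsort` before building an RBF-FD matrix so
# that the nonzeros cluster near the diagonal, improving cache locality.
# `weight_matrix` is the function defined later in this listing; the function
# name below is hypothetical.
def example_neighbor_argsort():
    nodes = np.random.uniform(0.0, 1.0, (500, 2))
    idx = neighbor_argsort(nodes)
    nodes = nodes[idx]
    # Laplacian weight matrix built from the sorted nodes
    L = weight_matrix(nodes, nodes, 25, [[2, 0], [0, 2]])
    return L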
def __init__(self, y, d, sigma=0.0, k=20, phi='phs3', eps=1.0, order=None):
    y, d, sigma, phi, eps, order, k = _sanitize_arguments(
        y, d, sigma, phi, eps, order, k)
    if isinstance(phi, SparseRBF):
        raise ValueError('`SparseRBF` instances are not supported.')

    tree = KDTree(y)

    self.y = y
    self.d = d
    self.sigma = sigma
    self.k = k
    self.eps = eps
    self.phi = phi
    self.order = order
    self.tree = tree
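# Usage sketch (illustrative addition, not part of the original module): the
# constructor above belongs to a k-nearest-neighbor RBF interpolant class. The
# class name `KNearestRBFInterpolant` and its `__call__` signature are
# assumptions here; only the constructor arguments come from the code above.
def example_knearest_interpolant():
    y = np.random.uniform(-1.0, 1.0, (500, 2))   # observation points
    d = np.sin(y[:, 0])*np.cos(y[:, 1])          # observed values
    interp = KNearestRBFInterpolant(y, d, k=30, phi='phs3', order=2)
    x = np.random.uniform(-1.0, 1.0, (200, 2))   # evaluation points
    return interp(x)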
def prepare_nodes(nodes, domain, rho=None, iterations=20, neighbors=None,
                  dispersion_delta=0.1, pinned_nodes=None, snap_delta=0.5,
                  boundary_groups=None, boundary_groups_with_ghosts=None,
                  ghost_delta=0.5, include_vertices=False,
                  orient_simplices=True):
    '''
    Prepares a set of nodes for solving PDEs with the RBF and RBF-FD method.
    This includes: dispersing the nodes away from each other to ensure a more
    even spacing, snapping nodes to the boundary, determining the normal
    vectors for each node, determining the group that each node belongs to,
    creating ghost nodes, sorting the nodes so that adjacent nodes are close
    in memory, and verifying that no two nodes are anomalously close to each
    other.

    The function returns a set of nodes, the normal vectors for each node,
    and a dictionary identifying which group each node belongs to.

    Parameters
    ----------
    nodes : (n, d) float array
        An initial sampling of nodes within the domain

    domain : (p, d) float array and (q, d) int array
        Vertices of the domain and connectivity of the vertices

    rho : function, optional
        Node density function. Takes a (n, d) array of coordinates and
        returns an (n,) array of desired node densities at those coordinates.
        This is used during the node dispersion step.

    iterations : int, optional
        Number of dispersion iterations.

    neighbors : int, optional
        Number of neighboring nodes to use when calculating the repulsion
        force. This defaults to 3 for 2D nodes and 4 for 3D nodes.

    dispersion_delta : float, optional
        Scaling factor for the node step size in each iteration. The step
        size is equal to `dispersion_delta` times the distance to the nearest
        neighbor.

    pinned_nodes : (k, d) array, optional
        Nodes which do not move and only provide a repulsion force. These
        nodes are included in the set of nodes returned by this function and
        they are in the group named "pinned".

    snap_delta : float, optional
        Controls the maximum snapping distance. The maximum snapping distance
        for each node is `snap_delta` times the distance to the nearest
        neighbor. This defaults to 0.5.

    boundary_groups : dict, optional
        Dictionary defining the boundary groups. The keys are the names of
        the groups and the values are lists of simplex indices making up each
        group. This function will return a dictionary identifying which nodes
        belong to each boundary group. By default, there is a single group
        named 'all' for the entire boundary. Specifically, the default value
        is `{'all': range(len(smp))}`.

    boundary_groups_with_ghosts : list of strs, optional
        List of boundary groups that will be given ghost nodes. By default,
        no boundary groups are given ghost nodes. The groups specified here
        must exist in `boundary_groups`.

    ghost_delta : float, optional
        How far the ghost nodes should be from their corresponding boundary
        node. The distance is `ghost_delta` times the distance to the nearest
        neighbor.

    include_vertices : bool, optional
        If `True`, then the vertices will be included in the output nodes.
        Each vertex will be assigned to the boundary group that its adjoining
        simplices are part of. If the simplices are in multiple groups, then
        the vertex will be assigned to the group containing the simplex that
        comes first in `smp`.

    orient_simplices : bool, optional
        If `False` then it is assumed that the simplices are already oriented
        such that their normal vectors point outward.

    Returns
    -------
    (m, d) float array
        Node positions

    dict
        The indices of nodes belonging to each group. There will always be a
        group called 'interior' containing the nodes that are not on the
        boundary. By default there is a group containing all the boundary
        nodes called 'boundary:all'. If `boundary_groups` was specified, then
        those groups will be included in this dictionary and their names will
        be given a 'boundary:' prefix. If `boundary_groups_with_ghosts` was
        specified then those groups of ghost nodes will be included in this
        dictionary and their names will be given a 'ghosts:' prefix.

    (m, d) float array
        Outward normal vectors for each node. If a node is not on the
        boundary then its corresponding row will contain NaNs.

    '''
    domain = as_domain(domain)
    if orient_simplices:
        logger.debug('Orienting simplices...')
        domain.orient_simplices()
        logger.debug('Done')

    nodes = np.asarray(nodes, dtype=float)
    assert_shape(nodes, (None, domain.dim), 'nodes')

    # the `fixed_nodes` are used to provide a repulsion force during
    # dispersion, but they do not move.
    fixed_nodes = np.zeros((0, domain.dim), dtype=float)
    if pinned_nodes is not None:
        pinned_nodes = np.asarray(pinned_nodes, dtype=float)
        assert_shape(pinned_nodes, (None, domain.dim), 'pinned_nodes')
        fixed_nodes = np.vstack((fixed_nodes, pinned_nodes))

    if include_vertices:
        fixed_nodes = np.vstack((fixed_nodes, domain.vertices))

    logger.debug('Dispersing nodes...')
    nodes = disperse(
        nodes,
        domain,
        iterations=iterations,
        rho=rho,
        fixed_nodes=fixed_nodes,
        neighbors=neighbors,
        delta=dispersion_delta
    )
    logger.debug('Done')

    # append the domain vertices to the collection of nodes if requested
    if include_vertices:
        nodes = np.vstack((nodes, domain.vertices))

    # snap nodes to the boundary, identifying which simplex each node was
    # snapped to
    logger.debug('Snapping nodes to boundary...')
    nodes, smpid = domain.snap(nodes, delta=snap_delta)
    logger.debug('Done')

    normals = np.full_like(nodes, np.nan)
    normals[smpid >= 0] = domain.normals[smpid[smpid >= 0]]

    # create a dictionary identifying which nodes belong to which group
    groups = {}
    groups['interior'], = (smpid == -1).nonzero()

    # append the user specified pinned nodes
    if pinned_nodes is not None:
        pinned_idx = np.arange(pinned_nodes.shape[0]) + nodes.shape[0]
        pinned_normals = np.full_like(pinned_nodes, np.nan)
        nodes = np.vstack((nodes, pinned_nodes))
        normals = np.vstack((normals, pinned_normals))
        groups['pinned'] = pinned_idx

    logger.debug('Grouping boundary nodes...')
    if boundary_groups is None:
        boundary_groups = {'all': np.arange(len(domain.simplices))}
    else:
        boundary_groups = {
            str(k): np.array(v, dtype=int)
            for k, v in boundary_groups.items()
        }

    # validate the user-specified boundary groups
    simplex_counts = Counter(chain(*boundary_groups.values()))
    for idx in range(len(domain.simplices)):
        if simplex_counts[idx] != 1:
            logger.warning(
                'Simplex %s is specified %s times in the boundary groups.'
                % (idx, simplex_counts[idx])
            )

    extra = set(simplex_counts).difference(range(len(domain.simplices)))
    if extra:
        raise ValueError(
            'The simplex indices %s were specified in the boundary groups '
            'but do not exist.' % extra
        )

    if boundary_groups_with_ghosts is None:
        boundary_groups_with_ghosts = []

    # find the mapping from simplex indices to node indices, then use
    # `boundary_groups` to find which nodes belong to each boundary group
    smp_to_nodes = [[] for _ in range(len(domain.simplices))]
    for i, j in enumerate(smpid):
        if j != -1:
            smp_to_nodes[j].append(i)

    for bnd_name, bnd_smp in boundary_groups.items():
        bnd_idx = list(chain.from_iterable(smp_to_nodes[i] for i in bnd_smp))
        groups['boundary:%s' % bnd_name] = np.array(bnd_idx, dtype=int)

    logger.debug('Done')

    logger.debug('Creating ghost nodes...')
    tree = KDTree(nodes)
    for bnd_name in boundary_groups_with_ghosts:
        bnd_idx = groups['boundary:%s' % bnd_name]
        spacing = ghost_delta*tree.query(nodes[bnd_idx], 2)[0][:, 1]
        ghost_idx = np.arange(bnd_idx.shape[0]) + nodes.shape[0]
        ghost_nodes = nodes[bnd_idx] + spacing[:, None]*normals[bnd_idx]
        ghost_normals = np.full_like(ghost_nodes, np.nan)
        nodes = np.vstack((nodes, ghost_nodes))
        normals = np.vstack((normals, ghost_normals))
        groups['ghosts:%s' % bnd_name] = ghost_idx

    logger.debug('Done')

    logger.debug('Sorting nodes...')
    sort_idx = neighbor_argsort(nodes)
    nodes = nodes[sort_idx]
    normals = normals[sort_idx]
    reverse_sort_idx = np.argsort(sort_idx)
    groups = {k: reverse_sort_idx[v] for k, v in groups.items()}
    logger.debug('Done')

    logger.debug('Checking the quality of the generated nodes...')
    _check_spacing(nodes, rho)
    logger.debug('Done')

    return nodes, groups, normals
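# Usage sketch (illustrative addition, not part of the original module):
# prepare nodes on the unit square with ghost nodes along the entire
# boundary. The domain is given as vertices and simplices (edges in 2D),
# matching the `domain` parameter described above. The function name below is
# hypothetical.
def example_prepare_nodes():
    vert = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
    smp = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
    init = np.random.uniform(0.0, 1.0, (500, 2))
    nodes, groups, normals = prepare_nodes(
        init, (vert, smp),
        boundary_groups={'all': range(len(smp))},
        boundary_groups_with_ghosts=['all'])

    interior = nodes[groups['interior']]      # nodes strictly inside the domain
    boundary = nodes[groups['boundary:all']]  # nodes snapped to the boundary
    ghosts = nodes[groups['ghosts:all']]      # ghost nodes just outside
    return interior, boundary, ghosts, normals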
def periodic_weight_matrix(bbox, x, p, n, diffs, coeffs=None, phi=phs3,
                           order=None, eps=1.0, stencils=None):
    '''
    Returns a periodic weight matrix which maps a function's values at `p` to
    an approximation of that function's derivative at `x`, where the function
    is periodic over the length of `bbox`. This is a convenience function
    which first creates stencils and then computes the RBF-FD weights for
    each stencil.

    Parameters
    ----------
    bbox : (2, D) array
        Bounding box of the target points. The returned weight matrix has
        periodic stencils only for those spatial dimensions where the
        bounding box has finite length.

    x : (N, D) array
        Target points where the derivatives will be approximated.

    p : (M, D) array
        Source points. The derivatives will be approximated with a weighted
        sum of values at these points.

    n : int
        The stencil size

    diffs : (D,) int array or (K, D) int array
        Derivative orders for each spatial dimension. For example `[2, 0]`
        indicates that the weights should approximate the second derivative
        with respect to the first spatial dimension in two-dimensional space.
        `diffs` can also be a (K, D) array, where each (D,) sub-array is a
        term in a differential operator. For example the two-dimensional
        Laplacian can be represented as `[[2, 0], [0, 2]]`.

    coeffs : (K,) float array or (K, N) float array, optional
        Coefficients for each term in the differential operator specified
        with `diffs`. Defaults to an array of ones. If `diffs` was specified
        as a (D,) array then `coeffs` should be a length 1 array. If the
        coefficients for the differential operator vary with `x` then
        `coeffs` can be specified as a (K, N) array.

    phi : rbf.basis.RBF, optional
        Type of RBF. Select from those available in `rbf.basis` or create
        your own.

    order : int, optional
        Order of the added polynomial. This defaults to the highest
        derivative order. For example, if `diffs` is `[[2, 0], [0, 1]]`, then
        `order` is set to 2.

    eps : float or (M,) array, optional
        Shape parameter for each RBF, which have centers `p`. This only makes
        a difference when using RBFs that are not scale invariant. All the
        predefined RBFs except for the odd order polyharmonic splines are not
        scale invariant.

    Returns
    -------
    (N, M) coo sparse matrix

    Examples
    --------
    Create a periodic second order differentiation matrix in one-dimensional
    space

    >>> x = np.arange(4.0)[:, None]
    >>> bbox = np.array([[0.0], [4.0]])
    >>> W = periodic_weight_matrix(bbox, x, x, 3, (2,))
    >>> W.toarray()
    array([[-2.,  1.,  0.,  1.],
           [ 1., -2.,  1.,  0.],
           [ 0.,  1., -2.,  1.],
           [ 1.,  0.,  1., -2.]])

    '''
    bbox = np.asarray(bbox, dtype=float)
    assert_shape(bbox, (2, None), 'bbox')

    x = np.asarray(x, dtype=float)
    assert_shape(x, (None, bbox.shape[1]), 'x')

    p = np.asarray(p, dtype=float)
    assert_shape(p, (None, x.shape[1]), 'p')

    # tile the source points so that each periodic dimension is covered
    tp, nt = _tiled_point_cloud(bbox, p)

    diffs = np.asarray(diffs, dtype=int)
    diffs = _reshape_diffs(diffs)

    if np.isscalar(eps):
        eps = np.full(tp.shape[0], eps, dtype=float)
    else:
        # repeat the shape parameters for each tile of the point cloud
        eps = np.tile(np.asarray(eps, dtype=float), nt)
        assert_shape(eps, (tp.shape[0],), 'eps')

    # make `coeffs` a (K, N) array
    if coeffs is None:
        coeffs = np.ones((diffs.shape[0], x.shape[0]), dtype=float)
    else:
        coeffs = np.asarray(coeffs, dtype=float)
        if coeffs.ndim == 1:
            coeffs = np.repeat(coeffs[:, None], x.shape[0], axis=1)

        assert_shape(coeffs, (diffs.shape[0], x.shape[0]), 'coeffs')

    stencils = KDTree(tp).query(x, n)[1]
    logger.debug(
        'building a (%s, %s) RBF-FD weight matrix with %s nonzeros...'
        % (x.shape[0], p.shape[0], stencils.size))

    # values that will be put into the sparse matrix
    data = np.zeros((x.shape[0], stencils.shape[1]), dtype=float)
    for i, si in enumerate(stencils):
        # intermittently log the progress
        if i % max(x.shape[0] // 10, 1) == 0:
            logger.debug('  %d%% complete' % (100 * i / x.shape[0]))

        data[i, :] = weights(x[i], tp[si], diffs,
                             coeffs=coeffs[:, i], eps=eps[si],
                             phi=phi, order=order)

    rows = np.repeat(range(data.shape[0]), data.shape[1])
    # using the remainder we recover the indices of the source points in the
    # original (untiled) point cloud
    cols = np.remainder(stencils, p.shape[0]).ravel()
    data = data.ravel()
    shape = x.shape[0], p.shape[0]
    L = sp.coo_matrix((data, (rows, cols)), shape)
    logger.debug('  done')
    return L
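# Usage sketch (illustrative addition, not part of the original module): a
# periodic first-derivative matrix on a regular 1-D grid. The bounding box
# makes the stencils wrap around, so points near x=0 borrow neighbors from
# near x=1. The function name below is hypothetical.
def example_periodic_weight_matrix():
    x = np.linspace(0.0, 1.0, 50, endpoint=False)[:, None]
    bbox = np.array([[0.0], [1.0]])
    D = periodic_weight_matrix(bbox, x, x, 5, (1,))
    u = np.sin(2*np.pi*x[:, 0])
    return D.dot(u)   # should approximate 2*pi*cos(2*pi*x)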
def snap(self, points, delta=0.5):
    '''
    Snaps `points` to the nearest points on the boundary if they are
    sufficiently close to the boundary. A point is sufficiently close if the
    distance to the boundary is less than `delta` times the distance to its
    nearest neighbor.

    Parameters
    ----------
    points : (n, d) float array

    delta : float, optional

    Returns
    -------
    (n, d) float array
        The new points after snapping to the boundary

    (n,) int array
        The simplex that the points are snapped to. If a point is not snapped
        to the boundary then its corresponding value will be -1.

    '''
    points = np.asarray(points, dtype=float)
    assert_shape(points, (None, self.dim), 'points')
    n = points.shape[0]
    out_smpid = np.full(n, -1, dtype=int)
    out_points = np.array(points, copy=True)
    nbr_dist = KDTree(points).query(points, 2)[0][:, 1]
    snap_dist = delta*nbr_dist
    if self.rtree is None:
        nrst_pnt, nrst_smpid = geo.nearest_point(
            points, self.vertices, self.simplices)
        nrst_dist = np.linalg.norm(nrst_pnt - points, axis=1)
        snap = nrst_dist < snap_dist
        out_points[snap] = nrst_pnt[snap]
        out_smpid[snap] = nrst_smpid[snap]
    else:
        # create bounding boxes around the snapping region for each point
        bounds = np.hstack((points - snap_dist[:, None],
                            points + snap_dist[:, None]))
        for i, bnd in enumerate(bounds):
            # get a list of simplices which point i could potentially snap to
            potential_smpid = list(self.rtree.intersection(bnd))
            # sort the list to ensure consistent output
            potential_smpid.sort()
            if not potential_smpid:
                # no simplices are within the snapping distance
                continue

            # get the nearest point on the potential simplices and the
            # simplex containing the nearest point
            nrst_pnt, nrst_smpid = geo.nearest_point(
                points[[i]], self.vertices, self.simplices[potential_smpid])
            nrst_dist = np.linalg.norm(points[i] - nrst_pnt[0])
            # if the nearest point is within the snapping distance then snap
            if nrst_dist < snap_dist[i]:
                out_points[i] = nrst_pnt[0]
                out_smpid[i] = potential_smpid[nrst_smpid[0]]

    return out_points, out_smpid
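# Usage sketch (illustrative addition, not part of the original module): snap
# scattered points to the boundary of the unit square. The domain object is
# built with `as_domain` from vertices and simplices, as done in
# `prepare_nodes` in this listing; the function name below is hypothetical.
def example_snap():
    vert = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
    smp = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
    dom = as_domain((vert, smp))
    pnts = np.random.uniform(0.0, 1.0, (200, 2))
    snapped, smpid = dom.snap(pnts, delta=0.5)
    moved = smpid >= 0   # True for the points that were snapped
    return snapped[moved]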
def weight_matrix(x, p, n, diffs, coeffs=None, phi='phs3', order=None,
                  eps=1.0, chunk_size=1000):
    '''
    Returns a weight matrix which maps a function's values at `p` to an
    approximation of that function's derivative at `x`. This is a convenience
    function which first creates stencils and then computes the RBF-FD
    weights for each stencil.

    Parameters
    ----------
    x : (N, D) float array
        Target points where the derivative is being approximated

    p : (M, D) array
        Source points. The derivatives will be approximated with a weighted
        sum of values at these points.

    n : int
        The stencil size. Each target point will have a stencil made of the
        `n` nearest neighbors from `p`

    diffs : (D,) int array or (K, D) int array
        Derivative orders for each spatial dimension. For example `[2, 0]`
        indicates that the weights should approximate the second derivative
        with respect to the first spatial dimension in two-dimensional space.
        `diffs` can also be a (K, D) array, where each (D,) sub-array is a
        term in a differential operator. For example the two-dimensional
        Laplacian can be represented as `[[2, 0], [0, 2]]`.

    coeffs : (K,) or (K, N) float array, optional
        Coefficients for each term in the differential operator specified
        with `diffs`. The coefficients can vary between target points.
        Defaults to an array of ones.

    phi : rbf.basis.RBF instance or str, optional
        Type of RBF. Select from those available in `rbf.basis` or create
        your own.

    order : int, optional
        Order of the added polynomial. This defaults to the highest
        derivative order. For example, if `diffs` is `[[2, 0], [0, 1]]`, then
        this is set to 2.

    eps : float, optional
        Shape parameter for each RBF

    chunk_size : int, optional
        Break the target points into chunks with this size to reduce the
        memory requirements

    Returns
    -------
    (N, M) coo sparse matrix

    Examples
    --------
    Create a second order differentiation matrix in one-dimensional space

    >>> x = np.arange(4.0)[:, None]
    >>> W = weight_matrix(x, x, 3, (2,))
    >>> W.toarray()
    array([[ 1., -2.,  1.,  0.],
           [ 1., -2.,  1.,  0.],
           [ 0.,  1., -2.,  1.],
           [ 0.,  1., -2.,  1.]])

    '''
    x = np.asarray(x, dtype=float)
    assert_shape(x, (None, None), 'x')
    nx, ndim = x.shape

    p = np.asarray(p, dtype=float)
    assert_shape(p, (None, ndim), 'p')

    diffs = np.asarray(diffs, dtype=int)
    diffs = np.atleast_2d(diffs)
    assert_shape(diffs, (None, ndim), 'diffs')

    if coeffs is None:
        coeffs = np.ones(len(diffs), dtype=float)
    else:
        coeffs = np.asarray(coeffs, dtype=float)
        assert_shape(coeffs, (len(diffs), ...), 'coeffs')

    # broadcast each element in `coeffs` to the length of `x`
    coeffs = np.array([np.broadcast_to(c, (nx,)) for c in coeffs])

    _, stencils = KDTree(p).query(x, n)
    if chunk_size is None:
        data = weights(
            x, p[stencils], diffs, coeffs=coeffs, phi=phi, order=order,
            eps=eps)
    else:
        data = np.empty((nx, n), dtype=float)
        for start in range(0, nx, chunk_size):
            stop = start + chunk_size
            data[start:stop] = weights(
                x[start:stop], p[stencils[start:stop]], diffs,
                coeffs=coeffs[:, start:stop], phi=phi, order=order, eps=eps)

    data = data.ravel()
    rows = np.repeat(range(nx), n)
    cols = stencils.ravel()
    out = sp.coo_matrix((data, (rows, cols)), (nx, len(p)))
    return out
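# Usage sketch (illustrative addition, not part of the original module):
# approximate the Laplacian of a smooth function on scattered points with
# 30-node RBF-FD stencils. The function name below is hypothetical.
def example_weight_matrix():
    x = np.random.uniform(-1.0, 1.0, (1000, 2))
    u = np.sin(x[:, 0])*np.cos(x[:, 1])
    # weights for d^2/dx^2 + d^2/dy^2 at each point of `x`
    L = weight_matrix(x, x, 30, [[2, 0], [0, 2]])
    return L.dot(u)   # approximately -2*np.sin(x[:, 0])*np.cos(x[:, 1])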
def prepare_nodes(nodes, domain, rho=None, iterations=20, neighbors=None,
                  dispersion_delta=0.1, pinned_nodes=None, snap_delta=0.5,
                  boundary_groups=None, boundary_groups_with_ghosts=None,
                  include_vertices=False, orient_simplices=True):
    '''
    Prepares a set of nodes for solving PDEs with the RBF and RBF-FD method.
    This includes: dispersing the nodes away from each other to ensure a more
    even spacing, snapping nodes to the boundary, determining the normal
    vectors for each node, determining the group that each node belongs to,
    creating ghost nodes, sorting the nodes so that adjacent nodes are close
    in memory, and verifying that no two nodes are anomalously close to each
    other.

    The function returns a set of nodes, the normal vectors for each node,
    and a dictionary identifying which group each node belongs to.

    Parameters
    ----------
    nodes : (n, d) float array
        An initial sampling of nodes within the domain

    domain : (p, d) float array and (q, d) int array
        Vertices of the domain and connectivity of the vertices

    rho : function, optional
        Node density function. Takes a (n, d) array of coordinates and
        returns an (n,) array of desired node densities at those coordinates.
        This is used during the node dispersion step.

    iterations : int, optional
        Number of dispersion iterations.

    neighbors : int, optional
        Number of neighboring nodes to use when calculating the repulsion
        force. This defaults to 4 for 2D nodes and 5 for 3D nodes.

    dispersion_delta : float, optional
        Scaling factor for the node step size in each iteration. The step
        size is equal to `dispersion_delta` times the distance to the nearest
        neighbor.

    pinned_nodes : (k, d) array, optional
        Nodes which do not move and only provide a repulsion force. These
        nodes are included in the set of nodes returned by this function and
        they are in the group named "pinned".

    snap_delta : float, optional
        Controls the maximum snapping distance. The maximum snapping distance
        for each node is `snap_delta` times the distance to the nearest
        neighbor. This defaults to 0.5.

    boundary_groups : dict, optional
        Dictionary defining the boundary groups. The keys are the names of
        the groups and the values are lists of simplex indices making up each
        group. This function will return a dictionary identifying which nodes
        belong to each boundary group. By default, there is a single group
        named 'all' for the entire boundary. Specifically, the default value
        is `{'all': range(len(smp))}`.

    boundary_groups_with_ghosts : list of strs, optional
        List of boundary groups that will be given ghost nodes. By default,
        no boundary groups are given ghost nodes. The groups specified here
        must exist in `boundary_groups`.

    include_vertices : bool, optional
        If `True`, then the vertices will be included in the output nodes.
        Each vertex will be assigned to the boundary group that its adjoining
        simplices are part of. If the simplices are in multiple groups, then
        the vertex will be assigned to the group containing the simplex that
        comes first in `smp`.

    orient_simplices : bool, optional
        If `False` then it is assumed that the simplices are already oriented
        such that their normal vectors point outward.

    Returns
    -------
    (m, d) float array
        Node positions

    dict
        The indices of nodes belonging to each group. There will always be a
        group called 'interior' containing the nodes that are not on the
        boundary. By default there is a group containing all the boundary
        nodes called 'boundary:all'. If `boundary_groups` was specified, then
        those groups will be included in this dictionary and their names will
        be given a 'boundary:' prefix. If `boundary_groups_with_ghosts` was
        specified then those groups of ghost nodes will be included in this
        dictionary and their names will be given a 'ghosts:' prefix.

    (m, d) float array
        Outward normal vectors for each node. If a node is not on the
        boundary then its corresponding row will contain NaNs.

    '''
    domain = as_domain(domain)
    nodes = np.asarray(nodes, dtype=float)
    assert_shape(nodes, (None, domain.dim), 'nodes')

    # the `fixed_nodes` are used to provide a repulsion force during
    # dispersion, but they do not move. TODO There is a chance that one of
    # the points in `fixed_nodes` is equal to a point in `nodes`. This
    # situation should be handled
    fixed_nodes = np.zeros((0, domain.dim), dtype=float)
    if pinned_nodes is not None:
        pinned_nodes = np.asarray(pinned_nodes, dtype=float)
        assert_shape(pinned_nodes, (None, domain.dim), 'pinned_nodes')
        fixed_nodes = np.vstack((fixed_nodes, pinned_nodes))

    if include_vertices:
        fixed_nodes = np.vstack((fixed_nodes, domain.vertices))

    for i in range(iterations):
        logger.debug('starting node dispersion iteration %s of %s'
                     % (i + 1, iterations))
        nodes = disperse(nodes, domain, rho=rho, fixed_nodes=fixed_nodes,
                         neighbors=neighbors, delta=dispersion_delta)

    # append the domain vertices to the collection of nodes if requested
    if include_vertices:
        nodes = np.vstack((nodes, domain.vertices))

    # snap nodes to the boundary, identifying which simplex each node was
    # snapped to
    logger.debug('snapping nodes to boundary ...')
    nodes, smpid = domain.snap(nodes, delta=snap_delta)
    logger.debug('done')

    # get the normal vectors for the boundary nodes
    if orient_simplices:
        logger.debug('orienting simplices ...')
        domain.orient_simplices()
        logger.debug('done')

    normals = np.full_like(nodes, np.nan)
    normals[smpid >= 0] = domain.normals[smpid[smpid >= 0]]

    # create a dictionary identifying which nodes belong to which group
    groups = {}
    groups['interior'], = (smpid == -1).nonzero()

    # append the user specified pinned nodes
    if pinned_nodes is not None:
        pinned_idx = np.arange(pinned_nodes.shape[0]) + nodes.shape[0]
        pinned_normals = np.full_like(pinned_nodes, np.nan)
        nodes = np.vstack((nodes, pinned_nodes))
        normals = np.vstack((normals, pinned_normals))
        groups['pinned'] = pinned_idx

    if boundary_groups is None:
        boundary_groups = {'all': range(len(domain.simplices))}

    # TODO: There should be a test to make sure each simplex belongs to at
    # most one group.
    if boundary_groups_with_ghosts is None:
        boundary_groups_with_ghosts = []

    # create groups for the boundary nodes
    logger.debug('grouping boundary nodes and generating ghosts ...')
    for k, v in boundary_groups.items():
        # convert the list of simplices in the boundary group to a set,
        # because it is much faster to determine membership of a set
        v = set(v)
        bnd_idx = np.array([i for i, j in enumerate(smpid) if j in v])
        groups['boundary:' + k] = bnd_idx
        if k in boundary_groups_with_ghosts:
            # append ghost nodes if requested
            dist = KDTree(nodes).query(nodes[bnd_idx], 2)[0][:, [1]]
            ghost_idx = np.arange(bnd_idx.shape[0]) + nodes.shape[0]
            ghost_nodes = nodes[bnd_idx] + 0.5*dist*normals[bnd_idx]
            ghost_normals = np.full_like(ghost_nodes, np.nan)
            nodes = np.vstack((nodes, ghost_nodes))
            normals = np.vstack((normals, ghost_normals))
            groups['ghosts:' + k] = ghost_idx

    logger.debug('done')

    # sort `nodes` so that spatially adjacent nodes are close together
    logger.debug('sorting nodes ...')
    sort_idx = neighbor_argsort(nodes)
    logger.debug('done')

    nodes = nodes[sort_idx]
    normals = normals[sort_idx]
    reverse_sort_idx = np.argsort(sort_idx)
    # map the group indices to the sorted node ordering, dropping empty
    # groups
    sorted_groups = {}
    for k, v in groups.items():
        if len(v) > 0:
            sorted_groups[k] = reverse_sort_idx[v]

    groups = sorted_groups

    logger.debug('checking the quality of the generated nodes ...')
    _check_spacing(nodes, rho)
    logger.debug('done')

    return nodes, groups, normals