Example #1
def generate_nodes(alpha, nsample, nodeitr):
    from rbf.pde.nodes import min_energy_nodes
    from rbf.pde.geometry import contains

    N = nsample * 2  # initial oversampled node count, grown until nsample interior nodes remain
    node_count = 0
    itr = nodeitr

    vert = alpha.points
    smp = np.asarray(alpha.bounds, dtype=np.int64)

    while node_count < nsample:
        logger.info("Generating %i nodes (%i iterations)..." % (N, itr))
        # create N quasi-uniformly distributed nodes
        out = min_energy_nodes(N, (vert, smp), iterations=itr)
        nodes = out[0]

        # remove nodes outside of the domain
        in_nodes = nodes[contains(nodes, vert, smp)]

        node_count = len(in_nodes)
        N = int(1.5 * N)

    logger.info("%i interior nodes generated (%i iterations)" %
                (node_count, itr))

    xyz_coords = in_nodes.reshape(-1, 3)

    return xyz_coords
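
A minimal usage sketch, assuming `np`, a module-level `logger`, and an alpha shape exposing `points` and `bounds` (such as the one built in the next example) are in scope:

xyz_coords = generate_nodes(alpha, nsample=1000, nodeitr=20)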
Example #2
def test_nodes():
    from rbf.pde.nodes import min_energy_nodes
    from rbf.pde.geometry import contains
    from dentate.alphavol import alpha_shape
    from mayavi import mlab

    obs_u = np.linspace(-0.016 * np.pi, 1.01 * np.pi, 25)
    obs_v = np.linspace(-0.23 * np.pi, 1.425 * np.pi, 25)
    obs_l = np.linspace(-1.0, 1., num=10)

    u, v, l = np.meshgrid(obs_u, obs_v, obs_l, indexing='ij')
    xyz = DG_volume(u, v, l, rotate=[-35., 0., 0.])

    print('Constructing volume...')
    vol = RBFVolume(obs_u, obs_v, obs_l, xyz, order=2)

    print('Constructing volume triangulation...')
    tri = vol.create_triangulation()

    print('Constructing alpha shape...')
    alpha = alpha_shape([], 120., tri=tri)

    # Define the problem domain
    vert = alpha.points
    smp = np.asarray(alpha.bounds, dtype=np.int64)

    N = 10000  # total number of nodes

    # create N quasi-uniformly distributed nodes
    print('Generating nodes...')
    rbf_logger = logging.Logger.manager.loggerDict['rbf.pde.nodes']
    rbf_logger.setLevel(logging.DEBUG)
    out = min_energy_nodes(N, (vert, smp), iterations=10, build_rtree=True)

    nodes = out[0]

    # remove nodes outside of the domain
    in_nodes = nodes[contains(nodes, vert, smp)]

    print('Generated %d interior nodes' % len(in_nodes))

    vol.mplot_surface(color=(0, 1, 0), opacity=0.33, ures=10, vres=10)
    mlab.points3d(*in_nodes.T, color=(1, 1, 0), scale_factor=15.0)

    mlab.show()

    return in_nodes, vol.inverse(in_nodes)
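
Called directly, the test returns the interior nodes in both coordinate systems (a usage sketch; it needs the dentate, rbf, and mayavi packages assumed by the imports above):

in_nodes_xyz, in_nodes_uvl = test_nodes()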
Example #3
def gen_min_energy_nodes(count, domain, constraint, nodeiter, dispersion_delta,
                         snap_delta):

    N = int(count * 2)  # initial oversampled node count for this layer
    node_count = 0

    while node_count < count:
        # create N quasi-uniformly distributed nodes
        def rho(x):
            return np.ones(x.shape[0])

        #nodes = rejection_sampling(N, rho, (vert, smp), start=0)

        out = min_energy_nodes(N,
                               domain,
                               iterations=nodeiter,
                               dispersion_delta=dispersion_delta,
                               snap_delta=snap_delta)
        nodes = out[0]

        # remove nodes with nan coordinates
        nodes1 = nodes[~np.isnan(nodes).any(axis=1)]

        # remove nodes outside of the domain
        vert, smp = domain
        in_nodes = nodes1[contains(nodes1, vert, smp)]
        if constraint is not None:
            current_xyz = in_nodes.reshape(-1, 3)
            valid = np.logical_and(current_xyz[:, 2] >= constraint[0],
                                   current_xyz[:, 2] <= constraint[1])
            in_nodes = in_nodes[valid]
        node_count = len(in_nodes)
        N = int(1.5 * N)
        logger.info("%i interior nodes out of %i nodes generated" %
                    (node_count, len(nodes)))

    return in_nodes
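
A minimal usage sketch on a unit square with no layer constraint; the domain and the `dispersion_delta`/`snap_delta` values are illustrative, not from the original project:

vert = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
smp = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
nodes = gen_min_energy_nodes(200, (vert, smp), None, 50,
                             dispersion_delta=0.1, snap_delta=0.5)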
Example #4
def main(config, config_prefix, types_path, template_path, geometry_path,
         output_path, output_namespace, populations, resolution, alpha_radius,
         nodeiter, optiter, io_size, chunk_size, value_chunk_size, verbose):

    config_logging(verbose)
    logger = get_script_logger(script_name)

    comm = MPI.COMM_WORLD
    rank = comm.rank
    size = comm.size

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    if rank == 0:
        if not os.path.isfile(output_path):
            input_file = h5py.File(types_path, 'r')
            output_file = h5py.File(output_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    env = Env(comm=comm, config_file=config, config_prefix=config_prefix)

    random_seed = int(env.model_config['Random Seeds']['Soma Locations'])
    random.seed(random_seed)

    layer_extents = env.geometry['Parametric Surface']['Layer Extents']
    rotate = env.geometry['Parametric Surface']['Rotation']

    (extent_u, extent_v, extent_l) = get_total_extents(layer_extents)
    vol = make_volume(extent_u,
                      extent_v,
                      extent_l,
                      rotate=rotate,
                      resolution=resolution)

    layer_alpha_shapes = {}
    layer_alpha_shape_path = 'Layer Alpha Shape/%d/%d/%d' % resolution
    if rank == 0:
        for layer, extents in viewitems(layer_extents):
            gc.collect()
            has_layer_alpha_shape = False
            if geometry_path:
                this_layer_alpha_shape_path = '%s/%s' % (
                    layer_alpha_shape_path, layer)
                this_layer_alpha_shape = load_alpha_shape(
                    geometry_path, this_layer_alpha_shape_path)
                layer_alpha_shapes[layer] = this_layer_alpha_shape
                if this_layer_alpha_shape is not None:
                    has_layer_alpha_shape = True
            if not has_layer_alpha_shape:
                this_layer_alpha_shape = make_alpha_shape(
                    extents[0],
                    extents[1],
                    alpha_radius=alpha_radius,
                    rotate=rotate,
                    resolution=resolution)
                layer_alpha_shapes[layer] = this_layer_alpha_shape
                if geometry_path:
                    save_alpha_shape(geometry_path,
                                     this_layer_alpha_shape_path,
                                     this_layer_alpha_shape)

    population_ranges = read_population_ranges(output_path, comm)[0]

    if rank == 0:
        color = 1
    else:
        color = 0

    ## comm0 contains only rank 0; the other ranks receive a separate communicator from the split
    comm0 = comm.Split(color, 0)

    for population in populations:

        (population_start, population_count) = population_ranges[population]

        pop_layers = env.geometry['Cell Distribution'][population]
        pop_layer_count = 0
        for layer, count in viewitems(pop_layers):
            pop_layer_count += count
        assert (population_count == pop_layer_count)
        if rank == 0:
            logger.info("Population %s: layer distribution is %s" %
                        (population, str(pop_layers)))

        xyz_coords = None
        xyz_coords_interp = None
        uvl_coords_interp = None
        if rank == 0:

            xyz_coords_lst = []
            xyz_coords_interp_lst = []
            uvl_coords_interp_lst = []
            for layer, count in viewitems(pop_layers):

                if count <= 0:
                    continue

                alpha = layer_alpha_shapes[layer]

                vert = alpha.points
                smp = np.asarray(alpha.bounds, dtype=np.int64)

                N = int(count * 2)  # oversampled node count for this layer
                node_count = 0

                logger.info("Generating %i nodes..." % N)

                if verbose:
                    rbf_logger = logging.Logger.manager.loggerDict[
                        'rbf.pde.nodes']
                    rbf_logger.setLevel(logging.DEBUG)

                while node_count < count:
                    # create N quasi-uniformly distributed nodes
                    out = min_energy_nodes(N, (vert, smp), iterations=nodeiter)
                    nodes = out[0]

                    # remove nodes outside of the domain
                    in_nodes = nodes[contains(nodes, vert, smp)]

                    node_count = len(in_nodes)
                    N = int(1.5 * N)

                    logger.info("%i interior nodes out of %i nodes generated" %
                                (node_count, len(nodes)))

                xyz_coords_lst.append(in_nodes.reshape(-1, 3))

            xyz_coords = np.concatenate(xyz_coords_lst)
            logger.info("Inverse interpolation of %i nodes..." %
                        len(xyz_coords))
            uvl_coords_interp = vol.inverse(xyz_coords)
            xyz_coords_interp = vol(uvl_coords_interp[:, 0],
                                    uvl_coords_interp[:, 1],
                                    uvl_coords_interp[:, 2],
                                    mesh=False).reshape(3, -1).T

            logger.info("Broadcasting generated nodes...")

        xyz_coords = comm.bcast(xyz_coords, root=0)
        xyz_coords_interp = comm.bcast(xyz_coords_interp, root=0)
        uvl_coords_interp = comm.bcast(uvl_coords_interp, root=0)

        coords = []
        coords_dict = {}
        xyz_error = np.asarray([0.0, 0.0, 0.0])

        if rank == 0:
            logger.info("Computing UVL coordinates...")

        for i in range(0, xyz_coords.shape[0]):

            coord_ind = i
            if i % size == rank:

                if uvl_in_bounds(uvl_coords_interp[coord_ind, :],
                                 layer_extents, pop_layers):
                    uvl_coords = uvl_coords_interp[coord_ind, :].ravel()
                    xyz_coords1 = xyz_coords_interp[coord_ind, :].ravel()
                else:
                    uvl_coords = None
                    xyz_coords1 = None

                if uvl_coords is not None:

                    xyz_error = np.add(
                        xyz_error,
                        np.abs(
                            np.subtract(xyz_coords[coord_ind, :],
                                        xyz_coords1)))

                    logger.info(
                        'Rank %i: cell %i: %f %f %f' %
                        (rank, i, uvl_coords[0], uvl_coords[1], uvl_coords[2]))

                    coords.append(
                        (xyz_coords1[0], xyz_coords1[1], xyz_coords1[2],
                         uvl_coords[0], uvl_coords[1], uvl_coords[2]))

        total_xyz_error = np.zeros((3, ))
        comm.Allreduce(xyz_error, total_xyz_error, op=MPI.SUM)

        coords_count = np.sum(np.asarray(comm.allgather(len(coords))))

        if rank == 0:
            logger.info('Total %i coordinates generated' % coords_count)

        mean_xyz_error = total_xyz_error / coords_count

        if rank == 0:
            logger.info(
                "mean XYZ error: %f %f %f " %
                (mean_xyz_error[0], mean_xyz_error[1], mean_xyz_error[2]))

        coords_lst = comm.gather(coords, root=0)
        if rank == 0:
            all_coords = [item for sublist in coords_lst for item in sublist]

            if coords_count < population_count:
                logger.warning("Generating additional %i coordinates " %
                               (population_count - len(all_coords)))

                safety = 0.01
                delta = population_count - len(all_coords)
                for i in range(delta):
                    for layer, count in viewitems(pop_layers):
                        if count > 0:
                            min_extent = layer_extents[layer][0]
                            max_extent = layer_extents[layer][1]
                            coord_u = np.random.uniform(
                                min_extent[0] + safety, max_extent[0] - safety)
                            coord_v = np.random.uniform(
                                min_extent[1] + safety, max_extent[1] - safety)
                            coord_l = np.random.uniform(
                                min_extent[2] + safety, max_extent[2] - safety)
                            xyz_coords = DG_volume(coord_u,
                                                   coord_v,
                                                   coord_l,
                                                   rotate=rotate).ravel()
                            all_coords.append(
                                (xyz_coords[0], xyz_coords[1], xyz_coords[2],
                                 coord_u, coord_v, coord_l))

            sampled_coords = random_subset(all_coords, int(population_count))

            sampled_coords.sort(
                key=lambda coord: coord[3])  ## sort on U coordinate

            coords_dict = {
                population_start + i: {
                    'X Coordinate': np.asarray([x_coord], dtype=np.float32),
                    'Y Coordinate': np.asarray([y_coord], dtype=np.float32),
                    'Z Coordinate': np.asarray([z_coord], dtype=np.float32),
                    'U Coordinate': np.asarray([u_coord], dtype=np.float32),
                    'V Coordinate': np.asarray([v_coord], dtype=np.float32),
                    'L Coordinate': np.asarray([l_coord], dtype=np.float32)
                }
                for (i, (x_coord, y_coord, z_coord, u_coord, v_coord,
                         l_coord)) in enumerate(sampled_coords)
            }

            append_cell_attributes(output_path,
                                   population,
                                   coords_dict,
                                   namespace=output_namespace,
                                   io_size=io_size,
                                   chunk_size=chunk_size,
                                   value_chunk_size=value_chunk_size,
                                   comm=comm0)

        comm.barrier()

    comm0.Free()
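
The helper `uvl_in_bounds` used in `main` is defined elsewhere in the project. A minimal sketch of what it plausibly checks, assuming each `layer_extents` entry is a `(min_extent, max_extent)` pair of length-3 U/V/L arrays (the indexing used in the fallback branch above):

def uvl_in_bounds(uvl, layer_extents, pop_layers):
    # hypothetical reconstruction: accept a point if it lies within the
    # U/V/L extents of any layer occupied by the population
    for layer, count in viewitems(pop_layers):
        if count <= 0:
            continue
        min_extent, max_extent = layer_extents[layer]
        if all(min_extent[k] <= uvl[k] <= max_extent[k] for k in range(3)):
            return True
    return False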
Example #5
# define the problem domain
vert = np.array([[0.0, 0.0], [2.0, 0.0], [2.0, 1.0], [1.0, 1.0], [1.0, 2.0],
                 [0.0, 2.0]])
smp = np.array([[0, 1], [1, 2], [2, 3], [3, 4], [4, 5], [5, 0]])
times = np.linspace(0.0, 10.0, 20)  # output times
n_nominal = 1000  # total number of nodes
lamb = 1.0
mu = 1.0
rho = 1.0
stencil_size = 30
order = 2
basis = rbf.basis.phs5
nodes, idx, normals = min_energy_nodes(
    n_nominal,
    (vert, smp),
    boundary_groups={'all': range(len(smp))},
    boundary_groups_with_ghosts=['all'],
    include_vertices=False)
n = nodes.shape[0]

# create initial and boundary conditions
r = np.sqrt((nodes[idx['interior'], 0] - 0.5)**2 +
            (nodes[idx['interior'], 1] - 0.5)**2)

u_init = np.zeros_like(nodes)
u_init[idx['interior'], 0] = 1.0 / (1 + (r / 0.2)**4)
u_init[idx['interior'], 1] = 1.0 / (1 + (r / 0.2)**4)

v_init = np.zeros_like(nodes)
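
For reference, the `idx` dictionary returned above maps node-group names to index arrays. A quick sanity check; the 'interior' and 'boundary:all' keys also appear in Example #8 below, while 'ghosts:all' is assumed from the same naming convention:

for key in ('interior', 'boundary:all', 'ghosts:all'):
    print(key, len(idx[key]))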
Example #6
# the enclosing `def` was truncated in this excerpt; the name `actual_rho`
# is assumed here, and `se` is presumably rbf.basis.se (squared-exponential),
# imported outside the excerpt
def actual_rho(x):
    '''
    compute the density of `nodes` and evaluate the density function at
    `x`. The output is normalized so that its maximum is 1.0
    '''
    out = np.zeros(x.shape[0])
    for n in nodes:
        out += se(x, n[None, :], eps=0.01)[:, 0]

    out /= np.max(out)
    return out


vert = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
smp = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])

nodes = min_energy_nodes(10000, (vert, smp), rho=desired_rho)[0]

# plot the nodes
fig, ax = plt.subplots()
for s in smp:
    ax.plot(vert[s, 0], vert[s, 1], 'k-')

ax.plot(nodes[:, 0], nodes[:, 1], 'k.', ms=1)
ax.set_aspect('equal')
ax.set_title('node positions')
fig.tight_layout()
plt.savefig('../figures/nodes.b.1.png')

fig, axs = plt.subplots(1, 2, figsize=(10, 4))

# evaluate and plot the node density
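# (the excerpt ends here; below is a minimal sketch of the step this
# comment introduces. The grid construction and the `actual_rho` name
# are assumptions, not part of the original script.)
grid_x, grid_y = np.meshgrid(np.linspace(0.0, 1.0, 200),
                             np.linspace(0.0, 1.0, 200))
grid = np.column_stack([grid_x.ravel(), grid_y.ravel()])
p = axs[0].scatter(grid[:, 0], grid[:, 1], s=2, c=actual_rho(grid))
fig.colorbar(p, ax=axs[0])
axs[0].set_title('actual node density')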
Example #7
boundary_groups = {'inside_corner': [0, 1],
                   'outside_corner': [2, 3, 4, 5]}
# define which boundary groups get ghost nodes
boundary_groups_with_ghosts = ['outside_corner']
# total number of nodes 
N = 500
# define a node density function. It takes an (N, D) array of positions
# and returns an (N,) array of normalized densities between 0 and 1
def rho(x):
  r = np.sqrt((x[:, 0] - 1.0)**2 + (x[:, 1] - 1.0)**2)
  return 0.2 + 0.8/((r/0.3)**4 + 1.0)

nodes, groups, normals = min_energy_nodes(
    N, 
    (vert, smp),
    rho=rho,
    boundary_groups=boundary_groups,
    boundary_groups_with_ghosts=boundary_groups_with_ghosts,
    include_vertices=True)

# plot the results
fig, ax = plt.subplots(figsize=(6, 6))
# plot the domain
for s in smp: 
  ax.plot(vert[s, 0], vert[s, 1], 'k-')

# plot the different node groups and their normal vectors
for i, (name, idx) in enumerate(groups.items()):
  ax.plot(nodes[idx, 0], nodes[idx, 1], 'C%s.' % i, label=name, ms=8)
  ax.quiver(nodes[idx, 0], nodes[idx, 1], 
            normals[idx, 0], normals[idx, 1], 
Example #8
File: fd.a.py  Project: soltesz-lab/RBF
import numpy as np
from scipy.integrate import ode
from scipy.interpolate import griddata
import matplotlib.pyplot as plt

from rbf.pde.fd import weight_matrix
from rbf.pde.nodes import min_energy_nodes
from rbf.pde.geometry import contains

# define the problem domain
vert = np.array([[0.0, 0.0], [2.0, 0.0], [2.0, 1.0], [1.0, 1.0], [1.0, 2.0],
                 [0.0, 2.0]])
smp = np.array([[0, 1], [1, 2], [2, 3], [3, 4], [4, 5], [5, 0]])
times = np.linspace(0.0, 2.0, 5)  # output times
N = 20000  # total number of nodes
nodes, idx, _ = min_energy_nodes(N, (vert, smp))  # generate nodes
# create differentiation matrices for the interior and boundary nodes
D = weight_matrix(nodes[idx['interior']], nodes, 50, [(2, 0), (0, 2)])
# create initial and boundary conditions
r = np.sqrt((nodes[idx['interior'], 0] - 0.5)**2 +
            (nodes[idx['interior'], 1] - 0.5)**2)
u_init = 1.0 / (1 + (r / 0.05)**4)  # initial u in the interior
dudt_init = np.zeros(len(idx['interior']))  # initial velocity in the interior
u_bnd = np.zeros(len(idx['boundary:all']))  # boundary conditions
# Make state vector containing the initial displacements and velocities
v = np.hstack([u_init, dudt_init])


def f(t, v):
    '''
    Function used for time integration. This calculates the time
    derivative of the current state vector `v`.
    '''