Example #1
    def multipole_to_multipole(self, key):
        """
        Combine multipole expansions of a node's children to approximate its
        own multipole expansion.
        """

        for child in hilbert.get_children(key):
            # Only process non-empty child nodes
            if self.octree.source_node_to_index[child] != -1:

                # Compute operator index
                operator_idx = (child % 8) - 1

                # Update the parent's indices with the child's
                self.source_data[key].indices.update(
                    self.source_data[child].indices)

                # Get child equivalent density
                child_equivalent_density = self.source_data[child].expansion

                # Compute parent equivalent density
                parent_equivalent_density = np.matmul(
                    self.m2m[operator_idx], child_equivalent_density)

                # Add to source data
                self.source_data[key].expansion += parent_equivalent_density
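
The accumulation above is just a sum of matrix-vector products over the non-empty children. A minimal, self-contained sketch of that idea, with random matrices standing in for the precomputed m2m operators and random vectors for the child expansions (toy data only, not the library's actual operators):

import numpy as np

rng = np.random.default_rng(0)
n = 6                                          # size of a toy expansion
m2m = rng.standard_normal((8, n, n))           # stand-ins for the 8 M2M operators
child_expansions = rng.standard_normal((8, n)) # stand-ins for child expansions

# Accumulate exactly as in multipole_to_multipole: translate each child's
# expansion with its operator and add it to the parent's expansion.
parent_expansion = np.zeros(n)
for operator_idx, child_expansion in enumerate(child_expansions):
    parent_expansion += np.matmul(m2m[operator_idx], child_expansion)

# Equivalent vectorised form.
assert np.allclose(parent_expansion,
                   np.einsum('ijk,ik->j', m2m, child_expansions))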
Example #2
def test_l2l(npoints, octree, l2l):

    parent_key = 9
    child_key = hilbert.get_children(parent_key)[-1]

    x0 = octree.center
    r0 = octree.radius

    parent_center = hilbert.get_center_from_key(parent_key, x0, r0)
    child_center = hilbert.get_center_from_key(child_key, x0, r0)

    parent_level = hilbert.get_level(parent_key)
    child_level = hilbert.get_level(child_key)

    parent_equivalent_density = np.ones(shape=(npoints,))

    operator_idx = (child_key % 8) - 1

    child_equivalent_density = np.matmul(l2l[operator_idx], parent_equivalent_density)

    child_equivalent_surface = operator.scale_surface(
        surface=SURFACE,
        radius=r0,
        level=child_level,
        center=child_center,
        alpha=2.95
    )

    parent_equivalent_surface = operator.scale_surface(
        surface=SURFACE,
        radius=r0,
        level=parent_level,
        center=parent_center,
        alpha=2.95
    )

    local_point = np.array([list(child_center)])

    parent_direct = operator.p2p(
        kernel_function=KERNEL_FUNCTION,
        targets=local_point,
        sources=parent_equivalent_surface,
        source_densities=parent_equivalent_density
    )

    child_direct = operator.p2p(
        kernel_function=KERNEL_FUNCTION,
        targets=local_point,
        sources=child_equivalent_surface,
        source_densities=child_equivalent_density
    )

    assert np.isclose(parent_direct.density, child_direct.density, rtol=RTOL)
Example #3
def test_m2m(npoints, octree, m2m):

    parent_key = 0
    child_key = hilbert.get_children(parent_key)[0]

    x0 = octree.center
    r0 = octree.radius

    parent_center = hilbert.get_center_from_key(parent_key, x0, r0)
    child_center = hilbert.get_center_from_key(child_key, x0, r0)

    parent_level = hilbert.get_level(parent_key)
    child_level = hilbert.get_level(child_key)

    operator_idx = (child_key % 8) - 1

    child_equivalent_density = np.ones(shape=(npoints,))

    parent_equivalent_density = np.matmul(m2m[operator_idx], child_equivalent_density)

    distant_point = np.array([[1e3, 0, 0]])

    child_equivalent_surface = operator.scale_surface(
        surface=SURFACE,
        radius=r0,
        level=child_level,
        center=child_center,
        alpha=1.05
        )
    parent_equivalent_surface = operator.scale_surface(
        surface=SURFACE,
        radius=r0,
        level=parent_level,
        center=parent_center,
        alpha=1.05
        )

    parent_direct = operator.p2p(
        kernel_function=KERNEL_FUNCTION,
        targets=distant_point,
        sources=parent_equivalent_surface,
        source_densities=parent_equivalent_density
        )

    child_direct = operator.p2p(
        kernel_function=KERNEL_FUNCTION,
        targets=distant_point,
        sources=child_equivalent_surface,
        source_densities=child_equivalent_density
        )

    assert np.isclose(parent_direct.density, child_direct.density, rtol=RTOL)
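
Both translation tests above rely on module-level constants (SURFACE, KERNEL_FUNCTION, RTOL) defined outside these snippets. A plausible set of definitions, consistent with how they are used here and with the main script in Example #8; the import paths, kernel name and concrete values are assumptions, not this project's actual source:

from fmm import operator            # import path assumed
from fmm.kernel import KERNELS      # import path assumed

ORDER = 5                                   # expansion order (illustrative)
SURFACE = operator.compute_surface(ORDER)   # discretised reference surface
KERNEL_FUNCTION = KERNELS['laplace']()      # kernel name assumed
RTOL = 1e-5                                 # tolerance used by np.isclose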
Example #4
    def local_to_local(self, key):
        """Translate local expansion of a node to it's children."""

        parent_equivalent_density = self.target_data[key].expansion

        for child in hilbert.get_children(key):
            if self.octree.target_node_to_index[child] != -1:

                # Compute operator index
                operator_idx = (child % 8) - 1

                # Update the child's indices with the parent's
                self.target_data[child].indices.update(
                    self.target_data[key].indices)

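                # Translate the parent's local expansion down to this child.
                # Unlike M2M, each child's expansion is assigned rather than
                # accumulated, since every child is visited exactly once here.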
                child_equivalent_density = np.matmul(
                    self.l2l[operator_idx], parent_equivalent_density)

                self.target_data[child].expansion = child_equivalent_density
Example #5
def test_interaction_list_assignment(tree):
    """Check that the interaction list has been correctly assigned."""

    source_nodes_set = set(tree.non_empty_source_nodes)

    for target_index, target in enumerate(tree.non_empty_target_nodes):
        level = hilbert.get_level(target)
        if level < 2:
            continue

        parent = hilbert.get_parent(target)
        parent_index = tree.target_node_to_index[parent]
        parent_neighbors = tree.target_neighbors[parent_index]

        target_neighbors = tree.target_neighbors[
            tree.target_node_to_index[target]]

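        # The parent has up to 27 same-level neighbours (the 3x3x3 block of
        # boxes including the parent itself); source-free slots are marked -1.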
        for neighbor_index in range(27):
            parent_neighbor = parent_neighbors[neighbor_index]

            if parent_neighbor == -1:
                # The corresponding neighbor has no sources.
                assert np.all(tree.interaction_list[target_index,
                                                    neighbor_index] == -1)

            else:
                # There are sources in the neighbor
                for child_index, child in enumerate(
                        hilbert.get_children(parent_neighbor)):
                    if child in source_nodes_set and child not in set(
                            target_neighbors):
                        assert tree.interaction_list[target_index,
                                                     neighbor_index,
                                                     child_index] == child  # pylint: disable=C0301
                    else:
                        assert tree.interaction_list[target_index,
                                                     neighbor_index,
                                                     child_index] == -1  # pylint: disable=C0301
Example #6
def test_get_children(parent, expected):
    assert np.array_equal(hilbert.get_children(parent), expected)
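
The parent/expected pairs for this test come from a pytest parametrisation that is not shown here. A hedged sketch of what one case might look like, assuming a level-offset key scheme in which the root (key 0) has the eight consecutive level-1 keys 1..8 as children; this is inferred from the (child % 8) - 1 operator indexing above, not a confirmed property of fmm.hilbert:

import numpy as np
import pytest

from fmm import hilbert  # import path assumed


@pytest.mark.parametrize(
    "parent, expected",
    [
        # Hypothetical case: root's children as the level-1 keys 1..8.
        (0, np.arange(1, 9)),
    ],
)
def test_get_children(parent, expected):
    assert np.array_equal(hilbert.get_children(parent), expected)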
Example #7
def numba_compute_interaction_list(targets, target_neighbors,
                                   source_node_to_index, target_node_to_index):
    """
    Compute the interaction list for all given target nodes.

    Parameters:
    -----------
    targets : np.array(shape=(ntargets,), dtype=np.int64)
        Target nodes, referenced by Hilbert key, for which interaction lists are
        being computed.
    target_neighbors : np.array(shape=(ntargets, 27), dtype=np.int64)
        Contains information on non-empty source neighbor nodes of each target,
        computed via `numba_compute_neighbors`.
    source_node_to_index : np.array(shape=(nsources,), dtype=np.int64)
    target_node_to_index : np.array(shape=(ntargets,), dtype=np.int64)

    Returns:
    --------
    np.array(shape=(ntargets, 27, 8), dtype=np.int64)
        Interaction list; each of the ntargets targets has an associated
        (27, 8) matrix of source node keys (-1 marks an empty entry).
    """

    ntargets = len(targets)

    interaction_list = -1 * np.ones((ntargets, 27, 8), dtype=np.int64)

    for target_index, target in enumerate(targets):
        target_level = hilbert.get_level(target)

        if target_level >= 2:

            # Find parent
            parent = hilbert.get_parent(target)

            # Find parent neighbors
            parent_index = target_node_to_index[parent]
            parent_neighbors = target_neighbors[parent_index]

            for parent_neighbor_index, parent_neighbor in enumerate(
                    parent_neighbors):

                if parent_neighbor != -1:

                    parent_neighbor_children = hilbert.get_children(
                        parent_neighbor)

                    for neighbor_child_index, neighbor_child in enumerate(
                            parent_neighbor_children):

                        is_neighbor = _is_neighbor(
                            target_neighbors=target_neighbors,
                            target_index=target_index,
                            key=neighbor_child)

                        # Only add the child if it contains sources and is
                        # not itself adjacent to the target.
                        if source_node_to_index[
                                neighbor_child] != -1 and not is_neighbor:
                            interaction_list[
                                target_index, parent_neighbor_index,
                                neighbor_child_index] = neighbor_child

    return interaction_list
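
The _is_neighbor helper used above is not included in this snippet. A minimal sketch of an implementation consistent with how it is called (target_neighbors, target_index, key), assuming it is also compiled with numba; this is a guess at its behaviour, not the project's actual code:

import numba


@numba.njit(cache=True)
def _is_neighbor(target_neighbors, target_index, key):
    """Return True if `key` appears in the target's 27-entry neighbour list."""
    for neighbor in target_neighbors[target_index]:
        if neighbor == key:
            return True
    return False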
Example #8
def main(**config):
    """
    Main script, configure using config.json file in module root.
    """
    start = time.time()

    # Setup Multiproc
    processes = os.cpu_count()
    pool = multiproc.setup_pool(processes=processes)

    data_dirpath = PARENT / f"{config['data_dirname']}/"
    operator_dirpath = PARENT / f"{config['operator_dirname']}/"

    # Step 0: Construct Octree and load Python config objs
    print("source filename", data_dirpath)

    sources = data.load_hdf5_to_array(config['source_filename'],
                                      config['source_filename'], data_dirpath)

    targets = data.load_hdf5_to_array(config['target_filename'],
                                      config['target_filename'], data_dirpath)

    source_densities = data.load_hdf5_to_array(
        config['source_densities_filename'],
        config['source_densities_filename'], data_dirpath)

    octree = Octree(sources, targets, config['octree_max_level'],
                    source_densities)

    # Load required Python objects
    kernel = KERNELS[config['kernel']]()

    # Step 1: Compute a surface of a given order
    # Check if surface already exists
    if data.file_in_directory(config['surface_filename'], operator_dirpath):
        print(f"Already Computed Surface of Order {config['order']}")
        print(f"Loading ...")
        surface = data.load_hdf5_to_array(config['surface_filename'],
                                          config['surface_filename'],
                                          operator_dirpath)

    else:
        print(f"Computing Surface of Order {config['order']}")
        surface = operator.compute_surface(config['order'])

        print("Saving Surface to HDF5")
        data.save_array_to_hdf5(operator_dirpath, config['surface_filename'],
                                surface)

    # Step 2: Use surfaces to compute inverse of check to equivalent Gram matrix.
    # This is a useful quantity that will form the basis of most operators.

    if data.file_in_directory('uc2e_u', operator_dirpath):
        print(
            f"Already Computed Inverse of Check To Equivalent Kernel of Order {config['order']}"
        )
        print("Loading...")

        # Upward check to upward equivalent
        uc2e_u = data.load_hdf5_to_array('uc2e_u', 'uc2e_u', operator_dirpath)
        uc2e_v = data.load_hdf5_to_array('uc2e_v', 'uc2e_v', operator_dirpath)

        # Downward check to downward equivalent
        dc2e_u = data.load_hdf5_to_array('dc2e_u', 'dc2e_u', operator_dirpath)
        dc2e_v = data.load_hdf5_to_array('dc2e_v', 'dc2e_v', operator_dirpath)

    else:
        print(
            f"Computing Inverse of Check To Equivalent Gram Matrix of Order {config['order']}"
        )

        # Compute upward check surface and upward equivalent surface
        # These are computed in decomposed form, via the SVD of the Gram matrix
        # of these two surfaces

        upward_equivalent_surface = operator.scale_surface(
            surface=surface,
            radius=octree.radius,
            level=0,
            center=octree.center,
            alpha=config['alpha_inner'])

        upward_check_surface = operator.scale_surface(
            surface=surface,
            radius=octree.radius,
            level=0,
            center=octree.center,
            alpha=config['alpha_outer'])

        uc2e_v, uc2e_u = operator.compute_check_to_equivalent_inverse(
            kernel_function=kernel,
            check_surface=upward_check_surface,
            equivalent_surface=upward_equivalent_surface,
            cond=None)

        dc2e_v, dc2e_u = operator.compute_check_to_equivalent_inverse(
            kernel_function=kernel,
            check_surface=upward_equivalent_surface,
            equivalent_surface=upward_check_surface,
            cond=None)

        # Save matrices
        print("Saving Inverse of Check To Equivalent Matrices")
        data.save_array_to_hdf5(operator_dirpath, 'uc2e_v', uc2e_v)
        data.save_array_to_hdf5(operator_dirpath, 'uc2e_u', uc2e_u)
        data.save_array_to_hdf5(operator_dirpath, 'dc2e_v', dc2e_v)
        data.save_array_to_hdf5(operator_dirpath, 'dc2e_u', dc2e_u)

    # Step 3: Compute M2M/L2L operators
    if (data.file_in_directory('m2m', operator_dirpath)
            and data.file_in_directory('l2l', operator_dirpath)):
        print(
            f"Already Computed M2M & L2L Operators of Order {config['order']}")

    else:
        parent_center = octree.center
        parent_radius = octree.radius
        parent_level = 0
        child_level = 1

        child_centers = [
            hilbert.get_center_from_key(child, parent_center, parent_radius)
            for child in hilbert.get_children(0)
        ]

        parent_upward_check_surface = operator.scale_surface(
            surface=surface,
            radius=octree.radius,
            level=parent_level,
            center=octree.center,
            alpha=config['alpha_outer'])

        m2m = []
        l2l = []

        loading = len(child_centers)

        scale = (1 / kernel.scale)**(child_level)

        print(f"Computing M2M & L2L Operators of Order {config['order']}")
        for child_idx, child_center in enumerate(child_centers):
            print(f'Computed ({child_idx+1}/{loading}) M2M/L2L operators')

            child_upward_equivalent_surface = operator.scale_surface(
                surface=surface,
                radius=octree.radius,
                level=child_level,
                center=child_center,
                alpha=config['alpha_inner'])

            pc2ce = operator.gram_matrix(
                kernel_function=kernel,
                targets=parent_upward_check_surface,
                sources=child_upward_equivalent_surface,
            )

            # Compute M2M operator for this octant
            tmp = np.matmul(uc2e_u, pc2ce)
            m2m.append(np.matmul(uc2e_v, tmp))

            # Compute L2L operator for this octant
            cc2pe = operator.gram_matrix(
                kernel_function=kernel,
                targets=child_upward_equivalent_surface,
                sources=parent_upward_check_surface)

            tmp = np.matmul(dc2e_u, cc2pe)
            l2l.append(np.matmul(scale * dc2e_v, tmp))

        # Save m2m & l2l operators, index is equivalent to their Hilbert key
        m2m = np.array(m2m)
        l2l = np.array(l2l)
        print("Saving M2M & L2L Operators")
        data.save_array_to_hdf5(operator_dirpath, 'm2m', m2m)
        data.save_array_to_hdf5(operator_dirpath, 'l2l', l2l)

    # Step 4: Compute M2L operators

    # Directory in which to store M2L computations
    m2l_dirpath = operator_dirpath
    current_level = 2

    already_computed = False

    while current_level <= config['octree_max_level']:

        m2l_filename = f'm2l_level_{current_level}'

        if data.file_in_directory(m2l_filename, operator_dirpath, ext='pkl'):
            already_computed = True

        if already_computed:
            print(f"Already Computed M2L operators for level {current_level}")

        else:

            print(f"Computing M2L Operators for Level {current_level}")

            leaves = np.arange(hilbert.get_level_offset(current_level),
                               hilbert.get_level_offset(current_level + 1))

            loading = 0

            m2l = [[] for leaf in range(len(leaves))]

            index_to_key = [None for leaf in range(len(leaves))]

            index_to_key_filename = f'index_to_key_level_{current_level}'

            args = []

            # Gather arguments needed to send out to processes, and create index
            # mapping
            for target_idx, target in enumerate(leaves):

                interaction_list = hilbert.get_interaction_list(target)

                # Create index mapping for looking up the m2l operator
                index_to_key[target_idx] = interaction_list

                # Add arg to args for parallel mapping
                arg = (target, kernel, surface, config['alpha_inner'],
                       octree.center, octree.radius, dc2e_v, dc2e_u,
                       interaction_list)

                args.append(arg)

            # Submit tasks to process pool
            m2l = pool.starmap(compute_m2l_matrices, args)

            # Convert results to matrix
            m2l = np.array([np.array(l) for l in m2l])

            print(f"Saving Dense M2L Operators for level {current_level}")
            data.save_pickle(m2l, m2l_filename, m2l_dirpath)
            data.save_pickle(index_to_key, index_to_key_filename, m2l_dirpath)

        current_level += 1
        already_computed = False

    minutes, seconds = utils.time.seconds_to_minutes(time.time() - start)
    print(
        f"Total time elapsed {minutes:.0f} minutes and {seconds:.0f} seconds")