Example #1
def reassemble_volume_field(rcon, global_discr, local_discr, field):
    from pytools import reverse_dictionary
    local2global_element = reverse_dictionary(
        local_discr.global2local_elements)

    send_packet = {}
    for eg in local_discr.element_groups:
        for el, eslice in zip(eg.members, eg.ranges):
            send_packet[local2global_element[el.id]] = field[eslice]

    def reduction(a, b):
        a.update(b)
        return a

    gfield_parts = rcon.communicator.reduce(send_packet,
                                            root=rcon.head_rank,
                                            op=reduction)

    if rcon.is_head_rank:
        result = global_discr.volume_zeros()
        for eg in global_discr.element_groups:
            for el, eslice in zip(eg.members, eg.ranges):
                my_part = gfield_parts[el.id]
                assert len(my_part) == eslice.stop - eslice.start
                result[eslice] = my_part
        return result
    else:
        return None
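All of these snippets lean on pytools.reverse_dictionary to invert the global-to-local numbering maps. A minimal sketch of what such an inversion does (not pytools' exact source; it assumes the mapping is one-to-one, since duplicate values cannot be inverted):

def reverse_dictionary(the_dict):
    # invert a one-to-one mapping; complain if two keys share a value
    result = {}
    for key, value in the_dict.items():
        if value in result:
            raise RuntimeError("mapping is not one-to-one, cannot reverse")
        result[value] = key
    return result

global2local = {101: 0, 205: 1, 399: 2}
assert reverse_dictionary(global2local) == {0: 101, 1: 205, 2: 399}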
Example #3
    def _setup_neighbor_connections(self):
        comm = self.context.communicator

        # Why is this barrier needed? Some of our ranks may arrive at this
        # point early and start sending packets to ranks that are still stuck
        # in previous wildcard-recv loops. These receivers will then be very
        # confused by packets they didn't expect, and, once they reach their
        # recv bit in *this* subroutine, will wait for packets that will never
        # arrive. This same argument does not apply to other recv()s in this
        # file because they are targeted and thus benefit from MPI's
        # non-overtaking rule.
        #
        # Parallel programming is fun.
        comm.Barrier()

        if self.neighbor_ranks:
            # send interface information to neighboring ranks -----------------
            from pytools import reverse_dictionary
            local2global_vertex_indices = \
                    reverse_dictionary(self.global2local_vertex_indices)

            send_requests = []

            for rank in self.neighbor_ranks:
                bdry_tag = hedge.mesh.TAG_RANK_BOUNDARY(rank)
                rank_bdry = self.subdiscr.mesh.tag_to_boundary[bdry_tag]
                rank_discr_boundary = self.subdiscr.get_boundary(bdry_tag)

                # a list of global vertex numbers for each face
                my_vertices_global = [
                        tuple(local2global_vertex_indices[vi]
                            for vi in el.faces[face_nr])
                        for el, face_nr in rank_bdry]

                # a list of node coordinates, indicating the order
                # in which nodal values will be sent; this is for
                # testing only and could (potentially) be omitted

                my_node_coords = []
                for el, face_nr in rank_bdry:
                    eslice, ldis = self.subdiscr.find_el_data(el.id)
                    findices = ldis.face_indices()[face_nr]

                    my_node_coords.append(
                            [self.nodes[eslice.start+i] for i in findices])

                # compile a list of FluxFace.h values for unification
                # across the rank boundary

                my_h_values = [rank_discr_boundary.find_facepair_side(el_face).h
                        for el_face in rank_bdry]

                packet = (my_vertices_global, my_node_coords, my_h_values)

                send_requests.append(comm.isend(packet, dest=rank, tag=0))

            received_packets = {}
            while len(received_packets) < len(self.neighbor_ranks):
                status = mpi.Status()
                received_packet = comm.recv(tag=0, source=mpi.ANY_SOURCE, status=status)
                received_packets[status.source] = received_packet

            mpi.Request.Waitall(send_requests)

            # process received packets ----------------------------------------
            from pytools import flatten

            # nb_ stands for neighbor_

            self.from_neighbor_maps = {}

            for rank, (nb_all_facevertices_global, nb_node_coords, nb_h_values) in \
                    received_packets.iteritems():
                bdry_tag = hedge.mesh.TAG_RANK_BOUNDARY(rank)
                rank_bdry = self.subdiscr.mesh.tag_to_boundary[bdry_tag]
                rank_discr_boundary = self.subdiscr.get_boundary(bdry_tag)

                flat_nb_node_coords = list(flatten(nb_node_coords))

                # step 1: find start node indices for each
                # of the neighbor's elements
                nb_face_starts = [0]
                for node_coords in nb_node_coords[:-1]:
                    nb_face_starts.append(
                            nb_face_starts[-1]+len(node_coords))

                # step 2: match faces by matching vertices
                nb_face_order = dict(
                        (frozenset(vertices), i)
                        for i, vertices in enumerate(nb_all_facevertices_global))

                # step 3: make a list of indices into the data we
                # receive from our neighbor that'll tell us how
                # to reshuffle them to match our node order
                from_indices = []

                shuffled_indices_cache = {}

                def get_shuffled_indices(face_node_count, shuffle_op):
                    try:
                        return shuffled_indices_cache[shuffle_op]
                    except KeyError:
                        unshuffled_indices = range(face_node_count)
                        result = shuffled_indices_cache[shuffle_op] = \
                                shuffle_op(unshuffled_indices)
                        return result

                for el, face_nr in rank_bdry:
                    eslice, ldis = self.subdiscr.find_el_data(el.id)

                    my_vertices = el.faces[face_nr]
                    my_global_vertices = tuple(local2global_vertex_indices[vi]
                            for vi in my_vertices)

                    face_node_count = ldis.face_node_count()
                    try:
                        nb_face_idx = nb_face_order[frozenset(my_global_vertices)]
                        # continue below in else part
                    except KeyError:
                        # this happens if my_global_vertices is not a permutation
                        # of the neighbor's face vertices. Periodicity is the only
                        # reason why that would be so.
                        my_vertices_there, axis = self.global_periodic_opposite_faces[
                                my_global_vertices]
                        nb_face_idx = nb_face_order[frozenset(my_vertices_there)]

                        his_vertices_here, axis2 = self.global_periodic_opposite_faces[
                                nb_all_facevertices_global[nb_face_idx]]

                        assert axis == axis2

                        nb_face_start = nb_face_starts[nb_face_idx]

                        shuffle_op = \
                                ldis.get_face_index_shuffle_to_match(
                                        my_global_vertices,
                                        his_vertices_here)

                        shuffled_other_node_indices = [nb_face_start+i
                                for i in get_shuffled_indices(
                                    face_node_count, shuffle_op)]

                        from_indices.extend(shuffled_other_node_indices)

                        # check if the nodes really match up
                        if "parallel_setup" in self.debug:
                            my_node_indices = [eslice.start+i for i in ldis.face_indices()[face_nr]]

                            for my_i, other_i in zip(my_node_indices, shuffled_other_node_indices):
                                dist = self.nodes[my_i]-flat_nb_node_coords[other_i]
                                dist[axis] = 0
                                assert la.norm(dist) < 1e-14
                    else:
                        # continue handling of nonperiodic case
                        nb_global_vertices = nb_all_facevertices_global[nb_face_idx]

                        nb_face_start = nb_face_starts[nb_face_idx]

                        shuffle_op = \
                                ldis.get_face_index_shuffle_to_match(
                                        my_global_vertices,
                                        nb_global_vertices)

                        shuffled_other_node_indices = [nb_face_start+i
                                for i in get_shuffled_indices(
                                    face_node_count, shuffle_op)]

                        from_indices.extend(shuffled_other_node_indices)

                        # check if the nodes really match up
                        if "parallel_setup" in self.debug:
                            my_node_indices = [eslice.start+i
                                    for i in ldis.face_indices()[face_nr]]

                            for my_i, other_i in zip(my_node_indices, shuffled_other_node_indices):
                                dist = self.nodes[my_i]-flat_nb_node_coords[other_i]
                                assert la.norm(dist) < 1e-14

                    # finally, unify FluxFace.h values across boundary
                    nb_h = nb_h_values[nb_face_idx]
                    flux_face = rank_discr_boundary.find_facepair_side((el, face_nr))
                    flux_face.h = max(nb_h, flux_face.h)

                if "parallel_setup" in self.debug:
                    assert len(from_indices) == len(flat_nb_node_coords)

                # construct from_neighbor_map
                self.from_neighbor_maps[rank] = \
                        self.subdiscr.prepare_from_neighbor_map(from_indices)
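The send/receive choreography in this example is a reusable pattern: barrier first, post nonblocking sends to every neighbor, drain wildcard receives until one packet per neighbor has arrived, then wait on the send requests. A self-contained sketch of that pattern in plain mpi4py terms (the example itself goes through hedge's boost.mpi-style wrapper, so the call names differ slightly):

from mpi4py import MPI

def exchange_with_neighbors(comm, neighbor_ranks, my_packet):
    # guard against fast ranks sending into a neighbor's *previous*
    # wildcard-recv loop (see the barrier comment in the example above)
    comm.Barrier()

    send_requests = [comm.isend(my_packet, dest=rank, tag=0)
                     for rank in neighbor_ranks]

    received = {}
    while len(received) < len(neighbor_ranks):
        status = MPI.Status()
        packet = comm.recv(source=MPI.ANY_SOURCE, tag=0, status=status)
        received[status.source] = packet

    MPI.Request.Waitall(send_requests)
    return received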
Example #4
    def build_mesh(self,
                   periodicity,
                   allow_internal_boundaries,
                   tag_mapper,
                   boundary_tagger=None):
        # figure out dimensionalities
        vol_dim = max(
            el.el_type.dimensions
            for key, el in self.gmsh_vertex_nrs_to_element.iteritems())

        vol_elements = [
            el for key, el in self.gmsh_vertex_nrs_to_element.iteritems()
            if el.el_type.dimensions == vol_dim
        ]

        # build hedge-compatible elements
        from hedge.mesh.element import TO_CURVED_CLASS

        hedge_vertices = []
        hedge_elements = []

        gmsh_node_nr_to_hedge_vertex_nr = {}
        hedge_el_to_gmsh_element = {}

        def get_vertex_nr(gmsh_node_nr):
            try:
                return gmsh_node_nr_to_hedge_vertex_nr[gmsh_node_nr]
            except KeyError:
                hedge_vertex_nr = len(hedge_vertices)
                hedge_vertices.append(self.nodes[gmsh_node_nr])
                gmsh_node_nr_to_hedge_vertex_nr[gmsh_node_nr] = hedge_vertex_nr
                return hedge_vertex_nr

        for el_nr, gmsh_el in enumerate(vol_elements):
            el_map = LocalToGlobalMap(
                [self.nodes[ni] for ni in gmsh_el.node_indices],
                gmsh_el.el_type)
            is_affine = el_map.is_affine()

            el_class = gmsh_el.el_type.geometry
            if not is_affine:
                try:
                    el_class = TO_CURVED_CLASS[el_class]
                except KeyError:
                    raise NotImplementedError(
                        "unsupported curved gmsh element type %s" % el_class)

            vertex_indices = [
                get_vertex_nr(gmsh_node_nr)
                for gmsh_node_nr in gmsh_el.gmsh_vertex_indices
            ]

            if is_affine:
                hedge_el = el_class(el_nr, vertex_indices, hedge_vertices)
            else:
                hedge_el = el_class(el_nr, vertex_indices, el_map)

            hedge_elements.append(hedge_el)
            hedge_el_to_gmsh_element[hedge_el] = gmsh_el

        from pytools import reverse_dictionary
        hedge_vertex_nr_to_gmsh_node_nr = reverse_dictionary(
            gmsh_node_nr_to_hedge_vertex_nr)

        del vol_elements

        def volume_tagger(el, all_v):
            return [
                self.tag_name_map[tag_nr, el.dimensions]
                for tag_nr in hedge_el_to_gmsh_element[el].tag_numbers
                if (tag_nr, el.dimensions) in self.tag_name_map
            ]

        if boundary_tagger is None:

            def boundary_tagger(fvi, el, fn, all_v):
                gmsh_vertex_nrs = frozenset(
                    hedge_vertex_nr_to_gmsh_node_nr[face_vertex_index]
                    for face_vertex_index in fvi)

                try:
                    gmsh_element = self.gmsh_vertex_nrs_to_element[
                        gmsh_vertex_nrs]
                except KeyError:
                    return []
                else:
                    x = [
                        self.tag_name_map[tag_nr, el.dimensions - 1]
                        for tag_nr in gmsh_element.tag_numbers
                        if (tag_nr, el.dimensions - 1) in self.tag_name_map
                    ]
                    if len(x) > 1:
                        from pudb import set_trace
                        set_trace()
                    return x

        vertex_array = np.array(hedge_vertices, dtype=np.float64)
        pt_dim = vertex_array.shape[-1]
        if pt_dim != vol_dim:
            from warnings import warn
            warn("Found %d-dimensional mesh embedded in %d-dimensional space. "
                 "Hedge only supports meshes of zero codimension (for now). "
                 "Maybe you want to set force_dimension=%d?" %
                 (vol_dim, pt_dim, vol_dim))

        from hedge.mesh import make_conformal_mesh_ext
        return make_conformal_mesh_ext(
            vertex_array,
            hedge_elements,
            boundary_tagger=boundary_tagger,
            volume_tagger=volume_tagger,
            periodicity=periodicity,
            allow_internal_boundaries=allow_internal_boundaries)
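get_vertex_nr above is a small renumbering idiom: hand out dense, zero-based vertex numbers in order of first use and memoize them, so only gmsh nodes that are actually referenced become hedge vertices. The idiom in isolation (names here are illustrative, not hedge API):

def make_renumberer():
    numbering = {}

    def get_number(key):
        # assign dense ids in order of first appearance
        try:
            return numbering[key]
        except KeyError:
            numbering[key] = new_nr = len(numbering)
            return new_nr

    return get_number, numbering

get_nr, numbering = make_renumberer()
assert [get_nr(k) for k in ["c", "a", "c", "b"]] == [0, 1, 0, 2]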
Example #5
def find_neighbor_vol_indices(my_discr,
                              my_part_data,
                              nb_discr,
                              nb_part_data,
                              debug=False):

    from pytools import reverse_dictionary
    l2g_vertex_indices = \
            reverse_dictionary(my_part_data.global2local_vertex_indices)
    nb_l2g_vertex_indices = \
            reverse_dictionary(nb_part_data.global2local_vertex_indices)

    my_bdry_tag = my_part_data.part_boundary_tags[nb_part_data.part_nr]
    nb_bdry_tag = nb_part_data.part_boundary_tags[my_part_data.part_nr]

    my_mesh_bdry = my_part_data.mesh.tag_to_boundary[my_bdry_tag]
    nb_mesh_bdry = nb_part_data.mesh.tag_to_boundary[nb_bdry_tag]

    my_discr_bdry = my_discr.get_boundary(my_bdry_tag)
    nb_discr_bdry = nb_discr.get_boundary(nb_bdry_tag)

    nb_vertices_to_face = dict((frozenset(el.faces[face_nr]), (el, face_nr))
                               for el, face_nr in nb_mesh_bdry)

    from_indices = []

    shuffled_indices_cache = {}

    def get_shuffled_indices(face_node_count, shuffle_op):
        try:
            return shuffled_indices_cache[shuffle_op]
        except KeyError:
            unshuffled_indices = range(face_node_count)
            result = shuffled_indices_cache[shuffle_op] = \
                    shuffle_op(unshuffled_indices)
            return result

    for my_el, my_face_nr in my_mesh_bdry:
        eslice, ldis = my_discr.find_el_data(my_el.id)

        my_vertices = my_el.faces[my_face_nr]
        my_global_vertices = tuple(l2g_vertex_indices[vi]
                                   for vi in my_vertices)

        face_node_count = ldis.face_node_count()
        try:
            nb_vertices = frozenset(
                nb_part_data.global2local_vertex_indices[vi]
                for vi in my_global_vertices)
            # continue below in else part
        except KeyError:
            # this happens if my_global_vertices is not a permutation
            # of the neighbor's face vertices. Periodicity is the only
            # reason why that would be so.
            my_global_vertices_there, axis = my_part_data.global_periodic_opposite_faces[
                my_global_vertices]

            nb_vertices = frozenset(
                nb_part_data.global2local_vertex_indices[vi]
                for vi in my_global_vertices_there)

            nb_el, nb_face_nr = nb_vertices_to_face[nb_vertices]
            nb_global_vertices_there = tuple(nb_l2g_vertex_indices[vi]
                                             for vi in nb_el.faces[nb_face_nr])

            nb_global_vertices, axis2 = nb_part_data.global_periodic_opposite_faces[
                nb_global_vertices_there]

            assert axis == axis2

            nb_face_start = nb_discr_bdry \
                    .find_facepair((nb_el, nb_face_nr)) \
                    .opp.el_base_index

            shuffle_op = \
                    ldis.get_face_index_shuffle_to_match(
                            my_global_vertices,
                            nb_global_vertices)

            shuffled_nb_node_indices = [
                nb_face_start + i
                for i in get_shuffled_indices(face_node_count, shuffle_op)
            ]

            from_indices.extend(shuffled_nb_node_indices)

            # check if the nodes really match up
            if debug and ldis.has_facial_nodes:
                my_node_indices = [
                    eslice.start + i for i in ldis.face_indices()[my_face_nr]
                ]

                for my_i, nb_i in zip(my_node_indices,
                                      shuffled_nb_node_indices):
                    dist = my_discr.nodes[my_i] - nb_discr_bdry.nodes[nb_i]
                    dist[axis] = 0
                    assert la.norm(dist) < 1e-14
        else:
            # continue handling of nonperiodic case
            nb_el, nb_face_nr = nb_vertices_to_face[nb_vertices]
            nb_global_vertices = tuple(nb_l2g_vertex_indices[vi]
                                       for vi in nb_el.faces[nb_face_nr])

            nb_face_start = nb_discr_bdry \
                    .find_facepair((nb_el, nb_face_nr)) \
                    .opp.el_base_index

            shuffle_op = \
                    ldis.get_face_index_shuffle_to_match(
                            my_global_vertices,
                            nb_global_vertices)

            shuffled_nb_node_indices = [
                nb_face_start + i
                for i in get_shuffled_indices(face_node_count, shuffle_op)
            ]

            from_indices.extend(shuffled_nb_node_indices)

            # Check if the nodes really match up
            if debug and ldis.has_facial_nodes:
                my_node_indices = [
                    eslice.start + i for i in ldis.face_indices()[my_face_nr]
                ]

                for my_i, nb_i in zip(my_node_indices,
                                      shuffled_nb_node_indices):
                    dist = my_discr.nodes[my_i] - nb_discr_bdry.nodes[nb_i]
                    assert la.norm(dist) < 1e-14

        # Finally, unify FluxFace.h values across boundary.
        my_flux_face = my_discr_bdry.find_facepair_side((my_el, my_face_nr))
        nb_flux_face = nb_discr_bdry.find_facepair_side((nb_el, nb_face_nr))
        my_flux_face.h = nb_flux_face.h = max(my_flux_face.h, nb_flux_face.h)

    assert len(from_indices) \
            == len(my_discr_bdry.nodes) \
            == len(nb_discr_bdry.nodes)

    # Convert nb's boundary indices to nb's volume indices.
    return nb_discr_bdry.vol_indices[numpy.asarray(from_indices,
                                                   dtype=numpy.intp)]
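The central matching trick here is to key faces by the frozenset of their global vertex numbers, which makes the lookup independent of vertex ordering; the node-level ordering is then repaired separately via get_face_index_shuffle_to_match. The lookup step in isolation, with made-up face data:

# hypothetical faces, each a tuple of global vertex numbers
my_faces = [(3, 7, 12), (7, 12, 20)]
nb_faces = [(12, 7, 20), (12, 3, 7)]   # the same faces, ordered differently

nb_vertices_to_face = dict((frozenset(f), i) for i, f in enumerate(nb_faces))

for face in my_faces:
    nb_idx = nb_vertices_to_face[frozenset(face)]
    # vertex order may still differ; a shuffle (as above) fixes node order
    print("%s matches neighbor face %d" % (face, nb_idx))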
Example #7
def make_conformal_mesh_ext(points, elements,
        boundary_tagger=None,
        volume_tagger=None,
        periodicity=None,
        allow_internal_boundaries=False,
        _is_rankbdry_face=None,
        ):
    """Construct a simplicial mesh.

    Face indices follow the convention for the respective element,
    such as Triangle or Tetrahedron, in this module.

    :param points: an array of vertex coordinates, given as vectors.
    :param elements: an iterable of :class:`hedge.mesh.element.Element`
      instances.
    :param boundary_tagger: A function of *(fvi, el, fn, all_v)* 
      that returns a list of boundary tags for a face identified
      by the parameters.

      *fvi* is the set of vertex indices of the face
      in question, *el* is an :class:`Element` instance,
      *fn* is the face number within *el*, and *all_v* is 
      a list of all vertices.
    :param volume_tagger: A function of *(el, all_v)* 
      returning a list of volume tags for the element identified
      by the parameters.

      *el* is an :class:`Element` instance and *all_v* is a list of
      all vertex coordinates.
    :param periodicity: either None or a list of tuples
      just like the one documented for the `periodicity`
      member of class :class:`Mesh`.
    :param allow_internal_boundaries: Calls the boundary tagger
      for element-element interfaces as well. If the tagger returns
      an empty list of tags for an internal interface, it remains
      internal.
    :param _is_rankbdry_face: an implementation detail,
      should not be used from user code. It is a function
      returning whether a given face identified by
      *(element instance, face_nr)* is cut by a parallel
      mesh partition.
    """

    # input validation 
    if (not isinstance(points, numpy.ndarray) 
            or not points.dtype == numpy.float64):
        raise TypeError("points must be a float64 array")

    if boundary_tagger is None:
        def boundary_tagger(fvi, el, fn, all_v):
            return []

    if volume_tagger is None:
        def volume_tagger(el, all_v):
            return []

    if _is_rankbdry_face is None:
        def _is_rankbdry_face(el_face):
            return False

    dim = max(el.dimensions for el in elements)
    if periodicity is None:
        periodicity = dim*[None]
    assert len(periodicity) == dim

    # tag elements
    tag_to_elements = {TAG_NONE: [], TAG_ALL: []}
    for el in elements:
        for el_tag in volume_tagger(el, points):
            tag_to_elements.setdefault(el_tag, []).append(el)
        tag_to_elements[TAG_ALL].append(el)

    # create face_map, which is a mapping of
    # (vertices on a face) ->
    #  [(element, face_idx) for elements bordering that face]
    face_map = {}
    for el in elements:
        for fid, face_vertices in enumerate(el.faces):
            face_map.setdefault(frozenset(face_vertices), []).append((el, fid))

    # build non-periodic connectivity structures
    interfaces = []
    tag_to_boundary = {
            TAG_NONE: [],
            TAG_ALL: [],
            TAG_REALLY_ALL: [],
            }

    for face_vertices, els_faces in face_map.iteritems():
        boundary_el_faces_tags = []
        if len(els_faces) == 2:
            if allow_internal_boundaries:
                el_face_a, el_face_b = els_faces
                el_a, face_a = el_face_a
                el_b, face_b = el_face_b

                tags_a = boundary_tagger(face_vertices, el_a, face_a, points)
                tags_b = boundary_tagger(face_vertices, el_b, face_b, points)

                if not tags_a and not tags_b:
                    interfaces.append(els_faces)
                elif tags_a and tags_b:
                    boundary_el_faces_tags.append((el_face_a, tags_a))
                    boundary_el_faces_tags.append((el_face_b, tags_b))
                else:
                    raise RuntimeError("boundary tagger is inconsistent "
                            "about boundary-ness of interior interface")
            else:
                interfaces.append(els_faces)
        elif len(els_faces) == 1:
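            # chained assignment: el_face keeps the (element, face) tuple,
            # which is simultaneously unpacked into el and face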
            el_face = el, face = els_faces[0]
            tags = boundary_tagger(face_vertices, el, face, points)
            boundary_el_faces_tags.append((el_face, tags))
        else:
            raise RuntimeError("face can at most border two elements")

        for el_face, tags in boundary_el_faces_tags:
            el, face = el_face
            tags = set(tags) - MESH_CREATION_TAGS
            assert not isinstance(tags, str), \
                RuntimeError("Received string as tag list")
            assert TAG_ALL not in tags
            assert TAG_REALLY_ALL not in tags

            for btag in tags:
                tag_to_boundary.setdefault(btag, []) \
                        .append(el_face)

            if TAG_NO_BOUNDARY not in tags:
                # TAG_NO_BOUNDARY is used to mark rank interfaces
                # as not being part of the boundary
                tag_to_boundary[TAG_ALL].append(el_face)

            tag_to_boundary[TAG_REALLY_ALL].append(el_face)

    # add periodicity-induced connectivity
    from pytools import flatten, reverse_dictionary

    periodic_opposite_faces = {}
    periodic_opposite_vertices = {}

    for tag_bdries in tag_to_boundary.itervalues():
        assert len(set(tag_bdries)) == len(tag_bdries)

    for axis, axis_periodicity in enumerate(periodicity):
        if axis_periodicity is not None:
            # find faces on +-axis boundaries
            minus_tag, plus_tag = axis_periodicity
            minus_faces = tag_to_boundary.get(minus_tag, [])
            plus_faces = tag_to_boundary.get(plus_tag, [])

            # find vertex indices and points on these faces
            minus_vertex_indices = list(set(flatten(el.faces[face]
                for el, face in minus_faces)))
            plus_vertex_indices = list(set(flatten(el.faces[face]
                for el, face in plus_faces)))

            minus_z_points = [points[pi] for pi in minus_vertex_indices]
            plus_z_points = [points[pi] for pi in plus_vertex_indices]

            # find a mapping from -axis to +axis vertices
            minus_to_plus, not_found = find_matching_vertices_along_axis(
                    axis, minus_z_points, plus_z_points,
                    minus_vertex_indices, plus_vertex_indices)
            plus_to_minus = reverse_dictionary(minus_to_plus)

            for a, b in minus_to_plus.iteritems():
                periodic_opposite_vertices.setdefault(a, []).append((b, axis))
                periodic_opposite_vertices.setdefault(b, []).append((a, axis))

            # establish face connectivity
            for minus_face in minus_faces:
                minus_el, minus_fi = minus_face
                minus_fvi = minus_el.faces[minus_fi]

                try:
                    mapped_plus_fvi = tuple(minus_to_plus[i] for i in minus_fvi)
                    plus_faces = face_map[frozenset(mapped_plus_fvi)]
                    assert len(plus_faces) == 1
                except KeyError:
                    # is our periodic counterpart in a different mesh clump?
                    if _is_rankbdry_face(minus_face):
                        # if so, cool. parallel handler will take care of it.
                        continue
                    else:
                        # if not, bad.
                        raise

                plus_face = plus_faces[0]
                interfaces.append([minus_face, plus_face])

                plus_el, plus_fi = plus_face
                plus_fvi = plus_el.faces[plus_fi]

                mapped_minus_fvi = tuple(plus_to_minus[i] for i in plus_fvi)

                # periodic_opposite_faces maps face vertex tuples from
                # one end of the periodic domain to the other, while
                # maintaining the entry-by-entry vertex correspondence

                periodic_opposite_faces[minus_fvi] = mapped_plus_fvi, axis
                periodic_opposite_faces[plus_fvi] = mapped_minus_fvi, axis

                tag_to_boundary[TAG_ALL].remove(plus_face)
                tag_to_boundary[TAG_ALL].remove(minus_face)

                tag_to_boundary[TAG_REALLY_ALL].remove(plus_face)
                tag_to_boundary[TAG_REALLY_ALL].remove(minus_face)

    return ConformalMesh(
            points=points,
            elements=elements,
            interfaces=interfaces,
            tag_to_boundary=tag_to_boundary,
            tag_to_elements=tag_to_elements,
            periodicity=periodicity,
            periodic_opposite_faces=periodic_opposite_faces,
            periodic_opposite_vertices=periodic_opposite_vertices,
            has_internal_boundaries=allow_internal_boundaries,
            )
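The docstring above pins down the tagger signatures. A small, self-contained sketch of a custom boundary_tagger that tags faces lying on the x=0 plane (the el and fn arguments go unused here, so stand-ins are passed for illustration):

import numpy

# stand-in vertex array: the corners of a unit square, float64 as required
all_v = numpy.array([[0, 0], [1, 0], [1, 1], [0, 1]], dtype=numpy.float64)

def my_boundary_tagger(fvi, el, fn, all_v):
    # tag a face whose vertices all lie on the x=0 plane
    if all(abs(all_v[vi][0]) < 1e-12 for vi in fvi):
        return ["inflow"]
    return []

print(my_boundary_tagger(frozenset([0, 3]), None, 0, all_v))  # ['inflow']
print(my_boundary_tagger(frozenset([1, 2]), None, 1, all_v))  # []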
Example #8
    def build_mesh(self):
        # figure out dimensionalities
        node_dim = single_valued(len(node) for node in nodes)
        vol_dim = max(el.el_type.dimensions for key, el in gmsh_vertex_nrs_to_element.iteritems())
        bdry_dim = vol_dim - 1

        vol_elements = [el for key, el in gmsh_vertex_nrs_to_element.iteritems() if el.el_type.dimensions == vol_dim]
        bdry_elements = [el for key, el in gmsh_vertex_nrs_to_element.iteritems() if el.el_type.dimensions == bdry_dim]

        # build hedge-compatible elements
        from hedge.mesh.element import TO_CURVED_CLASS

        hedge_vertices = []
        hedge_elements = []

        gmsh_node_nr_to_hedge_vertex_nr = {}
        hedge_el_to_gmsh_element = {}

        def get_vertex_nr(gmsh_node_nr):
            try:
                return gmsh_node_nr_to_hedge_vertex_nr[gmsh_node_nr]
            except KeyError:
                hedge_vertex_nr = len(hedge_vertices)
                hedge_vertices.append(nodes[gmsh_node_nr])
                gmsh_node_nr_to_hedge_vertex_nr[gmsh_node_nr] = hedge_vertex_nr
                return hedge_vertex_nr

        for el_nr, gmsh_el in enumerate(vol_elements):
            el_map = LocalToGlobalMap([nodes[ni] for ni in gmsh_el.node_indices], gmsh_el.el_type)
            is_affine = el_map.is_affine()

            el_class = gmsh_el.el_type.geometry
            if not is_affine:
                try:
                    el_class = TO_CURVED_CLASS[el_class]
                except KeyError:
                    raise GmshFileFormatError("unsupported curved element type %s" % el_class)

            vertex_indices = [get_vertex_nr(gmsh_node_nr) for gmsh_node_nr in gmsh_el.gmsh_vertex_indices]

            if is_affine:
                hedge_el = el_class(el_nr, vertex_indices, hedge_vertices)
            else:
                hedge_el = el_class(el_nr, vertex_indices, el_map)

            hedge_elements.append(hedge_el)
            hedge_el_to_gmsh_element[hedge_el] = gmsh_el

        from pytools import reverse_dictionary

        hedge_vertex_nr_to_gmsh_node_nr = reverse_dictionary(gmsh_node_nr_to_hedge_vertex_nr)

        del vol_elements

        def volume_tagger(el, all_v):
            return [
                tag_name_map[tag_nr, el.dimensions]
                for tag_nr in hedge_el_to_gmsh_element[el].tag_numbers
                if (tag_nr, el.dimensions) in tag_name_map
            ]

        def boundary_tagger(fvi, el, fn, all_v):
            gmsh_vertex_nrs = frozenset(hedge_vertex_nr_to_gmsh_node_nr[face_vertex_index] for face_vertex_index in fvi)

            try:
                gmsh_element = gmsh_vertex_nrs_to_element[gmsh_vertex_nrs]
            except KeyError:
                return []
            else:
                x = [
                    tag_name_map[tag_nr, el.dimensions - 1]
                    for tag_nr in gmsh_element.tag_numbers
                    if (tag_nr, el.dimensions - 1) in tag_name_map
                ]
                if len(x) > 1:
                    from pudb import set_trace

                    set_trace()
                return x

        vertex_array = np.array(hedge_vertices, dtype=np.float64)
        pt_dim = vertex_array.shape[-1]
        if pt_dim != vol_dim:
            from warnings import warn

            warn(
                "Found %d-dimensional mesh embedded in %d-dimensional space. "
                "Hedge only supports meshes of zero codimension (for now). "
                "Maybe you want to set force_dimension=%d?" % (vol_dim, pt_dim, vol_dim)
            )

        from hedge.mesh import make_conformal_mesh_ext

        return make_conformal_mesh_ext(
            vertex_array,
            hedge_elements,
            boundary_tagger=boundary_tagger,
            volume_tagger=volume_tagger,
            periodicity=periodicity,
            allow_internal_boundaries=allow_internal_boundaries,
        )
Example #9
def generate_proteus_problem_file(bvp, clsnm):
    from ibvp.language import scalarize
    scalarized_system = scalarize(bvp)

    #import ibvp.sym as sym
    #print(sym.pretty(scalarized_system.pde_system))

    distr_system = DistributeMapper()(scalarized_system.pde_system)

    scalar_unknowns = [v.name for v in scalarized_system.unknowns]

    num_equations = len(scalar_unknowns)
    ambient_dim = bvp.ambient_dim

    if len(set(scalar_unknowns)) != len(scalar_unknowns):
        raise ValueError("names of unknowns not unique "
                "after scalarization")

    # import ibvp.sym as sym
    # print sym.pretty(distr_system)

    tc_storage = TransportCoefficientStorage(scalarized_system,
                                             bvp.ambient_dim,
                                             scalar_unknowns)

    has_time_derivative = HasTimeDerivativeMapper()
    has_spatial_derivative = HasSpatialDerivativeMapper()

    for i, eqn_i in enumerate(distr_system):
        if isinstance(eqn_i, pp.Sum):
            terms = eqn_i.children
        else:
            terms = (eqn_i,)

        for term in terms:
            constant, term_without_constant = pick_off_constants(term)

            if isinstance(term_without_constant, p.OperatorBinding):
                op = term_without_constant.op

                if isinstance(op, p.TimeDerivativeOperator):
                    if has_spatial_derivative(term_without_constant.argument):
                        raise ValueError("no spatial derivatives allowed inside "
                                "of time derivative")
                    tc_storage.mass[i] += (
                            constant * term_without_constant.argument)
                    continue

                if has_time_derivative(term_without_constant):
                    raise ValueError("time derivatives found below "
                            "root of tree of term '%s'" % pretty(term))

                if isinstance(op, p.DerivativeOperator):
                    outer_deriv_axis = term_without_constant.op.ambient_axis
                    outer_deriv_argument = term_without_constant.argument

                    if not has_spatial_derivative(outer_deriv_argument):
                        tc_storage.advection[i, outer_deriv_axis] += (
                                constant * outer_deriv_argument)
                    else:
                        # diffusion
                        coeff, inner_derivative = \
                                find_inner_deriv_and_coeff(outer_deriv_argument)

                        pot_const, pot_expr = pick_off_constants(
                                inner_derivative.argument)
                        pot_index = tc_storage.register_potential(pot_expr)

                        tc_storage.diffusion[
                                i, pot_index,
                                outer_deriv_axis,
                                inner_derivative.op.ambient_axis] \
                                        += pot_const*coeff

                else:
                    raise ValueError("unexpected operator: %s"
                            % type(term_without_constant.op).__name__)
            else:
                if has_time_derivative(term_without_constant):
                    raise ValueError("time derivatives found below "
                            "root of tree of term '%s'" % pretty(term))

                if has_spatial_derivative(term_without_constant):
                    tc_storage.hamiltonian[i] += term
                else:
                    tc_storage.reaction[i] += term

    # In the Python code we generate, we create references to the coefficient
    # arrays in the dictionary; these will conveniently have the same names as
    # our pymbolic variables.  This makes printing easy and has no major
    # performance penalty.

    defs_list = ["    %s = c[('u', %d)]" % (str(v), i)
                 for (i, v) in enumerate(scalar_unknowns)]

    defs = '\n'.join(defs_list)  # noqa

    unk_scalar_fields = [p.Field(psi) for psi in scalar_unknowns]

    def process_scalar_bin(holder, label):
        assign = []
        dassign = []
        deplabels = np.zeros((num_equations, num_equations), 'O')
        deplabels[:] = 'none'
        for (i, x) in enumerate(holder):
            if x:
                xstr = "c[('%s', %d)][:] = %s" % (label, i, x)
                assign.append(xstr)
                for j, psi in enumerate(unk_scalar_fields):
                    dx = differentiate(x, psi)
                    if dx:
                        deplabels[i][j] = classify_dep(dx)
                        if deplabels[i][j] == 'nonlinear' \
                           or deplabels[i][j] == 'linear':
                            dxstr = "c[('d%s', %d, %d)][:] = %s" % (label, i, j, dx)
                            dassign.append(dxstr)
                    else:
                        pass

        return assign, dassign, deplabels

    mass_assigns, dmass_assigns, mass_deps \
        = process_scalar_bin(tc_storage.mass, "m")

    for md in mass_deps.ravel():
        if md == 'constant':
            raise Exception("Constant mass illegal")

    reaction_assigns, dreaction_assigns, reaction_deps \
        = process_scalar_bin(tc_storage.reaction, "r")

    hamiltonian_assigns, dhamiltonian_assigns, hamiltonian_deps \
        = process_scalar_bin(tc_storage.hamiltonian, "h")

    advect_assigns = []
    dadvect_assigns = []

    advect_deps_p = np.zeros((num_equations, num_equations, ambient_dim), 'O')
    advect_deps_p[:] = 'none'

    for i, bi in enumerate(tc_storage.advection):
        for j, bij in enumerate(bi):
            if bij:
                bstr = "c[('f', %d)][..., %d] = %s" % (i, j, bij)
                advect_assigns.append(bstr)
                for k, psi in enumerate(unk_scalar_fields):
                    dbij = differentiate(bij, psi)
                    if dbij:
                        advect_deps_p[i, k, j] = classify_dep(dbij)
                        dbstr = "c[('df', %d, %d)][...,%d] = %s" % (i, k, j, dbij)
                        dadvect_assigns.append(dbstr)

    # now "reduce" over the vector component dependences and take the worst.
    dep2int = {'none': 0,
               'constant':  1,
               'linear':    2,
               'nonlinear': 3}
    from pytools import reverse_dictionary
    int2dep = reverse_dictionary(dep2int)

    advect_deps = np.zeros((num_equations, num_equations), "O")
    for i in range(num_equations):
        for j in range(num_equations):
            advect_deps[i, j] = int2dep[
                    reduce(max,
                        (dep2int[x] for x in advect_deps_p[i, j, :]),
                        0)
                    ]

    diff_assigns = []
    ddiff_assigns = []
    diff_deps_p = np.zeros((num_equations,
                            num_equations,
                            num_equations,
                            ambient_dim,
                            ambient_dim), 'O')

    diff_deps_p[:] = 'none'

    for i, ai in enumerate(tc_storage.diffusion):
        for j, aij in enumerate(ai):
            for k, aijk in enumerate(aij):
                for ell, aijkell in enumerate(aijk):
                    if aijkell:
                        astr = "c[('a', %d, %d)][..., %d, %d] = %s" \
                               % (i, j, k, ell, aijkell)
                        diff_assigns.append(astr)
                        for q, psi in enumerate(unk_scalar_fields):
                            da = differentiate(aijkell, psi)
                            if da:
                                diff_deps_p[i, j, q, k, ell] = classify_dep(da)
                                dastr = "c[('da',%d,%d,%d)][...,%d,%d] = %s" \
                                        % (i, j, q, k, ell, da)
                                ddiff_assigns.append(dastr)
                            else:
                                diff_deps_p[i, j, q, k, ell] = 'constant'

    diff_deps = np.zeros((num_equations,
                          num_equations,
                          num_equations), 'O')

    ddp = diff_deps_p.reshape((num_equations,
                               num_equations,
                               num_equations,
                               ambient_dim**2))

    for i in range(num_equations):
        for j in range(num_equations):
            for k in range(num_equations):
                diff_deps[i, j, k] = int2dep[
                                        reduce(max,
                                            (dep2int[x] for x in ddp[i, j, k]), 0)]

    # potential is a bit different from other scalars.
    potential_assigns = []
    dpotential_assigns = []

    phi_deps = np.zeros((num_equations, num_equations), 'O')
    for i, phi in enumerate(tc_storage.potential):
        for j, u in enumerate(unk_scalar_fields):
            if phi == u:
                phi_deps[i, j] = 'u'
            else:
                phi_str = "c[('phi', %d)] = %s" % (i, phi)
                potential_assigns.append(phi_str)
                D = differentiate(phi, u)
                if D:
                    phi_deps[i, j] = 'nonlinear'
                    dphi_str = "c[('dphi', %d, %d)] = %s" % (i, j, D)
                    dpotential_assigns.append(dphi_str)

    def spacer(x):
        return "        " + x

    assigns = "\n".join(
                map(spacer,
                    reduce(lambda x, y: x+y,
                           [mass_assigns, dmass_assigns,
                            advect_assigns, dadvect_assigns,
                            diff_assigns, ddiff_assigns,
                            reaction_assigns, dreaction_assigns,
                            hamiltonian_assigns, dhamiltonian_assigns])))

    # we dict-ify the dependencies so we can repr them.
    def dictify(arr):
        if len(arr.shape) == 1:
            return dict((i, a) for (i, a) in enumerate(arr) if a and a != 'none')
        else:
            result = {}
            for i, a in enumerate(arr):
                da = dictify(a)
                if len(da) > 0:
                    result[i] = da
            return result

    names = ["mass", "advection", "diffusion", "potential",
            "reaction", "hamiltonian"]
    deps = [mass_deps, advect_deps, diff_deps, phi_deps,
            reaction_deps, hamiltonian_deps]

    dep_stmnts = []
    for (nm, d) in zip(names, deps):
        ddict = dictify(d)
        dep_stmnts.append("        %s = %s" % (nm, repr(ddict)))

    dep_st = "\n".join(dep_stmnts)

    # Create, e.g., u = c[('u',0)] before making assignments in evaluate, so
    # that we have references into the c dictionary for our data.  This makes
    # the pretty-printed code more readable.
    ref_list = []
    for i, phi in enumerate(scalar_unknowns):
        ref_list.append("%s = c[('u',%d)]" % (phi, i))

    refs = "\n".join((spacer(x) for x in ref_list))

    tc_class_str = """
from proteus.TransportCoefficients import TC_base

class %s(TC_base):
    def __init__(self):
%s
        variableNames=%s
        TC_base.__init__(self,
                         nc=%d,
                         mass=mass,
                         advection=advection,
                         diffusion=diffusion,
                         potential=potential,
                         reaction=reaction,
                         hamiltonian=hamiltonian,
                         variableNames=variableNames)

    def evaluate(self, t, c):
%s
%s
""" % (clsnm, dep_st, repr(scalar_unknowns), num_equations, refs, assigns)

    return tc_class_str
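The dep2int/int2dep pair in this example implements a severity reduction: dependency labels are ranked, the maximum rank across vector components wins, and reverse_dictionary maps the winning rank back to a label. The same logic in isolation:

from functools import reduce
from pytools import reverse_dictionary

dep2int = {'none': 0, 'constant': 1, 'linear': 2, 'nonlinear': 3}
int2dep = reverse_dictionary(dep2int)

def worst_dep(labels):
    # the most severe dependency wins; an empty sequence reduces to 'none'
    return int2dep[reduce(max, (dep2int[x] for x in labels), 0)]

assert worst_dep(['none', 'linear', 'constant']) == 'linear'
assert worst_dep([]) == 'none'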
Exemple #10
0
def find_neighbor_vol_indices(
        my_discr, my_part_data,
        nb_discr, nb_part_data,
        debug=False):

    from pytools import reverse_dictionary
    l2g_vertex_indices = \
            reverse_dictionary(my_part_data.global2local_vertex_indices)
    nb_l2g_vertex_indices = \
            reverse_dictionary(nb_part_data.global2local_vertex_indices)

    my_bdry_tag = my_part_data.part_boundary_tags[nb_part_data.part_nr]
    nb_bdry_tag = nb_part_data.part_boundary_tags[my_part_data.part_nr]

    my_mesh_bdry = my_part_data.mesh.tag_to_boundary[my_bdry_tag]
    nb_mesh_bdry = nb_part_data.mesh.tag_to_boundary[nb_bdry_tag]

    my_discr_bdry = my_discr.get_boundary(my_bdry_tag)
    nb_discr_bdry = nb_discr.get_boundary(nb_bdry_tag)

    nb_vertices_to_face = dict(
            (frozenset(el.faces[face_nr]), (el, face_nr))
            for el, face_nr
            in nb_mesh_bdry)

    from_indices = []

    shuffled_indices_cache = {}

    def get_shuffled_indices(face_node_count, shuffle_op):
        try:
            return shuffled_indices_cache[shuffle_op]
        except KeyError:
            unshuffled_indices = range(face_node_count)
            result = shuffled_indices_cache[shuffle_op] = \
                    shuffle_op(unshuffled_indices)
            return result

    for my_el, my_face_nr in my_mesh_bdry:
        eslice, ldis = my_discr.find_el_data(my_el.id)

        my_vertices = my_el.faces[my_face_nr]
        my_global_vertices = tuple(l2g_vertex_indices[vi]
                for vi in my_vertices)

        face_node_count = ldis.face_node_count()
        try:
            nb_vertices = frozenset(
                    nb_part_data.global2local_vertex_indices[vi]
                    for vi in my_global_vertices)
            # continue below in else part
        except KeyError:
            # this happens if my_global_vertices is not a permutation
            # of the neighbor's face vertices. Periodicity is the only
            # reason why that would be so.
            my_global_vertices_there, axis = my_part_data.global_periodic_opposite_faces[
                    my_global_vertices]

            nb_vertices = frozenset(
                    nb_part_data.global2local_vertex_indices[vi]
                    for vi in my_global_vertices_there)

            nb_el, nb_face_nr = nb_vertices_to_face[nb_vertices]
            nb_global_vertices_there = tuple(
                    nb_l2g_vertex_indices[vi]
                    for vi in nb_el.faces[nb_face_nr])

            nb_global_vertices, axis2 = nb_part_data.global_periodic_opposite_faces[
                    nb_global_vertices_there]

            assert axis == axis2

            nb_face_start = nb_discr_bdry \
                    .find_facepair((nb_el, nb_face_nr)) \
                    .opp.el_base_index

            shuffle_op = \
                    ldis.get_face_index_shuffle_to_match(
                            my_global_vertices,
                            nb_global_vertices)

            shuffled_nb_node_indices = [nb_face_start+i
                    for i in get_shuffled_indices(face_node_count, shuffle_op)]

            from_indices.extend(shuffled_nb_node_indices)

            # Check if the nodes really match up
            if debug and ldis.has_facial_nodes:
                my_node_indices = [eslice.start+i
                        for i in ldis.face_indices()[my_face_nr]]

                for my_i, nb_i in zip(my_node_indices, shuffled_nb_node_indices):
                    dist = my_discr.nodes[my_i]-nb_discr_bdry.nodes[nb_i]
                    dist[axis] = 0
                    assert la.norm(dist) < 1e-14
        else:
            # continue handling of nonperiodic case
            nb_el, nb_face_nr = nb_vertices_to_face[nb_vertices]
            nb_global_vertices = tuple(
                    nb_l2g_vertex_indices[vi]
                    for vi in nb_el.faces[nb_face_nr])

            nb_face_start = nb_discr_bdry \
                    .find_facepair((nb_el, nb_face_nr)) \
                    .opp.el_base_index

            shuffle_op = \
                    ldis.get_face_index_shuffle_to_match(
                            my_global_vertices,
                            nb_global_vertices)

            shuffled_nb_node_indices = [nb_face_start+i
                    for i in get_shuffled_indices(face_node_count, shuffle_op)]

            from_indices.extend(shuffled_nb_node_indices)

            # Check if the nodes really match up
            if debug and ldis.has_facial_nodes:
                my_node_indices = [eslice.start+i
                        for i in ldis.face_indices()[my_face_nr]]

                for my_i, nb_i in zip(my_node_indices, shuffled_nb_node_indices):
                    dist = my_discr.nodes[my_i]-nb_discr_bdry.nodes[nb_i]
                    assert la.norm(dist) < 1e-14

        # Finally, unify FluxFace.h values across boundary.
        my_flux_face = my_discr_bdry.find_facepair_side((my_el, my_face_nr))
        nb_flux_face = nb_discr_bdry.find_facepair_side((nb_el, nb_face_nr))
        my_flux_face.h = nb_flux_face.h = max(my_flux_face.h, nb_flux_face.h)

    assert len(from_indices) \
            == len(my_discr_bdry.nodes) \
            == len(nb_discr_bdry.nodes)

    # Convert nb's boundary indices to nb's volume indices.
    return nb_discr_bdry.vol_indices[
            numpy.asarray(from_indices, dtype=numpy.intp)]
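
The shuffle-then-offset pattern used above can be shown in isolation. A
minimal sketch, assuming a "shuffle operation" is any callable that permutes
a list of local face-node indices (reverse_op is an illustrative stand-in for
the operation returned by get_face_index_shuffle_to_match):

def reverse_op(indices):
    # toy permutation: the neighbor stores this face's nodes in reverse order
    return list(reversed(indices))

face_node_count = 4
nb_face_start = 100  # base index of the neighbor face within its boundary vector

shuffled = reverse_op(list(range(face_node_count)))  # [3, 2, 1, 0]
neighbor_node_indices = [nb_face_start + i for i in shuffled]
print(neighbor_node_indices)  # [103, 102, 101, 100]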
Example #11
0
def parse_gmsh(line_iterable, force_dimension=None, periodicity=None,
        allow_internal_boundaries=False, tag_mapper=lambda tag: tag):
    """
    :param force_dimension: if not None, truncate point coordinates to this many dimensions.
    """

    feeder = LineFeeder(line_iterable)
    element_type_map = GMSH_ELEMENT_TYPE_TO_INFO_MAP

    # collect the mesh information
    nodes = []
    elements = []

    # maps (tag_number, dimension) -> tag_name
    tag_name_map = {}

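    # maps frozenset(gmsh vertex numbers) -> element info; used below to look
    # up the (boundary) element that coincides with a given face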
    gmsh_vertex_nrs_to_element = {}

    class ElementInfo(Record):
        pass

    while feeder.has_next_line():
        next_line = feeder.get_next_line()
        if not next_line.startswith("$"):
            raise GmshFileFormatError("expected start of section, '%s' found instead" % next_line)

        section_name = next_line[1:]

        if section_name == "MeshFormat":
            line_count = 0
            while True:
                next_line = feeder.get_next_line()
                if next_line == "$End"+section_name:
                    break

                if line_count == 0:
                    version_number, file_type, data_size = next_line.split()

                if line_count > 0:
                    raise GmshFileFormatError("more than one line found in MeshFormat section")

                if version_number not in ["2.1", "2.2"]:
                    from warnings import warn
                    warn("unexpected mesh version number '%s' found" % version_number)

                if file_type != "0":
                    raise GmshFileFormatError("only ASCII gmsh file type is supported")

                line_count += 1

        elif section_name == "Nodes":
            node_count = int(feeder.get_next_line())
            node_idx = 1

            while True:
                next_line = feeder.get_next_line()
                if next_line == "$End"+section_name:
                    break

                parts = next_line.split()
                if len(parts) != 4:
                    raise GmshFileFormatError("expected four-component line in $Nodes section")

                read_node_idx = int(parts[0])
                if read_node_idx != node_idx:
                    raise GmshFileFormatError("out-of-order node index found")

                if force_dimension is not None:
                    point = [float(x) for x in parts[1:force_dimension+1]]
                else:
                    point = [float(x) for x in parts[1:]]

                nodes.append(numpy.array(point, dtype=numpy.float64))

                node_idx += 1

            if node_count+1 != node_idx:
                raise GmshFileFormatError("unexpected number of nodes found")

        elif section_name == "Elements":
            element_count = int(feeder.get_next_line())
            element_idx = 1
            while True:
                next_line = feeder.get_next_line()
                if next_line == "$End"+section_name:
                    break

                parts = [int(x) for x in next_line.split()]

                if len(parts) < 4:
                    raise GmshFileFormatError("too few entries in element line")

                read_element_idx = parts[0]
                if read_element_idx != element_idx:
                    raise GmshFileFormatError("out-of-order element index found")

                el_type_num = parts[1]
                try:
                    element_type = element_type_map[el_type_num]
                except KeyError:
                    raise GmshFileFormatError("unexpected element type %d"
                            % el_type_num)

                tag_count = parts[2]
                tags = parts[3:3+tag_count]

                # convert to zero-based
                node_indices = [x-1 for x in parts[3+tag_count:]]

                if element_type.node_count() != len(node_indices):
                    raise GmshFileFormatError("unexpected number of nodes in element")

                gmsh_vertex_nrs = node_indices[:element_type.vertex_count]
                zero_based_idx = element_idx - 1
                el_info = ElementInfo(
                    index=zero_based_idx,
                    el_type=element_type,
                    node_indices=node_indices,
                    gmsh_vertex_indices=gmsh_vertex_nrs,
                    # keep only the first (physical) tag; a tag of 0 means "none"
                    tag_numbers=[tag for tag in tags[:1] if tag != 0])

                gmsh_vertex_nrs_to_element[frozenset(gmsh_vertex_nrs)] = el_info
                element_idx += 1

            if element_count+1 != element_idx:
                raise GmshFileFormatError("unexpected number of elements found")

        elif section_name == "PhysicalNames":
            name_count = int(feeder.get_next_line())
            name_idx = 1

            while True:
                next_line = feeder.get_next_line()
                if next_line == "$End"+section_name:
                    break

                dimension, number, name = next_line.split(" ", 2)
                dimension = int(dimension)
                number = int(number)

                if name[0] != '"' or name[-1] != '"':
                    raise GmshFileFormatError("expected quotes around physical name")

                tag_name_map[number, dimension] = tag_mapper(name[1:-1])

                name_idx += 1

            if name_count+1 != name_idx:
                raise GmshFileFormatError("unexpected number of physical names found")
        else:
            # unrecognized section, skip
            while True:
                next_line = feeder.get_next_line()
                if next_line == "$End"+section_name:
                    break

    # figure out dimensionalities
    node_dim = single_valued(len(node) for node in nodes)
    vol_dim = max(el.el_type.dimensions
            for el in gmsh_vertex_nrs_to_element.itervalues())
    bdry_dim = vol_dim - 1

    vol_elements = [el for el in gmsh_vertex_nrs_to_element.itervalues()
            if el.el_type.dimensions == vol_dim]
    bdry_elements = [el for el in gmsh_vertex_nrs_to_element.itervalues()
            if el.el_type.dimensions == bdry_dim]

    # build hedge-compatible elements
    from hedge.mesh.element import TO_CURVED_CLASS

    hedge_vertices = []
    hedge_elements = []

    gmsh_node_nr_to_hedge_vertex_nr = {}
    hedge_el_to_gmsh_element = {}

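    # Assign hedge vertex numbers lazily: only gmsh nodes that actually occur
    # as element vertices get one, numbered in order of first use.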
    def get_vertex_nr(gmsh_node_nr):
        try:
            return gmsh_node_nr_to_hedge_vertex_nr[gmsh_node_nr]
        except KeyError:
            hedge_vertex_nr = len(hedge_vertices)
            hedge_vertices.append(nodes[gmsh_node_nr])
            gmsh_node_nr_to_hedge_vertex_nr[gmsh_node_nr] = hedge_vertex_nr
            return hedge_vertex_nr

    for el_nr, gmsh_el in enumerate(vol_elements):
        el_map = LocalToGlobalMap(
                [nodes[ni] for ni in gmsh_el.node_indices],
                gmsh_el.el_type)
        is_affine = el_map.is_affine()

        el_class = gmsh_el.el_type.geometry
        if not is_affine:
            try:
                el_class = TO_CURVED_CLASS[el_class]
            except KeyError:
                raise GmshFileFormatError("unsupported curved element type %s" % el_class)

        vertex_indices = [get_vertex_nr(gmsh_node_nr)
                for gmsh_node_nr in gmsh_el.gmsh_vertex_indices]

        if is_affine:
            hedge_el = el_class(el_nr, vertex_indices, hedge_vertices)
        else:
            hedge_el = el_class(el_nr, vertex_indices, el_map)

        hedge_elements.append(hedge_el)
        hedge_el_to_gmsh_element[hedge_el] = gmsh_el

    from pytools import reverse_dictionary
    hedge_vertex_nr_to_gmsh_node_nr = reverse_dictionary(
            gmsh_node_nr_to_hedge_vertex_nr)

    del vol_elements

    def volume_tagger(el, all_v):
        return [tag_name_map[tag_nr, el.dimensions]
                for tag_nr in hedge_el_to_gmsh_element[el].tag_numbers
                if (tag_nr, el.dimensions) in tag_name_map]

    def boundary_tagger(fvi, el, fn, all_v):
        gmsh_vertex_nrs = frozenset(
                hedge_vertex_nr_to_gmsh_node_nr[face_vertex_index]
                for face_vertex_index in fvi)

        try:
            gmsh_element = gmsh_vertex_nrs_to_element[gmsh_vertex_nrs]
        except KeyError:
            return []
        else:
            x = [tag_name_map[tag_nr, el.dimensions-1]
                    for tag_nr in gmsh_element.tag_numbers
                    if (tag_nr, el.dimensions-1) in tag_name_map]
            if len(x) > 1:
                from warnings import warn
                warn("face tagged with more than one boundary tag: %s" % x)
            return x

    vertex_array = numpy.array(hedge_vertices, dtype=numpy.float64)
    pt_dim = vertex_array.shape[-1]
    if pt_dim != vol_dim:
        from warnings import warn
        warn("Found %d-dimensional mesh embedded in %d-dimensional space. "
                "Hedge only supports meshes of zero codimension (for now). "
                "Maybe you want to set force_dimension=%d?"
                % (vol_dim, pt_dim, vol_dim))

    from hedge.mesh import make_conformal_mesh_ext
    return make_conformal_mesh_ext(
            vertex_array,
            hedge_elements,
            boundary_tagger=boundary_tagger,
            volume_tagger=volume_tagger,
            periodicity=periodicity,
            allow_internal_boundaries=allow_internal_boundaries)
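
A hedged usage sketch for parse_gmsh: the function only needs an iterable of
lines, so a plain file object should work, assuming LineFeeder tolerates
trailing newlines (an assumption; "mesh.msh" and the keyword values below are
illustrative):

with open("mesh.msh") as msh_file:
    mesh = parse_gmsh(
            msh_file,
            force_dimension=2,  # truncate point coordinates to two dimensions
            periodicity=None,
            allow_internal_boundaries=False)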