Example #1
    def _resample_matrix(self, actx: ArrayContext, to_group_index,
                         ibatch_index):
        import modepy as mp
        ibatch = self.groups[to_group_index].batches[ibatch_index]
        from_grp = self.from_discr.groups[ibatch.from_group_index]

        nfrom_unit_nodes = from_grp.unit_nodes.shape[1]
        if np.array_equal(from_grp.unit_nodes, ibatch.result_unit_nodes):
            # Nodes are exactly identical? We can 'interpolate' even when there
            # isn't a basis.

            result = np.eye(nfrom_unit_nodes)

        else:
            if len(from_grp.basis()) != nfrom_unit_nodes:
                from meshmode.discretization import NoninterpolatoryElementGroupError
                raise NoninterpolatoryElementGroupError(
                    "%s does not support interpolation because it is not "
                    "unisolvent (its unit node count does not match its "
                    "number of basis functions). Using connections requires "
                    "the ability to interpolate." % type(from_grp).__name__)

            result = mp.resampling_matrix(from_grp.basis(),
                                          ibatch.result_unit_nodes,
                                          from_grp.unit_nodes)

        return actx.freeze(actx.from_numpy(result))
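
For reference, the effect of the resampling matrix built here can be reproduced with modepy alone: nodal values of any function in the span of the basis, sampled at the source unit nodes, are mapped exactly to its values at the target nodes. The sketch below is standalone and illustrative; the node sets, the test polynomial, and the use of modepy's `simplex_onb`/`warp_and_blend_nodes` helpers are assumptions, not taken from the class above.

import numpy as np
import modepy as mp

order = 3
basis = mp.simplex_onb(2, order)                 # orthonormal basis on the triangle
from_nodes = mp.warp_and_blend_nodes(2, order)   # shape (2, nnodes)
to_nodes = np.array([[-0.9, 0.1, -0.3],          # a few points in the bi-unit
                     [-0.9, -0.8, -0.5]])        # reference triangle (illustrative)

resample = mp.resampling_matrix(basis, to_nodes, from_nodes)

def f(x, y):
    # any polynomial of total degree <= order is resampled exactly
    return 3*x**2*y - x + 2*y

assert np.allclose(resample @ f(*from_nodes), f(*to_nodes))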
Example #2
    def _resample_point_pick_indices(self, actx: ArrayContext,
            to_group_index, ibatch_index,
            tol_multiplier=None):
        """If :meth:`_resample_matrix` *R* is a row subset of a permutation matrix *P*,
        return the index subset I so that, loosely, ``x[I] == R @ x``.

        Will return *None* if no such index array exists, or a
        :class:`pyopencl.array.Array` containing the index subset.
        """

        mat = actx.to_numpy(actx.thaw(
                self._resample_matrix(actx, to_group_index, ibatch_index)))

        nrows, ncols = mat.shape
        result = np.zeros(nrows, dtype=self.to_discr.mesh.element_id_dtype)

        if tol_multiplier is None:
            tol_multiplier = 50

        tol = np.finfo(mat.dtype).eps * tol_multiplier

        for irow in range(nrows):
            # A point pick exists only if each row has exactly one entry equal
            # to 1 and all other entries equal to 0 (up to *tol*).
            one_indices, = np.where(np.abs(mat[irow] - 1) < tol)
            zero_indices, = np.where(np.abs(mat[irow]) < tol)

            if len(one_indices) != 1:
                return None
            if len(zero_indices) != ncols - 1:
                return None

            one_index, = one_indices
            result[irow] = one_index

        return actx.freeze(actx.from_numpy(result))
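
The optimization implemented here can be seen with plain NumPy: when the resampling matrix merely picks source nodes, the matrix product collapses to fancy indexing. The small matrix below is a made-up example.

import numpy as np

# R is a row subset of a permutation matrix: each row selects one column.
R = np.array([[0., 1., 0., 0.],
              [0., 0., 0., 1.],
              [1., 0., 0., 0.]])
I = np.array([np.flatnonzero(row)[0] for row in R])   # -> [1, 3, 0]

x = np.array([10., 20., 30., 40.])
assert np.array_equal(x[I], R @ x)    # indexing replaces the matrix product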
Example #3
def _unflatten(
        actx: ArrayContext, group_shapes: Iterable[Tuple[int, int]], ary: Any
        ) -> DOFArray:
    @memoize_in(actx, (unflatten, "unflatten_prg"))
    def prg():
        return make_loopy_program(
            "{[iel,idof]: 0<=iel<nelements and 0<=idof<ndofs_per_element}",
            "result[iel, idof] = ary[grp_start + iel*ndofs_per_element + idof]",
            name="unflatten")

    group_sizes = [nel * ndof for nel, ndof in group_shapes]

    if ary.size != sum(group_sizes):
        raise ValueError("array has size %d, expected %d"
                % (ary.size, sum(group_sizes)))

    group_starts = np.cumsum([0] + group_sizes)

    return DOFArray(actx, tuple(
        actx.call_loopy(
            prg(),
            grp_start=grp_start, ary=ary,
            nelements=nel,
            ndofs_per_element=ndof,
            )["result"]
        for grp_start, (nel, ndof) in zip(group_starts, group_shapes)))
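
In NumPy terms, the kernel above is just a reshape of each group's contiguous slice of the flat array; the sketch below (made-up group shapes) spells out the index map ``result[iel, idof] = ary[grp_start + iel*ndofs_per_element + idof]``.

import numpy as np

group_shapes = [(3, 4), (2, 6)]              # (nelements, ndofs_per_element)
group_sizes = [nel * ndof for nel, ndof in group_shapes]
group_starts = np.cumsum([0] + group_sizes)  # [0, 12, 24]

flat = np.arange(sum(group_sizes), dtype=np.float64)

groups = [
    flat[start:start + nel*ndof].reshape(nel, ndof)
    for start, (nel, ndof) in zip(group_starts, group_shapes)
]
assert groups[0][1, 2] == flat[group_starts[0] + 1*4 + 2]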
Example #4
def check_connection(actx: ArrayContext,
                     connection: DirectDiscretizationConnection):
    # Verify that each batch's element indices are within bounds for both the
    # source and target discretizations.
    from_discr = connection.from_discr
    to_discr = connection.to_discr

    assert len(connection.groups) == len(to_discr.groups)

    for cgrp, tgrp in zip(connection.groups, to_discr.groups):
        for batch in cgrp.batches:
            fgrp = from_discr.groups[batch.from_group_index]

            from_element_indices = actx.to_numpy(
                actx.thaw(batch.from_element_indices))
            to_element_indices = actx.to_numpy(
                actx.thaw(batch.to_element_indices))

            assert (0 <= from_element_indices).all()
            assert (0 <= to_element_indices).all()
            assert (from_element_indices < fgrp.nelements).all()
            assert (to_element_indices < tgrp.nelements).all()
            if batch.to_element_face is not None:
                assert 0 <= batch.to_element_face < fgrp.mesh_el_group.nfaces
Example #5
def thaw(actx: ArrayContext,
         ary: Union[DOFArray, np.ndarray]) -> Union[DOFArray, np.ndarray]:
    r"""Call :meth:`~meshmode.array_context.ArrayContext.thaw` on the element
    group arrays making up the :class:`DOFArray`, using *actx*.

    Vectorizes over object arrays of :class:`DOFArray`\ s.
    """
    if isinstance(ary, np.ndarray):
        return obj_array_vectorize(partial(thaw, actx), ary)

    if ary.array_context is not None:
        raise ValueError("DOFArray passed to thaw is not frozen")

    return DOFArray(actx, tuple(actx.thaw(subary) for subary in ary))
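
The object-array branch follows the usual pytools vectorization pattern. Here is a self-contained sketch of that pattern with a stand-in function instead of an array context; the ``scale`` helper is hypothetical, for illustration only.

from functools import partial

import numpy as np
from pytools.obj_array import make_obj_array, obj_array_vectorize

def scale(factor, ary):
    # Recurse into object arrays, mirroring how thaw dispatches above.
    if isinstance(ary, np.ndarray) and ary.dtype.char == "O":
        return obj_array_vectorize(partial(scale, factor), ary)
    return factor * ary

vec = make_obj_array([np.arange(3), np.arange(4)])
doubled = scale(2, vec)
print(doubled[0], doubled[1])   # [0 2 4] [0 2 4 6]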
Example #6
def unflatten(
        actx: ArrayContext,
        discr,
        ary,
        ndofs_per_element_per_group: Optional[Iterable[int]] = None
) -> Union[DOFArray, np.ndarray]:
    r"""Convert a 'flat' array returned by :func:`flatten` back to a :class:`DOFArray`.

    Vectorizes over object arrays of :class:`DOFArray`\ s.
    """
    if (isinstance(ary, np.ndarray) and ary.dtype.char == "O"
            and not isinstance(ary, DOFArray)):
        return obj_array_vectorize(
            lambda subary: unflatten(actx, discr, subary,
                                     ndofs_per_element_per_group), ary)

    @memoize_in(actx, (unflatten, "unflatten_prg"))
    def prg():
        return make_loopy_program(
            "{[iel,idof]: 0<=iel<nelements and 0<=idof<ndofs_per_element}",
            "result[iel, idof] = ary[grp_start + iel*ndofs_per_element + idof]",
            name="unflatten")

    if ndofs_per_element_per_group is None:
        ndofs_per_element_per_group = [grp.nunit_dofs for grp in discr.groups]

    group_sizes = [
        grp.nelements * ndofs_per_element for grp, ndofs_per_element in zip(
            discr.groups, ndofs_per_element_per_group)
    ]

    if ary.size != sum(group_sizes):
        raise ValueError("array has size %d, expected %d" %
                         (ary.size, sum(group_sizes)))

    group_starts = np.cumsum([0] + group_sizes)

    return DOFArray.from_list(actx, [
        actx.call_loopy(
            prg(),
            grp_start=grp_start,
            ary=ary,
            nelements=grp.nelements,
            ndofs_per_element=ndofs_per_element,
        )["result"] for grp_start, grp, ndofs_per_element in zip(
            group_starts, discr.groups, ndofs_per_element_per_group)
    ])
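
As a usage sketch, a flat array produced by :func:`flatten` round-trips through :func:`unflatten`. The setup below assumes the PyOpenCL array context and the mesh/discretization helpers of the meshmode version these snippets appear to target; parameter names (e.g. ``n=`` in ``generate_regular_rect_mesh``) changed in later releases, so treat this as a sketch rather than a definitive recipe.

import pyopencl as cl

from meshmode.array_context import PyOpenCLArrayContext
from meshmode.mesh.generation import generate_regular_rect_mesh
from meshmode.discretization import Discretization
from meshmode.discretization.poly_element import PolynomialWarpAndBlendGroupFactory
from meshmode.dof_array import flatten, unflatten, thaw

cl_ctx = cl.create_some_context()
actx = PyOpenCLArrayContext(cl.CommandQueue(cl_ctx))

mesh = generate_regular_rect_mesh(a=(0, 0), b=(1, 1), n=(8, 8))
discr = Discretization(actx, mesh, PolynomialWarpAndBlendGroupFactory(3))

x = thaw(actx, discr.nodes()[0])   # a DOFArray of x-coordinates
flat = flatten(x)                  # one flat device array
x_again = unflatten(actx, discr, flat)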