Example #1
# Imports assumed for this standalone snippet; the helpers below come from
# the gem expression language package.
import numpy

from gem.gem import (ComponentTensor, FlexiblyIndexed, Indexed,
                     reshape, view)
from gem.optimise import remove_componenttensors


def split_variable(variable_ref, index, multiindices):
    """Splits a flexibly indexed variable along a concatenation index.

    :param variable_ref: flexibly indexed variable to split
    :param index: :py:class:`Concatenate` index to split along
    :param multiindices: one multiindex for each split variable

    :returns: generator of split indexed variables
    """
    assert isinstance(variable_ref, FlexiblyIndexed)
    other_indices = list(variable_ref.index_ordering())
    other_indices.remove(index)
    other_indices = tuple(other_indices)
    # Gather the variable into a tensor whose leading axis is the
    # concatenation index, so slicing along that axis picks out each
    # child's contiguous block.
    data = ComponentTensor(variable_ref, (index,) + other_indices)
    slices = [slice(None)] * len(other_indices)
    shapes = [(other_index.extent,) for other_index in other_indices]

    # Walk the concatenation axis: each child owns a contiguous block,
    # which is sliced out and reshaped to that child's multiindex shape.
    offset = 0
    for multiindex in multiindices:
        shape = tuple(index.extent for index in multiindex)
        size = numpy.prod(shape, dtype=int)
        slice_ = slice(offset, offset + size)
        offset += size

        sub_ref = Indexed(reshape(view(data, slice_, *slices),
                                  shape, *shapes),
                          multiindex + other_indices)
        sub_ref, = remove_componenttensors((sub_ref,))
        yield sub_ref
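
A minimal usage sketch, assuming the gem package is importable; the names child_shapes, concat_index and var are hypothetical, and in practice the multiindices are derived from the shapes of a gem.Concatenate node's children:

from gem import gem

# Hypothetical child shapes: a block of 3 entries followed by a 2x2 block,
# 3 + 2*2 = 7 scalar entries in total along the concatenation index.
child_shapes = [(3,), (2, 2)]
multiindices = [tuple(gem.Index(extent=extent) for extent in shape)
                for shape in child_shapes]

# 'var' stands for a gem.FlexiblyIndexed reference whose free indices
# include 'concat_index', the extent-7 index running over the concatenation;
# both are assumed to be supplied by the caller.
# sub_refs = list(split_variable(var, concat_index, multiindices))
# 'sub_refs' would then hold one indexed sub-variable per child shape.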
Example #2
# Imports assumed for this standalone snippet.
import numpy
from pyop2 import op2


def transfer_kernel(Pk, P1):
    """Compile a kernel that will map between Pk and P1.
    :returns: a PyOP2 kernel.

    The prolongation maps a solution in P1 into Pk using the natural
    embedding.  The restriction maps a residual in the dual of Pk into
    the dual of P1 (it is the dual of the prolongation), computed
    using linearity of the test function.
    """
    # Mapping of a residual in Pk into a residual in P1
    from coffee import base as coffee
    from tsfc.coffee import generate as generate_coffee, SCALAR_TYPE
    from tsfc.parameters import default_parameters
    from gem import gem, impero_utils as imp

    # Pk should be at least the same size as P1
    assert Pk.finat_element.space_dimension() >= P1.finat_element.space_dimension()
    # In the general case we should compute this by doing:
    # numpy.linalg.solve(Pkmass, PkP1mass)
    Pke = Pk.finat_element._element
    P1e = P1.finat_element._element
    # TODO: rework to use finat.
    matrix = numpy.dot(Pke.dual.to_riesz(P1e.get_nodal_basis()),
                       P1e.get_coeffs().T).T

    Vout, Vin = P1, Pk
    weights = gem.Literal(matrix)
    name = "Pk_P1_mapper"

    funargs = []

    assert Vin.shape == Vout.shape

    shape = (P1e.space_dimension() * Vout.value_size,
             Pke.space_dimension() * Vin.value_size)
    outarg = coffee.Decl(SCALAR_TYPE, coffee.Symbol("A", rank=shape))
    i = gem.Index()
    j = gem.Index()
    k = gem.Index()
    indices = i, j, k
    A = gem.Variable("A", shape)

    # The repeated index k ties the output value component to the input
    # value component, so every value component is transferred by the
    # same scalar weight matrix.
    outgem = [gem.Indexed(gem.reshape(A,
                                      (P1e.space_dimension(), Vout.value_size),
                                      (Pke.space_dimension(), Vin.value_size)),
                          (i, k, j, k))]

    funargs.append(outarg)

    expr = gem.Indexed(weights, (i, j))

    outgem, = imp.preprocess_gem(outgem)
    ir = imp.compile_gem([(outgem, expr)], indices)

    index_names = [(i, "i"), (j, "j"), (k, "k")]
    body = generate_coffee(ir, index_names, default_parameters()["precision"])
    function = coffee.FunDecl("void", name, funargs, body,
                              pred=["static", "inline"])

    return op2.Kernel(function, name=function.name)
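
A hedged usage sketch, assuming a Firedrake installation; the mesh and space choices below are illustrative only:

from firedrake import UnitSquareMesh, FunctionSpace

mesh = UnitSquareMesh(4, 4)
P1 = FunctionSpace(mesh, "CG", 1)   # coarse (linear) space
P2 = FunctionSpace(mesh, "CG", 2)   # richer space containing P1
kernel = transfer_kernel(P2, P1)    # PyOP2 kernel mapping between P2 and P1

The resulting kernel is what the surrounding code would hand to a PyOP2 parallel loop to apply the prolongation or restriction element by element.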