Example 1
    def __init__(self, ref_el, degree):
        entity_ids = {0: {0: [0], 1: [degree]}, 1: {0: list(range(1, degree))}}
        lr = quadrature.GaussLobattoLegendreQuadratureLineRule(
            ref_el, degree + 1)
        nodes = [functional.PointEvaluation(ref_el, x) for x in lr.pts]

        super(GaussLobattoLegendreDualSet,
              self).__init__(nodes, ref_el, entity_ids)
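
A minimal usage sketch (assuming a FIAT version that exports GaussLobattoLegendre at the top level): the dual set above backs the spectral element, with point evaluations at the Gauss-Lobatto-Legendre points, endpoints included.

    from FIAT import GaussLobattoLegendre
    from FIAT.reference_element import ufc_simplex

    interval = ufc_simplex(1)                 # reference interval [0, 1]
    element = GaussLobattoLegendre(interval, 3)

    # Each dual-basis node is a PointEvaluation; print its point.
    for node in element.dual_basis():
        print(list(node.get_point_dict().keys())[0])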
Example 2
    def __init__(self, ref_el, degree, right=True):
        # Do DG connectivity because it's bonkers to do one-sided assembly even
        # though we have an endpoint in the point set!
        entity_ids = {0: {0: [], 1: []}, 1: {0: list(range(0, degree + 1))}}
        lr = quadrature.RadauQuadratureLineRule(ref_el, degree + 1, right)
        nodes = [functional.PointEvaluation(ref_el, x) for x in lr.pts]

        super(GaussRadauDualSet, self).__init__(nodes, ref_el, entity_ids)
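
A small sketch of the quadrature rule that supplies these nodes (same call as in the snippet): the rule includes one endpoint, the right one when the third argument is true, yet all DOFs are attached to the cell interior, as the comment above explains.

    from FIAT import quadrature
    from FIAT.reference_element import ufc_simplex

    interval = ufc_simplex(1)
    rule = quadrature.RadauQuadratureLineRule(interval, 4, True)
    print(rule.pts)   # four points in (0, 1], the last one at x = 1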
Example 3
    def __init__(self, ref_el):
        entity_ids = {}
        nodes = []
        cur = 0

        # make nodes by getting points
        # need to do this dimension-by-dimension, facet-by-facet
        top = ref_el.get_topology()
        verts = ref_el.get_vertices()
        sd = ref_el.get_spatial_dimension()

        # get jet at each vertex

        entity_ids[0] = {}
        for v in sorted(top[0]):
            nodes.append(functional.PointEvaluation(ref_el, verts[v]))
            pd = functional.PointDerivative
            for i in range(sd):
                alpha = [0] * sd
                alpha[i] = 1

                nodes.append(pd(ref_el, verts[v], alpha))

            entity_ids[0][v] = list(range(cur, cur + 1 + sd))
            cur += sd + 1

        # no edge dof
        entity_ids[1] = {}

        # face dof
        # point evaluation at barycenter
        entity_ids[2] = {}
        for f in sorted(top[2]):
            pt = ref_el.make_points(2, f, 3)[0]
            n = functional.PointEvaluation(ref_el, pt)
            nodes.append(n)
            entity_ids[2][f] = list(range(cur, cur + 1))
            cur += 1

        for dim in range(3, sd + 1):
            entity_ids[dim] = {}
            for facet in top[dim]:
                entity_ids[dim][facet] = []

        super(CubicHermiteDualSet, self).__init__(nodes, ref_el, entity_ids)
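
A usage sketch (assuming FIAT.hermite exports CubicHermite, as in current FIAT): on the UFC triangle this dual set yields ten functionals, a value plus the two first derivatives at each vertex and one point evaluation at the barycenter.

    from FIAT.hermite import CubicHermite
    from FIAT.reference_element import ufc_simplex

    triangle = ufc_simplex(2)
    element = CubicHermite(triangle)
    print(element.space_dimension())   # 10
    print(element.entity_dofs())       # DOFs 0-8 on the vertices, DOF 9 on the cell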
Example 4
    def __init__(self, ref_el, degree, order=1):
        bc_nodes = []
        for x in ref_el.get_vertices():
            bc_nodes.append([functional.PointEvaluation(ref_el, x),
                             *[functional.PointDerivative(ref_el, x, [alpha]) for alpha in range(1, order)]])
        bc_nodes[1].reverse()
        k = len(bc_nodes[0])
        idof = slice(k, -k)
        bdof = list(range(-k, k))
        bdof = bdof[k:] + bdof[:k]

        # Define the generalized eigenproblem on a GLL element
        gll = GaussLobattoLegendre(ref_el, degree)
        xhat = numpy.array([list(x.get_point_dict().keys())[0][0] for x in gll.dual_basis()])

        # Tabulate the BC nodes
        constraints = gll.tabulate(order-1, ref_el.get_vertices())
        C = numpy.column_stack(list(constraints.values()))
        perm = list(range(len(bdof)))
        perm = perm[::2] + perm[-1::-2]
        C = C[:, perm].T

        # Tabulate the basis that splits the DOFs into interior and bcs
        E = numpy.eye(gll.space_dimension())
        E[bdof, idof] = -C[:, idof]
        E[bdof, :] = numpy.dot(numpy.linalg.inv(C[:, bdof]), E[bdof, :])

        # Assemble the constrained Galerkin matrices on the reference cell
        rule = quadrature.GaussLegendreQuadratureLineRule(ref_el, degree+1)
        phi = gll.tabulate(order, rule.get_points())
        E0 = numpy.dot(phi[(0, )].T, E)
        Ek = numpy.dot(phi[(order, )].T, E)
        B = numpy.dot(numpy.multiply(E0.T, rule.get_weights()), E0)
        A = numpy.dot(numpy.multiply(Ek.T, rule.get_weights()), Ek)

        # Eigenfunctions in the constrained basis
        S = numpy.eye(A.shape[0])
        if S.shape[0] > len(bdof):
            _, Sii = sym_eig(A[idof, idof], B[idof, idof])
            S[idof, idof] = Sii
            S[idof, bdof] = numpy.dot(Sii, numpy.dot(Sii.T, -B[idof, bdof]))

        # Eigenfunctions in the Lagrange basis
        S = numpy.dot(E, S)
        self.gll_points = xhat
        self.gll_tabulation = S.T

        # Interpolate eigenfunctions onto the quadrature points
        basis = numpy.dot(S.T, phi[(0, )])
        nodes = bc_nodes[0] + [functional.IntegralMoment(ref_el, rule, f) for f in basis[idof]] + bc_nodes[1]

        entity_ids = {0: {0: [0], 1: [degree]},
                      1: {0: list(range(1, degree))}}
        entity_permutations = {}
        entity_permutations[0] = {0: {0: [0]}, 1: {0: [0]}}
        entity_permutations[1] = {0: make_entity_permutations(1, degree - 1)}
        super(FDMDual, self).__init__(nodes, ref_el, entity_ids, entity_permutations)
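
The key step above is the generalized symmetric eigenproblem solved by sym_eig on the interior block. A standalone sketch of what such a helper can do (written here with scipy.linalg.eigh; the library's actual sym_eig may differ): it returns eigenvectors S with S.T @ B @ S = I and S.T @ A @ S diagonal.

    import numpy
    from scipy.linalg import eigh

    def sym_eig_sketch(A, B):
        # generalized symmetric eigenproblem A x = lam B x
        lam, S = eigh(A, B)
        return lam, S

    A = numpy.array([[2.0, -1.0], [-1.0, 2.0]])   # stiffness-like matrix
    B = numpy.eye(2)                               # mass-like matrix
    lam, S = sym_eig_sketch(A, B)
    print(numpy.allclose(S.T @ B @ S, numpy.eye(2)))      # True: B-orthonormal
    print(numpy.allclose(S.T @ A @ S, numpy.diag(lam)))   # True: A diagonalized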
Example 5
    def __init__(self, ref_el, degree):
        entity_ids = {0: {0: [], 1: []}, 1: {0: list(range(0, degree + 1))}}
        lr = quadrature.GaussLegendreQuadratureLineRule(ref_el, degree + 1)
        nodes = [functional.PointEvaluation(ref_el, x) for x in lr.pts]
        entity_permutations = {}
        entity_permutations[0] = {0: {0: []}, 1: {0: []}}
        entity_permutations[1] = {0: make_entity_permutations(1, degree + 1)}

        super(GaussLegendreDualSet, self).__init__(nodes, ref_el, entity_ids,
                                                   entity_permutations)
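
Usage sketch (GaussLegendre is exported from FIAT): unlike the Gauss-Lobatto-Legendre variant in Example 1, every DOF lives in the cell interior, which is exactly what the empty vertex entries in entity_ids encode.

    from FIAT import GaussLegendre
    from FIAT.reference_element import ufc_simplex

    element = GaussLegendre(ufc_simplex(1), 2)
    print(element.entity_dofs())   # {0: {0: [], 1: []}, 1: {0: [0, 1, 2]}}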
Example 6
    def __init__(self, ref_el):
        entity_ids = {}
        nodes = []
        cur = 0

        # make nodes by getting points
        # need to do this dimension-by-dimension, facet-by-facet
        top = ref_el.get_topology()
        verts = ref_el.get_vertices()
        sd = ref_el.get_spatial_dimension()
        if ref_el.get_shape() != TRIANGLE:
            raise ValueError("Bell only defined on triangles")

        pd = functional.PointDerivative

        # get jet at each vertex

        entity_ids[0] = {}
        for v in sorted(top[0]):
            nodes.append(functional.PointEvaluation(ref_el, verts[v]))

            # first derivatives
            for i in range(sd):
                alpha = [0] * sd
                alpha[i] = 1
                nodes.append(pd(ref_el, verts[v], alpha))

            # second derivatives
            alphas = [[2, 0], [1, 1], [0, 2]]
            for alpha in alphas:
                nodes.append(pd(ref_el, verts[v], alpha))

            entity_ids[0][v] = list(range(cur, cur + 6))
            cur += 6

        # we need an edge quadrature rule for the moment
        from FIAT.quadrature_schemes import create_quadrature
        from FIAT.jacobi import eval_jacobi
        rline = ufc_simplex(1)
        q1d = create_quadrature(rline, 8)
        q1dpts = q1d.get_points()
        leg4_at_qpts = eval_jacobi(0, 0, 4, 2.0 * q1dpts - 1)

        imond = functional.IntegralMomentOfNormalDerivative
        entity_ids[1] = {}
        for e in sorted(top[1]):
            entity_ids[1][e] = [18 + e]
            nodes.append(imond(ref_el, e, q1d, leg4_at_qpts))

        entity_ids[2] = {0: []}

        super(BellDualSet, self).__init__(nodes, ref_el, entity_ids)
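
A quick inspection sketch (assuming FIAT provides the Bell element in FIAT.bell): the functionals produced are six value/derivative DOFs per vertex followed by one integral moment of the normal derivative per edge.

    from FIAT.bell import Bell
    from FIAT.reference_element import ufc_simplex

    element = Bell(ufc_simplex(2))
    for i, node in enumerate(element.dual_basis()):
        print(i, type(node).__name__)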
Example 7
    def __init__(self, ref_el, flat_el, degree):
        entity_ids = {}
        nodes = []

        # Change coordinates here.
        # Vertices of the simplex corresponding to the reference element.
        v_simplex = hypercube_simplex_map[flat_el].get_vertices()
        # Vertices of the reference element.
        v_hypercube = flat_el.get_vertices()
        # For the mapping, first two vertices are unchanged in all dimensions.
        v_ = [v_hypercube[0], v_hypercube[int(-0.5 * len(v_hypercube))]]

        # For dimension 1 upwards,
        # take the next vertex and map it to the midpoint of the edge/face it belongs to, and shares
        # with no other points.
        for d in range(1, flat_el.get_dimension()):
            v_.append(
                tuple(
                    np.asarray(
                        v_hypercube[flat_el.get_dimension() - d] +
                        np.average(np.asarray(v_hypercube[::2]), axis=0))))
        A, b = make_affine_mapping(
            v_simplex, tuple(v_))  # Make affine mapping to be used later.

        # make nodes by getting points
        # need to do this dimension-by-dimension, facet-by-facet
        top = hypercube_simplex_map[flat_el].get_topology()

        cur = 0
        for dim in sorted(top):
            for entity in sorted(top[dim]):
                pts_cur = hypercube_simplex_map[flat_el].make_points(
                    dim, entity, degree)
                pts_cur = [
                    tuple(np.matmul(A, np.array(x)) + b) for x in pts_cur
                ]
                nodes_cur = [
                    functional.PointEvaluation(flat_el, x) for x in pts_cur
                ]
                nnodes_cur = len(nodes_cur)
                nodes += nodes_cur
                cur += nnodes_cur

        cube_topology = ref_el.get_topology()
        for dim in sorted(cube_topology):
            entity_ids[dim] = {}
            for entity in sorted(cube_topology[dim]):
                entity_ids[dim][entity] = []

        entity_ids[dim][0] = list(range(len(nodes)))
        super(DPCDualSet, self).__init__(nodes, ref_el, entity_ids)
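
A sketch of the affine map A x + b used above (make_affine_mapping lives in FIAT.reference_element): it takes two point tuples and returns the matrix/vector pair that maps the first set of points onto the second.

    import numpy as np
    from FIAT.reference_element import make_affine_mapping

    # map the UFC interval [0, 1] onto [-1, 1]
    A, b = make_affine_mapping([(0.0,), (1.0,)], [(-1.0,), (1.0,)])
    print(A, b)                      # [[2.]] [-1.]
    print(np.dot(A, (0.5,)) + b)     # [0.] -- the midpoint maps to the origin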
Example 8
    def __init__(self, ref_el):
        entity_ids = {}
        nodes = []
        cur = 0

        # make nodes by getting points
        # need to do this dimension-by-dimension, facet-by-facet
        top = ref_el.get_topology()
        verts = ref_el.get_vertices()
        sd = ref_el.get_spatial_dimension()
        if ref_el.get_shape() != TRIANGLE:
            raise ValueError("Argyris only defined on triangles")

        pd = functional.PointDerivative

        # get jet at each vertex

        entity_ids[0] = {}
        for v in sorted(top[0]):
            nodes.append(functional.PointEvaluation(ref_el, verts[v]))

            # first derivatives
            for i in range(sd):
                alpha = [0] * sd
                alpha[i] = 1
                nodes.append(pd(ref_el, verts[v], alpha))

            # second derivatives
            alphas = [[2, 0], [1, 1], [0, 2]]
            for alpha in alphas:
                nodes.append(pd(ref_el, verts[v], alpha))

            entity_ids[0][v] = list(range(cur, cur + 6))
            cur += 6

        # edge dof -- normal at each edge midpoint
        entity_ids[1] = {}
        for e in sorted(top[1]):
            pt = ref_el.make_points(1, e, 2)[0]
            n = functional.PointNormalDerivative(ref_el, e, pt)
            nodes.append(n)
            entity_ids[1][e] = [cur]
            cur += 1

        entity_ids[2] = {0: []}

        super(QuinticArgyrisDualSet, self).__init__(nodes, ref_el, entity_ids)
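
Sketch of where the edge DOF sits: make_points(1, e, 2) returns the single interior lattice point of order 2 on edge e, i.e. its midpoint, which is where the normal derivative is evaluated.

    from FIAT.reference_element import ufc_simplex

    triangle = ufc_simplex(2)
    for e in sorted(triangle.get_topology()[1]):
        print(e, triangle.make_points(1, e, 2))   # one midpoint per edge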
Example 9
    def __init__(self, ref_el):
        entity_ids = {}
        nodes = []
        vs = numpy.array(ref_el.get_vertices())
        bary = tuple(numpy.average(vs, 0))

        nodes = [functional.PointEvaluation(ref_el, bary)]
        entity_ids = {}
        top = ref_el.get_topology()
        for dim in sorted(top):
            entity_ids[dim] = {}
            for entity in sorted(top[dim]):
                entity_ids[dim][entity] = []

        entity_ids[dim] = {0: [0]}

        super(P0Dual, self).__init__(nodes, ref_el, entity_ids)
Example 10
    def __init__(self, ref_el, degree):
        entity_ids = {}
        nodes = []

        # make nodes by getting points
        # need to do this dimension-by-dimension, facet-by-facet
        top = ref_el.get_topology()

        cur = 0
        for dim in sorted(top):
            entity_ids[dim] = {}
            for entity in sorted(top[dim]):
                pts_cur = ref_el.make_points(dim, entity, degree)
                nodes_cur = [
                    functional.PointEvaluation(ref_el, x) for x in pts_cur
                ]
                nnodes_cur = len(nodes_cur)
                nodes += nodes_cur
                entity_ids[dim][entity] = list(range(cur, cur + nnodes_cur))
                cur += nnodes_cur

        super(LagrangeDualSet, self).__init__(nodes, ref_el, entity_ids)
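
Standard usage (Lagrange is exported from FIAT): the dual set places point evaluations on an equispaced lattice, entity by entity, and entity_dofs() records which DOF numbers sit on each vertex, edge, and cell.

    from FIAT import Lagrange
    from FIAT.reference_element import ufc_simplex

    element = Lagrange(ufc_simplex(2), 2)   # quadratic Lagrange on the triangle
    print(element.space_dimension())        # 6
    print(element.entity_dofs())            # one DOF per vertex, one per edge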
Example 11
    def __init__(self, cell, degree):

        # Get topology dictionary
        d = cell.get_spatial_dimension()
        topology = cell.get_topology()

        # Initialize empty nodes and entity_ids
        entity_ids = _initialize_entity_ids(topology)
        nodes = [None for i in list(topology[d - 1].keys())]

        # Construct nodes and entity_ids
        for i in topology[d - 1]:

            # Construct midpoint
            x = cell.make_points(d - 1, i, d)[0]

            # Degree of freedom number i is evaluation at midpoint
            nodes[i] = functional.PointEvaluation(cell, x)
            entity_ids[d - 1][i] += [i]

        # Initialize super-class
        super(CrouzeixRaviartDualSet, self).__init__(nodes, cell, entity_ids)
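
Usage sketch (CrouzeixRaviart is exported from FIAT; only degree 1 is defined): each DOF is a point evaluation at a facet midpoint, so on the triangle there are three.

    from FIAT import CrouzeixRaviart
    from FIAT.reference_element import ufc_simplex

    element = CrouzeixRaviart(ufc_simplex(2), 1)
    print(element.space_dimension())   # 3
    print(element.entity_dofs())       # all DOFs attached to the edges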
Example 12
    def __init__(self, family, degree):
        assert(family == "Lobatto" or family == "Radau"), \
            "Unknown time element '%s'" % family
        if family == "Lobatto":
            assert (degree > 0), "Lobatto not defined for degree < 1!"
        else:
            assert (degree >= 0), "Degree must be >= 0"

        # ids is a map from mesh entity (numbers) to dof numbers
        ids = {}

        # dofs is a list of functionals
        dofs = []

        # Only defined in 1D (on an interval)
        cell = reference_element.UFCInterval()

        self.coords = (ext.compute_lobatto_points(degree) if family
                       == "Lobatto" else ext.compute_radau_points(degree))
        points = [(c, ) for c in self.coords]

        # Create dofs from points
        dofs = [functional.PointEvaluation(cell, point) for point in points]

        # Create ids
        if family == "Lobatto":
            ids[0] = {0: [0], 1: [len(points) - 1]}
            ids[1] = {0: range(1, len(points) - 1)}
        elif family == "Radau":
            ids[0] = {0: [], 1: []}
            ids[1] = {
                0: range(len(points))
            }  # Treat all Radau points as internal
        else:
            error("Undefined family: %s" % family)

        # Initialize dual set
        dual_set.DualSet.__init__(self, dofs, cell, ids)
Example 13
    def __init__(self, ref_el):
        entity_ids = {}
        nodes = []
        entity_permutations = {}
        vs = numpy.array(ref_el.get_vertices())
        if ref_el.get_dimension() == 0:
            bary = ()
        else:
            bary = tuple(numpy.average(vs, 0))

        nodes = [functional.PointEvaluation(ref_el, bary)]
        entity_ids = {}
        top = ref_el.get_topology()
        for dim in sorted(top):
            entity_ids[dim] = {}
            entity_permutations[dim] = {}
            sym_size = ref_el.symmetry_group_size(dim)
            if isinstance(dim, tuple):
                assert isinstance(sym_size, tuple)
                perms = {o: [] for o in numpy.ndindex(sym_size)}
            else:
                perms = {o: [] for o in range(sym_size)}
            for entity in sorted(top[dim]):
                entity_ids[dim][entity] = []
                entity_permutations[dim][entity] = perms
        entity_ids[dim] = {0: [0]}
        if isinstance(dim, tuple):
            entity_permutations[dim][0] = {
                o: [0]
                for o in numpy.ndindex(sym_size)
            }
        else:
            entity_permutations[dim][0] = {o: [0] for o in range(sym_size)}

        super(P0Dual, self).__init__(nodes, ref_el, entity_ids,
                                     entity_permutations)
Example 14
    def __init__(self, A, B):
        # set up simple things
        order = min(A.get_order(), B.get_order())
        if A.get_formdegree() is None or B.get_formdegree() is None:
            formdegree = None
        else:
            formdegree = A.get_formdegree() + B.get_formdegree()

        # set up reference element
        ref_el = TensorProductCell(A.get_reference_element(),
                                   B.get_reference_element())

        if A.mapping()[0] != "affine" and B.mapping()[0] == "affine":
            mapping = A.mapping()[0]
        elif B.mapping()[0] != "affine" and A.mapping()[0] == "affine":
            mapping = B.mapping()[0]
        elif A.mapping()[0] == "affine" and B.mapping()[0] == "affine":
            mapping = "affine"
        else:
            raise ValueError(
                "check tensor product mappings - at least one must be affine")

        # set up entity_ids
        Adofs = A.entity_dofs()
        Bdofs = B.entity_dofs()
        Bsdim = B.space_dimension()
        entity_ids = {}

        for curAdim in Adofs:
            for curBdim in Bdofs:
                entity_ids[(curAdim, curBdim)] = {}
                dim_cur = 0
                for entityA in Adofs[curAdim]:
                    for entityB in Bdofs[curBdim]:
                        entity_ids[(curAdim, curBdim)][dim_cur] = \
                            [x*Bsdim + y for x in Adofs[curAdim][entityA]
                                for y in Bdofs[curBdim][entityB]]
                        dim_cur += 1

        # set up dual basis
        Anodes = A.dual_basis()
        Bnodes = B.dual_basis()

        # build the dual set by inspecting the current dual
        # sets item by item.
        # Currently supported cases:
        # PointEval x PointEval = PointEval [scalar x scalar = scalar]
        # PointScaledNormalEval x PointEval = PointScaledNormalEval [vector x scalar = vector]
        # ComponentPointEvaluation x PointEval [vector x scalar = vector]
        nodes = []
        for Anode in Anodes:
            if isinstance(Anode, functional.PointEvaluation):
                for Bnode in Bnodes:
                    if isinstance(Bnode, functional.PointEvaluation):
                        # case: PointEval x PointEval
                        # the PointEval functional just requires the
                        # coordinates. these are currently stored as
                        # the key of a one-item dictionary. we retrieve
                        # these by calling get_point_dict(), and
                        # use the concatenation to make a new PointEval
                        nodes.append(
                            functional.PointEvaluation(
                                ref_el,
                                _first_point(Anode) + _first_point(Bnode)))
                    elif isinstance(Bnode, functional.IntegralMoment):
                        # dummy functional for product with integral moments
                        nodes.append(
                            functional.Functional(None, None, None, {},
                                                  "Undefined"))
                    elif isinstance(Bnode, functional.PointDerivative):
                        # dummy functional for product with point derivative
                        nodes.append(
                            functional.Functional(None, None, None, {},
                                                  "Undefined"))
                    else:
                        raise NotImplementedError(
                            "unsupported functional type")

            elif isinstance(Anode, functional.PointScaledNormalEvaluation):
                for Bnode in Bnodes:
                    if isinstance(Bnode, functional.PointEvaluation):
                        # case: PointScaledNormalEval x PointEval
                        # this could be wrong if the second shape
                        # has spatial dimension >1, since we are not
                        # explicitly scaling by facet size
                        if len(_first_point(Bnode)) > 1:
                            # TODO: support this case one day
                            raise NotImplementedError(
                                "PointScaledNormalEval x PointEval is not yet supported if the second shape has dimension > 1"
                            )
                        # We cannot make a new functional.PSNEval in
                        # the natural way, since it tries to compute
                        # the normal vector by itself.
                        # Instead, we create things manually, and
                        # call Functional() with these arguments
                        sd = ref_el.get_spatial_dimension()
                        # The pt_dict is a one-item dictionary containing
                        # the details of the functional.
                        # The key is the spatial coordinate, which
                        # is just a concatenation of the two parts.
                        # The value is a list of tuples, representing
                        # the normal vector (scaled by the volume of
                        # the facet) at that point.
                        # Each tuple looks like (foo, (i,)); the i'th
                        # component of the scaled normal is foo.

                        # The following line is only valid when the second
                        # shape has spatial dimension 1 (enforced above)
                        Apoint, Avalue = _first_point_pair(Anode)
                        pt_dict = {
                            Apoint + _first_point(Bnode):
                            Avalue + [(0.0, (len(Apoint), ))]
                        }

                        # The following line should be used in the
                        # general case
                        # pt_dict = {Anode.get_point_dict().keys()[0] + Bnode.get_point_dict().keys()[0]: Anode.get_point_dict().values()[0] + [(0.0, (ii,)) for ii in range(len(Anode.get_point_dict().keys()[0]), len(Anode.get_point_dict().keys()[0]) + len(Bnode.get_point_dict().keys()[0]))]}

                        # THE FOLLOWING IS PROBABLY CORRECT BUT UNTESTED
                        shp = (sd, )
                        nodes.append(
                            functional.Functional(ref_el, shp, pt_dict, {},
                                                  "PointScaledNormalEval"))
                    else:
                        raise NotImplementedError(
                            "unsupported functional type")

            elif isinstance(Anode, functional.PointEdgeTangentEvaluation):
                for Bnode in Bnodes:
                    if isinstance(Bnode, functional.PointEvaluation):
                        # case: PointEdgeTangentEval x PointEval
                        # this is very similar to the case above, so comments omitted
                        if len(_first_point(Bnode)) > 1:
                            raise NotImplementedError(
                                "PointEdgeTangentEval x PointEval is not yet supported if the second shape has dimension > 1"
                            )
                        sd = ref_el.get_spatial_dimension()
                        Apoint, Avalue = _first_point_pair(Anode)
                        pt_dict = {
                            Apoint + _first_point(Bnode):
                            Avalue + [(0.0, (len(Apoint), ))]
                        }

                        # THE FOLLOWING IS PROBABLY CORRECT BUT UNTESTED
                        shp = (sd, )
                        nodes.append(
                            functional.Functional(ref_el, shp, pt_dict, {},
                                                  "PointEdgeTangent"))
                    else:
                        raise NotImplementedError(
                            "unsupported functional type")

            elif isinstance(Anode, functional.ComponentPointEvaluation):
                for Bnode in Bnodes:
                    if isinstance(Bnode, functional.PointEvaluation):
                        # case: ComponentPointEval x PointEval
                        # the CptPointEval functional requires the component
                        # and the coordinates. very similar to PE x PE case.
                        sd = ref_el.get_spatial_dimension()
                        nodes.append(
                            functional.ComponentPointEvaluation(
                                ref_el, Anode.comp, (sd, ),
                                _first_point(Anode) + _first_point(Bnode)))
                    else:
                        raise NotImplementedError(
                            "unsupported functional type")

            elif isinstance(Anode, functional.FrobeniusIntegralMoment):
                for Bnode in Bnodes:
                    if isinstance(Bnode, functional.PointEvaluation):
                        # case: FroIntMom x PointEval
                        sd = ref_el.get_spatial_dimension()
                        pt_dict = {}
                        pt_old = Anode.get_point_dict()
                        for pt in pt_old:
                            pt_dict[pt + _first_point(Bnode)] = pt_old[pt] + [
                                (0.0, sd - 1)
                            ]
                        # THE FOLLOWING IS PROBABLY CORRECT BUT UNTESTED
                        shp = (sd, )
                        nodes.append(
                            functional.Functional(ref_el, shp, pt_dict, {},
                                                  "FrobeniusIntegralMoment"))
                    else:
                        raise NotImplementedError(
                            "unsupported functional type")

            elif isinstance(Anode, functional.IntegralMoment):
                for Bnode in Bnodes:
                    if isinstance(Bnode, functional.PointEvaluation):
                        # case: IntMom x PointEval
                        sd = ref_el.get_spatial_dimension()
                        pt_dict = {}
                        pt_old = Anode.get_point_dict()
                        for pt in pt_old:
                            pt_dict[pt + _first_point(Bnode)] = pt_old[pt]
                        # THE FOLLOWING IS PROBABLY CORRECT BUT UNTESTED
                        shp = (sd, )
                        nodes.append(
                            functional.Functional(ref_el, shp, pt_dict, {},
                                                  "IntegralMoment"))
                    else:
                        raise NotImplementedError(
                            "unsupported functional type")

            elif isinstance(Anode, functional.Functional):
                # this should catch everything else
                for Bnode in Bnodes:
                    nodes.append(
                        functional.Functional(None, None, None, {},
                                              "Undefined"))
            else:
                raise NotImplementedError("unsupported functional type")

        dual = dual_set.DualSet(nodes, ref_el, entity_ids)

        super(TensorProductElement, self).__init__(ref_el, dual, order,
                                                   formdegree, mapping)
        # Set up constituent elements
        self.A = A
        self.B = B

        # degree for quadrature rule
        self.polydegree = max(A.degree(), B.degree())
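
Usage sketch (TensorProductElement is exported from FIAT): taking the tensor product of two linear interval elements gives the bilinear element on the interval-times-interval cell; the entity_dofs keys are the (dimA, dimB) tuples built above.

    from FIAT import Lagrange, TensorProductElement
    from FIAT.reference_element import ufc_simplex

    interval = ufc_simplex(1)
    Q1 = TensorProductElement(Lagrange(interval, 1), Lagrange(interval, 1))
    print(Q1.space_dimension())               # 4
    print(sorted(Q1.entity_dofs().keys()))    # [(0, 0), (0, 1), (1, 0), (1, 1)]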