Example #1
File: access.py Project: FEniCS/ffc
    def facet_coordinate(self, e, mt, tabledata, num_points):
        L = self.language
        if mt.global_derivatives:
            error("Not expecting derivatives of FacetCoordinate.")
        if mt.local_derivatives:
            error("Not expecting derivatives of FacetCoordinate.")
        if mt.averaged:
            error("Not expecting average of FacetCoordinate.")
        if mt.restriction:
            error("Not expecting restriction of FacetCoordinate.")

        if self.integral_type in ("interior_facet", "exterior_facet"):
            tdim, = mt.terminal.ufl_shape
            if tdim == 0:
                error("Vertices have no facet coordinates.")
            elif tdim == 1:
                # 0D vertex coordinate
                warning("Vertex coordinate is always 0, should get rid of this in ufl geometry lowering.")
                return L.LiteralFloat(0.0)
            Xf = self.points_table(num_points)
            iq = self.symbols.quadrature_loop_index()
            assert 0 <= mt.flat_component < (tdim-1)
            if num_points == 1:
                index = mt.flat_component
            elif tdim == 2:
                index = iq
            else:
                index = iq * (tdim - 1) + mt.flat_component
            return Xf[index]
        else:
            # Xf should be computed from X or x symbolically instead of getting here
            error("Expecting reference facet coordinate to be symbolically rewritten.")
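
A note on the indexing above: the generated points table stores the facet quadrature points as num_points consecutive blocks of tdim - 1 reference coordinates, which is why the flat index is iq * (tdim - 1) + flat_component. A minimal numpy sketch of that layout (the sizes and values are invented for illustration; FFC's real table is emitted as C code):

import numpy as np

tdim = 3                                   # e.g. a tetrahedron: its facets have tdim - 1 = 2 reference coordinates
num_points = 4
Xf = np.random.rand(num_points, tdim - 1)  # facet quadrature points, one row per point
flat = Xf.reshape(-1)                      # row-major flattening, as the generated code indexes it

iq, component = 2, 1                       # quadrature point index and flat component
assert flat[iq * (tdim - 1) + component] == Xf[iq, component]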
Example #2
File: plot.py Project: doru1004/FFC
def plot(element, rotate=True):
    "Plot finite element."

    # Check if Soya3D has been imported
    if not _soya_imported:
        warning("Unable to plot element, Soya3D not available (install package python-soya).")
        return

    # Special case: plot dof notation
    if element == "notation":

        # Create model for notation
        notation = create_notation_models()

        # Render plot window
        render(notation, "Notation", 0, True, rotate)

    else:

        # Create cell model
        cell, is3d = create_cell_model(element)

        cellname = element.cell().cellname() # Assuming single cell

        # Create dof models
        dofs, num_moments = create_dof_models(element)

        # Create title
        if element.degree() is not None:
            title = "%s of degree %d on a %s" % (element.family(), element.degree(), cellname)
        else:
            title = "%s on a %s" % (element.family(), cellname)

        # Render plot window
        render([cell] + dofs, title, num_moments, is3d, rotate)
Example #3
def plot(element, rotate=True):
    "Plot finite element."

    # Check if Soya3D has been imported
    if not _soya_imported:
        warning("Unable to plot element, Soya3D not available (install package python-soya).")
        return

    # Special case: plot dof notation
    if element == "notation":

        # Create model for notation
        notation = create_notation_models()

        # Render plot window
        render(notation, "Notation", 0, True, rotate)

    else:

        # Create cell model
        cell, is3d = create_cell_model(element)

        # Create dof models
        dofs, num_moments = create_dof_models(element)

        # Create title
        if element.degree() is not None:
            title = "%s of degree %d on a %s" % (element.family(), element.degree(), element.cell().cellname())
        else:
            title = "%s on a %s" % (element.family(), element.cell().cellname())

        # Render plot window
        render([cell] + dofs, title, num_moments, is3d, rotate)
Example #4
def _check_parameters(parameters):
    "Initial check of parameters."
    if "blas" in parameters:
        warning("BLAS mode unavailable (will return in a future version).")
    if "quadrature_points" in parameters:
        warning("Option 'quadrature_points' has been replaced by 'quadrature_degree'.")
    return parameters
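
A hedged usage sketch for the check above, assuming _check_parameters and FFC's warning helper are already in scope; the parameter values are invented:

params = _check_parameters({"representation": "auto", "quadrature_points": 4})
# Emits the 'quadrature_points' deprecation warning and returns the dict unchanged.
assert params["quadrature_points"] == 4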
Example #5
def _analyze_form(form, parameters):
    "Analyze form, returning form data."

    # Check that form is not empty
    if form.empty():
        error("Form (%s) seems to be zero: cannot compile it." % str(form))

    # Hack to override representation with environment variable
    forced_r = os.environ.get("FFC_FORCE_REPRESENTATION")
    if forced_r:
        warning(
            "representation:    forced by $FFC_FORCE_REPRESENTATION to '%s'" %
            forced_r)
        r = forced_r
    else:
        # Check representation parameters to figure out how to
        # preprocess
        r = _extract_representation_family(form, parameters)
    debug("Preprocessing form using '%s' representation family." % r)

    # Compute form metadata
    if r == "uflacs":
        # Temporary workaround to let uflacs have a different
        # preprocessing pipeline than the legacy quadrature
        # representation. This approach imposes a limitation that,
        # e.g. uflacs and quadrature representations cannot be mixed
        # in the same form.
        from ufl.classes import Jacobian
        form_data = compute_form_data(form,
                                      do_apply_function_pullbacks=True,
                                      do_apply_integral_scaling=True,
                                      do_apply_geometry_lowering=True,
                                      preserve_geometry_types=(Jacobian, ),
                                      do_apply_restrictions=True)
    elif r == "tsfc":
        try:
            # TSFC provides compute_form_data wrapper using correct
            # kwargs
            from tsfc.ufl_utils import compute_form_data as tsfc_compute_form_data
        except ImportError:
            error(
                "Failed to import tsfc.ufl_utils.compute_form_data when asked "
                "for tsfc representation.")
        form_data = tsfc_compute_form_data(form)
    elif r == "quadrature":
        # quadrature representation
        form_data = compute_form_data(form)
    else:
        error("Unexpected representation family '%s' for form preprocessing." %
              r)

    info("")
    info(str(form_data))

    # Attach integral meta data
    _attach_integral_metadata(form_data, r, parameters)
    _validate_representation_choice(form_data, r)

    return form_data
Example #6
File: compiler.py Project: afqueiruga/ffc
def _check_parameters(parameters):
    "Initial check of parameters."
    if "blas" in parameters:
        warning("BLAS mode unavailable (will return in a future version).")
    if "quadrature_points" in parameters:
        warning(
            "Option 'quadrature_points' has been replaced by 'quadrature_degree'."
        )
    return parameters
Example #7
File: analysis.py Project: FEniCS/ffc
def _analyze_form(form, parameters):
    "Analyze form, returning form data."

    # Check that form is not empty
    if form.empty():
        error("Form (%s) seems to be zero: cannot compile it." % str(form))

    # Hack to override representation with environment variable
    forced_r = os.environ.get("FFC_FORCE_REPRESENTATION")
    if forced_r:
        warning("representation:    forced by $FFC_FORCE_REPRESENTATION to '%s'" % forced_r)
        r = forced_r
    else:
        # Check representation parameters to figure out how to
        # preprocess
        r = _extract_representation_family(form, parameters)
    debug("Preprocessing form using '%s' representation family." % r)

    # Compute form metadata
    if r == "uflacs":
        # Temporary workaround to let uflacs have a different
        # preprocessing pipeline than the legacy quadrature
        # representation. This approach imposes a limitation that,
        # e.g. uflacs and quadrature representations cannot be mixed
        # in the same form.
        from ufl.classes import Jacobian
        form_data = compute_form_data(form,
                                      do_apply_function_pullbacks=True,
                                      do_apply_integral_scaling=True,
                                      do_apply_geometry_lowering=True,
                                      preserve_geometry_types=(Jacobian,),
                                      do_apply_restrictions=True)
    elif r == "tsfc":
        try:
            # TSFC provides compute_form_data wrapper using correct
            # kwargs
            from tsfc.ufl_utils import compute_form_data as tsfc_compute_form_data
        except ImportError:
            error("Failed to import tsfc.ufl_utils.compute_form_data when asked "
                  "for tsfc representation.")
        form_data = tsfc_compute_form_data(form)
    elif r == "quadrature":
        # quadrature representation
        form_data = compute_form_data(form)
    else:
        error("Unexpected representation family '%s' for form preprocessing." % r)

    info("")
    info(str(form_data))

    # Attach integral meta data
    _attach_integral_metadata(form_data, r, parameters)
    _validate_representation_choice(form_data, r)

    return form_data
Example #8
def optimize_integral_ir(ir, parameters):
    """
    Compute optimized intermediate representation of integral.

    Note that this function modifies the given intermediate
    representation directly, rather than working on a copy.
    """

    # Skip optimization if FErari is not installed
    if ferari is None:
        warning("FErari not installed, skipping tensor optimizations")
        return ir

    # Skip optimization if requested
    if "no_ferari" in parameters:
        warning("Skipping FErari optimizations as requested.")
        return ir

    # Extract data from intermediate representation
    AK = ir["AK"]
    integral_type = ir["integral_type"]
    num_facets = ir["num_facets"]
    rank = ir["rank"]

    # Optimize cell integrals
    if integral_type == "cell":
        for (k, (A0, GK, dummy)) in enumerate(AK):
            ir["AK"][k] = (A0, GK, _optimize_tensor_contraction(A0.A0, rank))

    # Optimize exterior facet integrals
    elif integral_type == "exterior_facet":
        for i in range(num_facets):
            for (k, (A0, GK, dummy)) in enumerate(AK[i]):
                ir["AK"][i][k] = (A0, GK,
                                  _optimize_tensor_contraction(A0.A0, rank))

    # Optimize interior facet integrals
    elif integral_type == "interior_facet":
        for i in range(num_facets):
            for j in range(num_facets):
                for (k, (A0, GK, dummy)) in enumerate(AK[i][j]):
                    ir["AK"][i][j][k] = (A0, GK,
                                         _optimize_tensor_contraction(
                                             A0.A0, rank))

    # Unhandled integral type
    else:
        error("Unhandled integral type: " + str(integral_type))

    return ir
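
All three loops above apply the same in-place rewrite: each (A0, GK, dummy) tuple is replaced by a new tuple whose last slot holds the optimized contraction. A standalone sketch of that pattern with toy data (the names are illustrative, not FFC's actual IR):

def optimize_entries(entries, optimize):
    # Replace each (A0, GK, dummy) entry in place, filling the last slot.
    for k, (A0, GK, _dummy) in enumerate(entries):
        entries[k] = (A0, GK, optimize(A0))

AK = [(3, "g0", None), (5, "g1", None)]
optimize_entries(AK, lambda a: a * a)
assert AK == [(3, "g0", 9), (5, "g1", 25)]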
Example #9
def _check_parameters(parameters):
    "Initial check of parameters."
    if parameters is None:
        parameters = default_parameters()
    if "blas" in parameters:
        warning("BLAS mode unavailable (will return in a future version).")
    if "quadrature_points" in parameters:
        warning("Option 'quadrature_points' has been replaced by 'quadrature_degree'.")

    # HACK
    import os
    r = os.environ.get("FFC_FORCE_REPRESENTATION")
    if r: parameters["representation"] = r

    return parameters
def optimize_integral_ir(ir, parameters):
    """
    Compute optimized intermediate representation of integral.

    Note that this function modifies the given intermediate
    representation directly, rather than working on a copy.
    """

    # Skip optimization if FErari is not installed
    if ferari is None:
        warning("FErari not installed, skipping tensor optimizations")
        return ir

    # Skip optimization if requested
    if "no_ferari" in parameters:
        warning("Skipping FErari optimizations as requested.")
        return ir

    # Extract data from intermediate representation
    AK = ir["AK"]
    domain_type = ir["domain_type"]
    num_facets = ir["num_facets"]
    rank = ir["rank"]

    # Optimize cell integrals
    if domain_type == "cell":
        for (k, (A0, GK, dummy)) in enumerate(AK):
            ir["AK"][k] = (A0, GK, _optimize_tensor_contraction(A0.A0, rank))

    # Optimize exterior facet integrals
    elif domain_type == "exterior_facet":
        for i in range(num_facets):
            for (k, (A0, GK, dummy)) in enumerate(AK[i]):
                ir["AK"][i][k] = (A0, GK, _optimize_tensor_contraction(A0.A0, rank))

    # Optimize interior facet integrals
    elif domain_type == "interior_facet":
        for i in range(num_facets):
            for j in range(num_facets):
                for (k, (A0, GK, dummy)) in enumerate(AK[i][j]):
                    ir["AK"][i][j][k] = (A0, GK, _optimize_tensor_contraction(A0.A0, rank))

    # Unhandled integral type
    else:
        error("Unhandled integral type: " + str(domain_type))

    return ir
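
The "HACK" block in _check_parameters above simply lets an environment variable override whatever the caller passed. The same logic in isolation (the parameter dict is a toy):

import os

os.environ["FFC_FORCE_REPRESENTATION"] = "quadrature"  # simulate the override
parameters = {"representation": "auto"}
r = os.environ.get("FFC_FORCE_REPRESENTATION")
if r:
    parameters["representation"] = r
assert parameters["representation"] == "quadrature"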
Example #11
def generate_integral_code(ir, prefix, parameters):
    "Generate code for integral from intermediate representation."

    # Prefetch formatting to speed up code generation (well...)
    ret = format["return"]

    # Generate code
    code = initialize_integral_code(ir, prefix, parameters)
    code["num_cells"] = indent(ret(ir["num_cells"]), 4)
    code["tabulate_tensor"] = indent(_tabulate_tensor(ir, prefix, parameters), 4)
    code["additional_includes_set"] = ir["additional_includes_set"]

    precision = ir["integrals_metadata"].get("precision")
    if precision is not None and precision != parameters["precision"]:
        warning("Ignoring precision in integral metadata compiled "
                "using quadrature representation. Not implemented.")

    return code
def _optimize_tensor_contraction(A0, rank):
    "Compute optimized tensor contraction for given reference tensor."

    # Select FErari optimization algorithm
    if rank == 2:
        optimize = binary.optimize
    elif rank == 1:
        optimize = binary.optimize_action
    else:
        warning("Tensor optimization only available for rank 1 and 2 tensors, skipping optimizations")
        return None

    # Write a message
    info("Calling FErari to optimize tensor of size %s (%d entries)",
         " x ".join(str(d) for d in shape(A0)), product(shape(A0)))#

    # Compute optimized tensor contraction
    return optimize(A0)
Example #13
def parse_optimise_parameters(parameters, itg_data):

    # Initialize parameters
    optimise_parameters = {
        "eliminate zeros": False,
        "optimisation": False,
        "ignore ones": False,
        "remove zero terms": False,
        "ignore zero tables": False
    }

    # Set optimized parameters
    if parameters["optimize"] and itg_data.integral_type == "custom":
        warning(
            "Optimization not available for custom integrals, skipping optimization."
        )
    elif parameters["optimize"]:
        optimise_parameters["ignore ones"] = True
        optimise_parameters["remove zero terms"] = True
        optimise_parameters["ignore zero tables"] = True

        # Do not include this in below if/else clause since we want to be
        # able to switch on this optimisation in addition to the other
        # optimisations.
        if "eliminate_zeros" in parameters:
            optimise_parameters["eliminate zeros"] = True

        if "simplify_expressions" in parameters:
            optimise_parameters["optimisation"] = "simplify_expressions"
        elif "precompute_ip_const" in parameters:
            optimise_parameters["optimisation"] = "precompute_ip_const"
        elif "precompute_basis_const" in parameters:
            optimise_parameters["optimisation"] = "precompute_basis_const"
        # The current default optimisation (for -O) is equal to
        # '-feliminate_zeros -fsimplify_expressions'.
        else:
            # If '-O -feliminate_zeros' was given on the command line, do not
            # simplify expressions
            if not "eliminate_zeros" in parameters:
                optimise_parameters["eliminate zeros"] = True
                optimise_parameters["optimisation"] = "simplify_expressions"

    return optimise_parameters
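
A hedged usage sketch for the function above; it assumes parse_optimise_parameters is in scope and substitutes a minimal stub for FFC's integral data, since only the integral_type attribute is consulted:

from types import SimpleNamespace

itg_data = SimpleNamespace(integral_type="cell")  # stand-in for the real IntegralData object
flags = {"optimize": True, "eliminate_zeros": True}
opts = parse_optimise_parameters(flags, itg_data)
# "eliminate zeros", "ignore ones", "remove zero terms" and "ignore zero tables" are now True;
# "optimisation" stays False because -fsimplify_expressions was not requested.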
Example #14
def generate_integral_code(ir, prefix, parameters):
    "Generate code for integral from intermediate representation."

    # Prefetch formatting to speed up code generation (well...)
    ret = format["return"]

    # Generate code
    code = initialize_integral_code(ir, prefix, parameters)
    code["num_cells"] = indent(ret(ir["num_cells"]), 4)
    code["tabulate_tensor"] = indent(_tabulate_tensor(ir, prefix, parameters),
                                     4)
    code["additional_includes_set"] = ir["additional_includes_set"]

    precision = ir["integrals_metadata"].get("precision")
    if precision is not None and precision != parameters["precision"]:
        warning("Ignoring precision in integral metadata compiled "
                "using quadrature representation. Not implemented.")

    return code
Example #15
File: plot.py Project: doru1004/FFC
def create_cell_model(element):
    "Create Soya3D model for cell."

    # Get color
    family = element.family()
    if family not in element_colors:
        warning("Don't know a good color for elements of type '%s', using default color." % family)
        family = "Lagrange"
    color = element_colors[family]
    color = (color[0], color[1], color[2], 0.7)

    # Create model based on cell type
    cellname = element.cell().cellname()
    if cellname == "triangle":
        return UnitTriangle(color), False
    elif cellname == "tetrahedron":
        return UnitTetrahedron(color), True

    error("Unable to plot element, unhandled cell type: %s" % str(cellname))
Example #17
def _optimize_tensor_contraction(A0, rank):
    "Compute optimized tensor contraction for given reference tensor."

    # Select FErari optimization algorithm
    if rank == 2:
        optimize = binary.optimize
    elif rank == 1:
        optimize = binary.optimize_action
    else:
        warning(
            "Tensor optimization only available for rank 1 and 2 tensors, skipping optimizations"
        )
        return None

    # Write a message
    info("Calling FErari to optimize tensor of size %s (%d entries)",
         " x ".join(str(d) for d in shape(A0)), product(shape(A0)))  #

    # Compute optimized tensor contraction
    return optimize(A0)
Example #18
def parse_optimise_parameters(parameters, itg_data):

    # Initialize parameters
    optimise_parameters = {"eliminate zeros":     False,
                           "optimisation":        False,
                           "ignore ones":         False,
                           "remove zero terms":   False,
                           "ignore zero tables":  False}


    # Set optimized parameters
    if parameters["optimize"] and itg_data.integral_type == "custom":
        warning("Optimization not available for custom integrals, skipping optimization.")
    elif parameters["optimize"]:
        # Disable "ignore ones", because it is broken
        # optimise_parameters["ignore ones"]        = True
        optimise_parameters["remove zero terms"]  = True
        optimise_parameters["ignore zero tables"] = True

    return optimise_parameters
Example #19
File: parameters.py Project: FEniCS/ffc
def parse_optimise_parameters(parameters, itg_data):

    # Initialize parameters
    optimise_parameters = default_optimize_parameters()

    # Set optimized parameters
    if parameters["optimize"] and itg_data.integral_type in custom_integral_types:
        warning("Optimization not available for custom integrals, skipping optimization.")

    elif parameters["optimize"]:
        optimise_parameters["ignore ones"] = True
        optimise_parameters["remove zero terms"] = True
        optimise_parameters["ignore zero tables"] = True

        # Do not include this in below if/else clause since we want to
        # be able to switch on this optimisation in addition to the
        # other optimisations.
        if "eliminate_zeros" in parameters:
            optimise_parameters["eliminate zeros"] = True

        if "simplify_expressions" in parameters:
            optimise_parameters["optimisation"] = "simplify_expressions"
        elif "precompute_ip_const" in parameters:
            optimise_parameters["optimisation"] = "precompute_ip_const"
        elif "precompute_basis_const" in parameters:
            optimise_parameters["optimisation"] = "precompute_basis_const"
        # The current default optimisation (for -O) is equal to
        # '-feliminate_zeros -fsimplify_expressions'.
        else:
            # If '-O -feliminate_zeros' was given on the command line,
            # do not simplify expressions
            if "eliminate_zeros" not in parameters:
                optimise_parameters["eliminate zeros"] = True
                optimise_parameters["optimisation"] = "simplify_expressions"

    return optimise_parameters
Example #20
File: access.py Project: strekalloff/mpm
    def facet_coordinate(self, e, mt, tabledata, num_points):
        L = self.language
        if mt.global_derivatives:
            error("Not expecting derivatives of FacetCoordinate.")
        if mt.local_derivatives:
            error("Not expecting derivatives of FacetCoordinate.")
        if mt.averaged:
            error("Not expecting average of FacetCoordinate.")
        if mt.restriction:
            error("Not expecting restriction of FacetCoordinate.")

        if self.integral_type in ("interior_facet", "exterior_facet"):
            tdim, = mt.terminal.ufl_shape
            if tdim == 0:
                error("Vertices have no facet coordinates.")
            elif tdim == 1:
                # 0D vertex coordinate
                warning(
                    "Vertex coordinate is always 0, should get rid of this in ufl geometry lowering."
                )
                return L.LiteralFloat(0.0)
            Xf = self.points_table(num_points)
            iq = self.symbols.quadrature_loop_index()
            assert 0 <= mt.flat_component < (tdim - 1)
            if num_points == 1:
                index = mt.flat_component
            elif tdim == 2:
                index = iq
            else:
                index = iq * (tdim - 1) + mt.flat_component
            return Xf[index]
        else:
            # Xf should be computed from X or x symbolically instead of getting here
            error(
                "Expecting reference facet coordinate to be symbolically rewritten."
            )
Example #21
def _tabulate_tensor(ir, prefix, parameters):
    "Generate code for a single integral (tabulate_tensor())."

    # Prefetch formatting to speed up code generation
    f_comment = format["comment"]
    f_G = format["geometry constant"]
    f_const_double = format["assign"]
    f_switch = format["switch"]
    f_float = format["float"]
    f_assign = format["assign"]
    f_A = format["element tensor"]
    f_r = format["free indices"][0]
    f_loop = format["generate loop"]
    f_int = format["int"]
    f_facet = format["facet"]

    # Get data
    opt_par = ir["optimise_parameters"]
    integral_type = ir["integral_type"]
    gdim = ir["geometric_dimension"]
    tdim = ir["topological_dimension"]
    num_facets = ir["num_facets"]
    num_vertices = ir["num_vertices"]
    prim_idims = ir["prim_idims"]
    integrals = ir["trans_integrals"]
    geo_consts = ir["geo_consts"]
    oriented = ir["needs_oriented"]
    element_data = ir["element_data"]
    num_cells = ir["num_cells"]

    # Create sets of used variables
    used_weights = set()
    used_psi_tables = set()
    used_nzcs = set()
    trans_set = set()
    sets = [used_weights, used_psi_tables, used_nzcs, trans_set]

    affine_tables = {}  # TODO: This is not populated anywhere, remove?
    quadrature_rules = ir["quadrature_rules"]

    operations = []
    if integral_type == "cell":

        # Generate code for computing element tensor
        tensor_code, mem_code, num_ops = _generate_element_tensor(integrals,
                                                                  sets,
                                                                  opt_par,
                                                                  gdim,
                                                                  tdim)
        tensor_code = "\n".join(tensor_code)

        # Set operations equal to num_ops (for printing info on operations).
        operations.append([num_ops])

        # Generate code for basic geometric quantities
        jacobi_code = ""
        jacobi_code += format["compute_jacobian"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += format["compute_jacobian_inverse"](tdim, gdim)
        if oriented:
            jacobi_code += format["orientation"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += format["scale factor snippet"]

        # Generate code for cell volume and circumradius
        jacobi_code += "\n\n" + format["generate cell volume"](tdim, gdim,
                                                               integral_type)
        jacobi_code += "\n\n" + format["generate circumradius"](tdim, gdim,
                                                                integral_type)

    elif integral_type == "exterior_facet":

        # Iterate over facets
        cases = [None for i in range(num_facets)]
        for i in range(num_facets):
            # Update transformer with facets and generate case code +
            # set of used geometry terms.
            c, mem_code, ops = _generate_element_tensor(integrals[i], sets,
                                                        opt_par, gdim, tdim)
            case = [f_comment("Total number of operations to compute element tensor (from this point): %d" % ops)]
            case += c
            cases[i] = "\n".join(case)

            # Save number of operations (for printing info on
            # operations).
            operations.append([i, ops])

        # Generate tensor code for all cases using a switch.
        tensor_code = f_switch(f_facet(None), cases)

        # Generate code for basic geometric quantities
        jacobi_code = ""
        jacobi_code += format["compute_jacobian"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += format["compute_jacobian_inverse"](tdim, gdim)
        if oriented:
            jacobi_code += format["orientation"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += "\n\n" + format["facet determinant"](tdim, gdim)
        jacobi_code += "\n\n" + format["generate normal"](tdim, gdim,
                                                          integral_type)
        jacobi_code += "\n\n" + format["generate facet area"](tdim, gdim)
        if tdim == 3:
            jacobi_code += "\n\n" + format["generate min facet edge length"](tdim, gdim)
            jacobi_code += "\n\n" + format["generate max facet edge length"](tdim, gdim)

        # Generate code for cell volume and circumradius
        jacobi_code += "\n\n" + format["generate cell volume"](tdim, gdim,
                                                               integral_type)
        jacobi_code += "\n\n" + format["generate circumradius"](tdim, gdim,
                                                                integral_type)

    elif integral_type == "interior_facet":

        # Modify the dimensions of the primary indices because we have
        # a macro element
        prim_idims = [d * 2 for d in prim_idims]

        # Iterate over combinations of facets
        cases = [[None for j in range(num_facets)] for i in range(num_facets)]
        for i in range(num_facets):
            for j in range(num_facets):
                # Update transformer with facets and generate case
                # code + set of used geometry terms.
                c, mem_code, ops = _generate_element_tensor(integrals[i][j],
                                                            sets, opt_par,
                                                            gdim, tdim)
                case = [f_comment("Total number of operations to compute element tensor (from this point): %d" % ops)]
                case += c
                cases[i][j] = "\n".join(case)

                # Save number of operations (for printing info on
                # operations).
                operations.append([i, j, ops])

        # Generate tensor code for all cases using a switch.
        tensor_code = f_switch(f_facet("+"),
                               [f_switch(f_facet("-"),
                                         cases[i]) for i in range(len(cases))])

        # Generate code for basic geometric quantities
        jacobi_code = ""
        for _r in ["+", "-"]:
            jacobi_code += format["compute_jacobian"](tdim, gdim, r=_r)
            jacobi_code += "\n"
            jacobi_code += format["compute_jacobian_inverse"](tdim, gdim, r=_r)
            if oriented:
                jacobi_code += format["orientation"](tdim, gdim, r=_r)
            jacobi_code += "\n"
        jacobi_code += "\n\n" + format["facet determinant"](tdim, gdim, r="+")
        jacobi_code += "\n\n" + format["generate normal"](tdim, gdim,
                                                          integral_type)
        jacobi_code += "\n\n" + format["generate facet area"](tdim, gdim)
        if tdim == 3:
            jacobi_code += "\n\n" + format["generate min facet edge length"](tdim, gdim, r="+")
            jacobi_code += "\n\n" + format["generate max facet edge length"](tdim, gdim, r="+")

        # Generate code for cell volume and circumradius
        jacobi_code += "\n\n" + format["generate cell volume"](tdim, gdim,
                                                               integral_type)
        jacobi_code += "\n\n" + format["generate circumradius"](tdim, gdim,
                                                                integral_type)

    elif integral_type == "vertex":

        # Iterate over vertices
        cases = [None for i in range(num_vertices)]
        for i in range(num_vertices):
            # Update transformer with vertices and generate case code
            # + set of used geometry terms.
            c, mem_code, ops = _generate_element_tensor(integrals[i],
                                                        sets,
                                                        opt_par,
                                                        gdim,
                                                        tdim)
            case = [f_comment("Total number of operations to compute element tensor (from this point): %d" % ops)]
            case += c
            cases[i] = "\n".join(case)

            # Save number of operations (for printing info on
            # operations).
            operations.append([i, ops])

        # Generate tensor code for all cases using a switch.
        tensor_code = f_switch(format["vertex"], cases)

        # Generate code for basic geometric quantities
        jacobi_code = ""
        jacobi_code += format["compute_jacobian"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += format["compute_jacobian_inverse"](tdim, gdim)
        if oriented:
            jacobi_code += format["orientation"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += "\n\n" + format["facet determinant"](tdim, gdim)  # FIXME: This is not defined in a point???

    elif integral_type in custom_integral_types:

        # Set number of cells
        if integral_type == "cutcell":
            num_cells = 1
        elif integral_type == "interface":
            num_cells = 2
        elif integral_type == "overlap":
            num_cells = 2

        # Warn that more than two cells are only partly supported.
        # The missing piece is to couple multiple cells to
        # restrictions other than '+' and '-'.
        if num_cells > 2:
            warning("Custom integrals with more than two cells only partly supported.")

        # Modify the dimensions of the primary indices because we have a macro element
        if num_cells == 2:
            prim_idims = [d * 2 for d in prim_idims]

        # Check whether we need to generate facet normals
        generate_custom_facet_normal = num_cells == 2

        # Generate code for computing element tensor
        tensor_code, mem_code, num_ops = _generate_element_tensor(integrals,
                                                                  sets,
                                                                  opt_par,
                                                                  gdim,
                                                                  tdim,
                                                                  generate_custom_facet_normal)

        tensor_code = "\n".join(tensor_code)

        # Set operations equal to num_ops (for printing info on
        # operations).
        operations.append([num_ops])

        # FIXME: Jacobi code is only needed when we use cell volume or
        # circumradius.
        # FIXME: Does not seem to be removed by remove_unused.

        # Generate code for basic geometric quantities
        jacobi_code = ""
        for i in range(num_cells):
            r = i if num_cells > 1 else None
            jacobi_code += "\n"
            jacobi_code += f_comment("--- Compute geometric quantities on cell %d ---" % i)
            jacobi_code += "\n\n"
            if num_cells > 1:
                jacobi_code += f_comment("Extract vertex coordinates\n")
                jacobi_code += format["extract_cell_coordinates"]((tdim + 1) * gdim * i, r=i)
                jacobi_code += "\n\n"
            jacobi_code += format["compute_jacobian"](tdim, gdim, r=r)
            jacobi_code += "\n"
            jacobi_code += format["compute_jacobian_inverse"](tdim, gdim, r=r)
            jacobi_code += "\n"
            jacobi_code += format["generate cell volume"](tdim, gdim,
                                                          integral_type,
                                                          r=r if num_cells > 1 else None)
            jacobi_code += "\n"
            jacobi_code += format["generate circumradius"](tdim, gdim,
                                                           integral_type,
                                                           r=r if num_cells > 1 else None)
            jacobi_code += "\n"

    else:
        error("Unhandled integral type: " + str(integral_type))

    # After we have generated the element code for all facets we can
    # remove the unused transformations.
    common = [remove_unused(jacobi_code, trans_set)]

    # FIXME: After introduction of custom integrals, the common code
    # here is not really common anymore. Think about how to
    # restructure this function.

    # Add common code except for custom integrals
    if integral_type not in custom_integral_types:
        common += _tabulate_weights([quadrature_rules[p] for p in sorted(used_weights)])

        # Add common code for updating tables
        name_map = ir["name_map"]
        tables = ir["unique_tables"]
        tables.update(affine_tables)  # TODO: This is not populated anywhere, remove?
        common += _tabulate_psis(tables, used_psi_tables, name_map, used_nzcs,
                                 opt_par, integral_type, gdim)

    # Add special tabulation code for custom integral
    else:
        common += _evaluate_basis_at_quadrature_points(used_psi_tables,
                                                       gdim,
                                                       element_data,
                                                       prefix,
                                                       num_vertices,
                                                       num_cells)

    # Reset the element tensor (array 'A' given as argument to
    # tabulate_tensor() by assembler)
    # Handle functionals.
    common += [f_comment("Reset values in the element tensor.")]
    if prim_idims == []:
        common += [f_assign(f_A(f_int(0)), f_float(0))]
    else:
        dim = functools.reduce(lambda v, u: v * u, prim_idims)
        common += f_loop([f_assign(f_A(f_r), f_float(0))], [(f_r, 0, dim)])

    # Create the constant geometry declarations (only generated if
    # simplify expressions are enabled).
    geo_ops, geo_code = generate_aux_constants(geo_consts, f_G, f_const_double)
    if geo_code:
        common += [f_comment("Number of operations to compute geometry constants: %d." % geo_ops)]
        common += [format["declaration"](format["float declaration"], f_G(len(geo_consts)))]
        common += geo_code

    # Add comments.
    common += ["", f_comment("Compute element tensor using UFL quadrature representation")]
    common += [f_comment("Optimisations: %s" % ", ".join([str((k, opt_par[k]))
                                                          for k in sorted(opt_par.keys())]))]

    for ops in operations:
        # Add geo ops count to integral ops count for writing info.
        if isinstance(ops[-1], int):
            ops[-1] += geo_ops

    return "\n".join(common) + "\n" + tensor_code
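
One detail worth isolating from the function above is how the element tensor is reset: the flat size of A is the product of the primary index dimensions, computed with functools.reduce. A toy version of that arithmetic (the dimensions are invented):

import functools

prim_idims = [3, 3]  # e.g. a bilinear form with two P1 arguments on a triangle
dim = functools.reduce(lambda v, u: v * u, prim_idims)
assert dim == 9      # entries A[0] .. A[8] are zeroed by the generated loop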
Example #22
File: analysis.py Project: FEniCS/ffc
def _determine_representation(integral_metadatas, ida, form_data, form_r_family,
                              parameters):
    "Determine one unique representation considering all integrals together."

    # Extract unique representation among these single-domain
    # integrals (Generating code with different representations within
    # a single tabulate_tensor is considered not worth the effort)
    representations  = set(md["representation"] for md in integral_metadatas
                           if md["representation"] != "auto")
    optimize_values  = set(md["optimize"] for md in integral_metadatas)
    precision_values = set(md["precision"] for md in integral_metadatas)

    if len(representations) > 1:
        error("Integral representation must be equal within each sub domain or 'auto', got %s." % (str(sorted(str(v) for v in representations)),))
    if len(optimize_values) > 1:
        error("Integral 'optimize' metadata must be equal within each sub domain or not set, got %s." % (str(sorted(str(v) for v in optimize_values)),))
    if len(precision_values) > 1:
        error("Integral 'precision' metadata must be equal within each sub domain or not set, got %s." % (str(sorted(str(v) for v in precision_values)),))

    # The one and only non-auto representation found, or get from parameters
    r, = representations  or (parameters["representation"],)
    o, = optimize_values  or (parameters["optimize"],)
    # FIXME: Default param value is zero which is not interpreted well by tsfc!
    p, = precision_values or (parameters["precision"],)

    # If it's still auto, try to determine which representation is
    # best for these integrals
    if r == "auto":
        # Find representations compatible with these integrals
        compatible = _find_compatible_representations(ida.integrals,
                                                      form_data.unique_sub_elements)
        # Pick the one compatible or default to uflacs
        if len(compatible) == 0:
            error("Found no representation capable of compiling this form.")
        elif len(compatible) == 1:
            r, = compatible
        else:
            # NOTE: Need to pick the same default as in
            # _extract_representation_family
            if form_r_family == "uflacs":
                r = "uflacs"
            elif form_r_family == "tsfc":
                r = "tsfc"
            elif form_r_family == "quadrature":
                r = "quadrature"
            else:
                error("Invalid form representation family %s." % (form_r_family,))
        info("representation:    auto --> %s" % r)
    else:
        info("representation:    %s" % r)

    if p is None:
        p = default_precision

    # Hack to override representation with environment variable
    forced_r = os.environ.get("FFC_FORCE_REPRESENTATION")
    if forced_r:
        r = forced_r
        warning("representation:    forced by $FFC_FORCE_REPRESENTATION to '%s'" % r)
        return r, o, p

    return r, o, p
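
The lines of the form r, = representations or (parameters["representation"],) rely on a small idiom: an empty set is falsy, so the one-element fallback tuple is unpacked instead, and the unpacking also asserts that exactly one value is present. In isolation (values invented):

default = "uflacs"

representations = set()             # nothing specified in the integral metadata
r, = representations or (default,)  # falls back to the default
assert r == "uflacs"

representations = {"quadrature"}    # exactly one explicit choice
r, = representations or (default,)
assert r == "quadrature"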
Example #23
File: generator.py Project: FEniCS/ffc
    def generate_snippets(self, L, ir, parameters):
        "Generate code snippets for each keyword found in templates."
        snippets = {}
        for kw in self._keywords:
            handlerstr = "%s.%s" % (self.__class__.__name__, kw)

            # Check that attribute self.<keyword> is available
            if not hasattr(self, kw):
                error("Missing handler %s." % (handlerstr,))

            # Call self.<keyword>(*args) to get value to insert in snippets
            method = getattr(self, kw)
            vn = method.__code__.co_varnames[:method.__code__.co_argcount]
            file_line = "%s:%s" % (method.__code__.co_filename, method.__code__.co_firstlineno)

            #if handlerstr == "ufc_dofmap.create":
            #    import ipdb; ipdb.set_trace()

            # Always pass L
            assert vn[:2] == ("self", "L")
            vn = vn[2:]
            args = (L,)

            # Either pass full ir or extract ir value with keyword given by argument name
            if vn[0] == "ir":
                args += (ir,)
            elif vn[0] in ir:
                args += (ir[vn[0]],)
            else:
                error("Cannot find key '%s' in ir, argument to %s at %s." % (vn[0], handlerstr, file_line))
            vn = vn[1:]

            # Optionally pass parameters
            if vn == ("parameters",):
                args += (parameters,)
            elif vn:
                error("Invalid argument names %s to %s at %s." % (vn, handlerstr, file_line))

            # Call handler
            value = method(*args)


            if isinstance(value, list):
                value = L.StatementList(value)

            # Indent body and format to str
            if isinstance(value, L.CStatement):
                value = L.Indented(value.cs_format(precision=parameters["precision"]))
                value = format_indented_lines(value)
            elif not isinstance(value, str):
                error("Expecting code or string, not %s, returned from handler %s at %s." % (type(value), handlerstr, file_line))

            # Store formatted code in snippets dict
            snippets[kw] = value

        # Error checking (can detect some bugs early when changing the ufc interface)
        # Get all attributes of the subclass (skip "_foo" and "generate*")
        attrs = set(name for name in dir(self) if not (name.startswith("_") or name.startswith("generate")))
        # Get all attributes of this base class (skip "_foo" and "generate*")
        base_attrs = set(name for name in dir(ufc_generator) if not (name.startswith("_") or name.startswith("generate")))
        # The template keywords should not contain any names not among the class attributes
        missing = set(self._keywords) - attrs
        if missing:
            warning("*** Missing generator functions:\n%s" % ('\n'.join(map(str, sorted(missing))),))
        # The class attributes should not contain any names not among the template keywords
        # (this is strict, a useful check when changing ufc, but can be dropped)
        unused = attrs - set(self._keywords) - base_attrs
        if unused:
            warning("*** Unused generator functions:\n%s" % ('\n'.join(map(str, sorted(unused))),))

        # Return snippets, a dict of code strings
        return snippets
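
The dispatch above keys off the handler's positional argument names, read from its code object. A minimal illustration of that introspection with a toy class (not a real ufc_generator subclass):

class Demo:
    def tabulate_tensor(self, L, ir, parameters):
        return "..."

method = Demo.tabulate_tensor
vn = method.__code__.co_varnames[:method.__code__.co_argcount]
assert vn == ("self", "L", "ir", "parameters")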
Example #24
def compile_form(forms, object_names=None, prefix="Form", parameters=None):
    """This function generates UFC code for a given UFL form or list
    of UFL forms."""

    info("Compiling form %s\n" % prefix)

    # Reset timing
    cpu_time_0 = time()

    # Check input arguments
    forms = _check_forms(forms)
    if not forms:
        return
    if prefix != os.path.basename(prefix):
        prefix = os.path.basename(prefix)
        warning("Invalid prefix, modified to {}.".format(prefix))
    if object_names is None:
        object_names = {}
    parameters = _check_parameters(parameters)

    # Stage 1: analysis
    cpu_time = time()
    analysis = analyze_forms(forms, parameters)
    _print_timing(1, time() - cpu_time)

    # Stage 2: intermediate representation
    cpu_time = time()
    ir = compute_ir(analysis, prefix, parameters, object_names=object_names)
    _print_timing(2, time() - cpu_time)

    # Stage 3: optimization
    cpu_time = time()
    oir = optimize_ir(ir, parameters)
    _print_timing(3, time() - cpu_time)

    # Return IR (PyOP2 mode) or code string (otherwise)
    if parameters["pyop2-ir"]:
        try:
            from ffc.quadrature.quadraturepyop2ir import generate_pyop2_ir
        except ImportError:
            raise ImportError("Format pyop2-ir depends on PyOP2, which is not available.")
        # Stage 4: build PyOP2 intermediate representation

        cpu_time = time()
        #FIXME: need a cleaner interface
        pyop2_ir = [generate_pyop2_ir(ir, prefix, parameters) for ir in oir[3]]
        _print_timing(4, time() - cpu_time)

        info_green("FFC finished in %g seconds.", time() - cpu_time_0)
        return pyop2_ir

    else:
        # Stage 4: code generation
        cpu_time = time()
        code = generate_code(oir, prefix, parameters)
        _print_timing(4, time() - cpu_time)

        # Stage 4.1: generate wrappers
        cpu_time = time()
        wrapper_code = generate_wrapper_code(analysis, prefix, object_names, parameters)
        _print_timing(4.1, time() - cpu_time)

        # Stage 5: format code
        cpu_time = time()
        code_h, code_c = format_code(code, wrapper_code, prefix, parameters)
        write_code(code_h, code_c, prefix, parameters) # FIXME: Don't write to file in this function (issue #72)
        _print_timing(5, time() - cpu_time)

        info_green("FFC finished in %g seconds.", time() - cpu_time_0)

        return code
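
A hedged sketch of how a driver like this is typically invoked. It assumes a legacy UFL installation for the element and form declarations (the standard Poisson bilinear form) and that compile_form above is importable with defaults that satisfy the keys it consults; the prefix is arbitrary:

from ufl import FiniteElement, TestFunction, TrialFunction, dx, grad, inner, triangle

element = FiniteElement("Lagrange", triangle, 1)
u = TrialFunction(element)
v = TestFunction(element)
a = inner(grad(u), grad(v)) * dx

code = compile_form([a], prefix="Poisson")  # generates UFC code for the form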
Example #25
def tabulate_basis(sorted_integrals, form_data, itg_data):
    "Tabulate the basis functions and derivatives."

    # MER: Note to newbies: this code assumes that each integral in
    # the dictionary of sorted_integrals that enters here, has a
    # unique number of quadrature points ...

    # Initialise return values.
    quadrature_rules = {}
    psi_tables = {}
    integrals = {}
    avg_elements = {"cell": [], "facet": []}

    # Get some useful variables in short form
    integral_type = itg_data.integral_type
    cell = itg_data.domain.cell()

    # Create canonical ordering of quadrature rules
    rules = sorted(sorted_integrals.keys())

    # Loop the quadrature points and tabulate the basis values.
    for degree, scheme in rules:

        # --------- Creating quadrature rule
        # Make quadrature rule and get points and weights.
        (points, weights) = create_quadrature_points_and_weights(
            integral_type, cell, degree, scheme)

        # The TOTAL number of weights/points
        len_weights = None if weights is None else len(weights)

        # Add points and rules to dictionary
        ffc_assert(
            len_weights not in quadrature_rules,
            "This number of points is already present in the weight table: " +
            repr(quadrature_rules))
        quadrature_rules[len_weights] = (weights, points)

        # --------- Store integral
        # Add the integral with the number of points as a key to the return integrals.
        integral = sorted_integrals[(degree, scheme)]
        ffc_assert(len_weights not in integrals, \
                   "This number of points is already present in the integrals: " + repr(integrals))
        integrals[len_weights] = integral

        # --------- Analyse UFL elements in integral

        # Get all unique elements in integral.
        ufl_elements = [
            form_data.element_replace_map[e]
            for e in extract_unique_elements(integral)
        ]

        # Insert elements for x and J
        domain = integral.ufl_domain()  # FIXME: For all domains to be sure? Better to rewrite though.
        x_element = domain.ufl_coordinate_element()
        if x_element not in ufl_elements:
            if integral_type == "custom":
                # FIXME: Not yet implemented, in progress
                warning(
                    "Vector elements not yet supported in custom integrals so element for coordinate function x will not be generated."
                )
            else:
                ufl_elements.append(x_element)

        # Find all CellAvg and FacetAvg in integrals and extract elements
        for avg, AvgType in (("cell", CellAvg), ("facet", FacetAvg)):
            expressions = extract_type(integral, AvgType)
            avg_elements[avg] = [
                form_data.element_replace_map[e] for expr in expressions
                for e in extract_unique_elements(expr)
            ]

        # Find the highest number of derivatives needed for each element
        num_derivatives = _find_element_derivatives(
            integral.integrand(), ufl_elements, form_data.element_replace_map)
        # Need at least 1 for the Jacobian
        num_derivatives[x_element] = max(num_derivatives.get(x_element, 0), 1)

        # --------- Evaluate FIAT elements in quadrature points and store in tables

        # Add the number of points to the psi tables dictionary.
        ffc_assert(len_weights not in psi_tables, \
                   "This number of points is already present in the psi table: " + repr(psi_tables))
        psi_tables[len_weights] = {}

        # Loop FIAT elements and tabulate basis as usual.
        for ufl_element in ufl_elements:
            fiat_element = create_element(ufl_element)

            # Tabulate table of basis functions and derivatives in points
            psi_table = _tabulate_psi_table(integral_type, cell, fiat_element,
                                            num_derivatives[ufl_element],
                                            points)

            # Insert table into dictionary based on UFL elements. (None=not averaged)
            psi_tables[len_weights][ufl_element] = {None: psi_table}

    # Loop over elements found in CellAvg and tabulate basis averages
    len_weights = 1
    for avg in ("cell", "facet"):
        # Doesn't matter if it's exterior or interior
        if avg == "cell":
            avg_integral_type = "cell"
        elif avg == "facet":
            avg_integral_type = "exterior_facet"

        for element in avg_elements[avg]:
            fiat_element = create_element(element)

            # Make quadrature rule and get points and weights.
            (points, weights) = create_quadrature_points_and_weights(
                avg_integral_type, cell, element.degree(), "default")
            wsum = sum(weights)

            # Tabulate table of basis functions and derivatives in points
            entity_psi_tables = _tabulate_psi_table(avg_integral_type, cell,
                                                    fiat_element, 0, points)
            rank = len(element.value_shape())

            # Hack, duplicating table with per-cell values for each facet in the case of cell_avg(f) in a facet integral
            actual_entities = _tabulate_entities(integral_type, cell)
            if len(actual_entities) > len(entity_psi_tables):
                assert len(entity_psi_tables) == 1
                assert avg_integral_type == "cell"
                assert "facet" in integral_type
                v, = sorted(entity_psi_tables.values())
                entity_psi_tables = dict((e, v) for e in actual_entities)

            for entity, deriv_table in sorted(entity_psi_tables.items()):
                deriv, = sorted(deriv_table.keys())  # Not expecting derivatives of averages
                psi_table = deriv_table[deriv]

                if rank:
                    # Compute numeric integral
                    num_dofs, num_components, num_points = psi_table.shape
                    ffc_assert(num_points == len(weights),
                               "Weights and table shape does not match.")
                    avg_psi_table = numpy.asarray(
                        [[[numpy.dot(psi_table[j, k, :], weights) / wsum]
                          for k in range(num_components)]
                         for j in range(num_dofs)])
                else:
                    # Compute numeric integral
                    num_dofs, num_points = psi_table.shape
                    ffc_assert(num_points == len(weights),
                               "Weights and table shape does not match.")
                    avg_psi_table = numpy.asarray(
                        [[numpy.dot(psi_table[j, :], weights) / wsum]
                         for j in range(num_dofs)])

                # Insert table into dictionary based on UFL elements.
                insert_nested_dict(psi_tables,
                                   (len_weights, element, avg, entity, deriv),
                                   avg_psi_table)

    return (integrals, psi_tables, quadrature_rules)
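
The averaging branch at the end computes, for each basis function, a weighted mean of its values over the quadrature points. A small numpy sketch of the scalar (rank 0) case with invented sizes:

import numpy

num_dofs, num_points = 3, 4
psi_table = numpy.random.rand(num_dofs, num_points)  # basis values at the quadrature points
weights = numpy.full(num_points, 0.25)
wsum = sum(weights)

avg_psi_table = numpy.asarray(
    [[numpy.dot(psi_table[j, :], weights) / wsum] for j in range(num_dofs)])
assert avg_psi_table.shape == (num_dofs, 1)          # one averaged value per basis function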
Example #26
def _determine_representation(integral_metadatas, ida, form_data,
                              form_r_family, parameters):
    "Determine one unique representation considering all integrals together."

    # Extract unique representation among these single-domain
    # integrals (Generating code with different representations within
    # a single tabulate_tensor is considered not worth the effort)
    representations = set(md["representation"] for md in integral_metadatas
                          if md["representation"] != "auto")
    optimize_values = set(md["optimize"] for md in integral_metadatas)
    precision_values = set(md["precision"] for md in integral_metadatas)

    if len(representations) > 1:
        error(
            "Integral representation must be equal within each sub domain or 'auto', got %s."
            % (str(sorted(str(v) for v in representations)), ))
    if len(optimize_values) > 1:
        error(
            "Integral 'optimize' metadata must be equal within each sub domain or not set, got %s."
            % (str(sorted(str(v) for v in optimize_values)), ))
    if len(precision_values) > 1:
        error(
            "Integral 'precision' metadata must be equal within each sub domain or not set, got %s."
            % (str(sorted(str(v) for v in precision_values)), ))

    # The one and only non-auto representation found, or get from parameters
    r, = representations or (parameters["representation"], )
    o, = optimize_values or (parameters["optimize"], )
    # FIXME: Default param value is zero which is not interpreted well by tsfc!
    p, = precision_values or (parameters["precision"], )

    # If it's still auto, try to determine which representation is
    # best for these integrals
    if r == "auto":
        # Find representations compatible with these integrals
        compatible = _find_compatible_representations(
            ida.integrals, form_data.unique_sub_elements)
        # Pick the one compatible or default to uflacs
        if len(compatible) == 0:
            error("Found no representation capable of compiling this form.")
        elif len(compatible) == 1:
            r, = compatible
        else:
            # NOTE: Need to pick the same default as in
            # _extract_representation_family
            if form_r_family == "uflacs":
                r = "uflacs"
            elif form_r_family == "tsfc":
                r = "tsfc"
            elif form_r_family == "quadrature":
                r = "quadrature"
            else:
                error("Invalid form representation family %s." %
                      (form_r_family, ))
        info("representation:    auto --> %s" % r)
    else:
        info("representation:    %s" % r)

    if p is None:
        p = default_precision

    # Hack to override representation with environment variable
    forced_r = os.environ.get("FFC_FORCE_REPRESENTATION")
    if forced_r:
        r = forced_r
        warning(
            "representation:    forced by $FFC_FORCE_REPRESENTATION to '%s'" %
            r)

    return r, o, p
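
The pattern above is: collect one metadata value per integral, require that all non-'auto' values agree, and otherwise fall back to a parameter default via the one-element-unpacking idiom. A minimal standalone sketch of that pattern with invented metadata and defaults (not FFC's actual parameter set):

integral_metadatas = [
    {"representation": "auto", "optimize": True},
    {"representation": "uflacs", "optimize": True},
]
defaults = {"representation": "uflacs", "optimize": False}

representations = set(md["representation"] for md in integral_metadatas
                      if md["representation"] != "auto")
if len(representations) > 1:
    raise ValueError("Conflicting representations: %s" % sorted(representations))

# The one and only non-auto value found, or the parameter default
r, = representations or (defaults["representation"], )
print(r)  # -> uflacs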
Example #27
    def generate_snippets(self, L, ir, parameters):
        "Generate code snippets for each keyword found in templates."
        snippets = {}
        for kw in self._keywords:
            handlerstr = "%s.%s" % (self.__class__.__name__, kw)

            # Check that attribute self.<keyword> is available
            if not hasattr(self, kw):
                error("Missing handler %s." % (handlerstr, ))

            # Call self.<keyword>(*args) to get value to insert in snippets
            method = getattr(self, kw)
            vn = method.__code__.co_varnames[:method.__code__.co_argcount]
            file_line = "%s:%s" % (method.__code__.co_filename,
                                   method.__code__.co_firstlineno)

            #if handlerstr == "ufc_dofmap.create":
            #    import ipdb; ipdb.set_trace()

            # Always pass L
            assert vn[:2] == ("self", "L")
            vn = vn[2:]
            args = (L, )

            # Either pass full ir or extract ir value with keyword given by argument name
            if vn[0] == "ir":
                args += (ir, )
            elif vn[0] in ir:
                args += (ir[vn[0]], )
            else:
                error("Cannot find key '%s' in ir, argument to %s at %s." %
                      (vn[0], handlerstr, file_line))
            vn = vn[1:]

            # Optionally pass parameters
            if vn == ("parameters", ):
                args += (parameters, )
            elif vn:
                error("Invalid argument names %s to %s at %s." %
                      (vn, handlerstr, file_line))

            # Call handler
            value = method(*args)

            if isinstance(value, list):
                value = L.StatementList(value)

            # Indent body and format to str
            if isinstance(value, L.CStatement):
                value = L.Indented(
                    value.cs_format(precision=parameters["precision"]))
                value = format_indented_lines(value)
            elif not isinstance(value, str):
                error(
                    "Expecting code or string, not %s, returned from handler %s at %s."
                    % (type(value), handlerstr, file_line))

            # Store formatted code in snippets dict
            snippets[kw] = value

        # Error checking (can detect some bugs early when changing the ufc interface)
        # Get all attributes of subclass class (skip "_foo")
        attrs = set(
            name for name in dir(self)
            if not (name.startswith("_") or name.startswith("generate")))
        # Get all attributes of this base class (skip "_foo" and "generate*")
        base_attrs = set(
            name for name in dir(ufc_generator)
            if not (name.startswith("_") or name.startswith("generate")))
        # The template keywords should not contain any names not among the class attributes
        missing = set(self._keywords) - attrs
        if missing:
            warning("*** Missing generator functions:\n%s" %
                    ('\n'.join(map(str, sorted(missing))), ))
        # The class attributes should not contain any names not among the template keywords
        # (this is strict, a useful check when changing ufc, but can be dropped)
        unused = attrs - set(self._keywords) - base_attrs
        if unused:
            warning("*** Unused generator functions:\n%s" %
                    ('\n'.join(map(str, sorted(unused))), ))

        # Return snippets, a dict of code strings
        return snippets
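
The dispatch above leans on introspection: each handler's positional argument names decide whether it receives the full ir dict, a single ir entry, or the parameters. A cut-down standalone sketch of that trick; the Generator class, its handlers and the ir keys below are hypothetical, not part of FFC:

class Generator:
    def classname(self, L, ir):       # asks for the full ir
        return "class " + ir["classname"]

    def degree(self, L, degree):      # asks for ir["degree"] only
        return "degree = %d" % degree

def call_handler(obj, name, L, ir):
    method = getattr(obj, name)
    vn = method.__code__.co_varnames[:method.__code__.co_argcount]
    assert vn[:2] == ("self", "L")
    vn = vn[2:]
    args = (L, )
    if vn and vn[0] == "ir":
        args += (ir, )          # pass the whole ir
    elif vn and vn[0] in ir:
        args += (ir[vn[0]], )   # pass just the requested ir entry
    return method(*args)

g = Generator()
ir = {"classname": "poisson_form", "degree": 2}
print(call_handler(g, "classname", None, ir))  # -> class poisson_form
print(call_handler(g, "degree", None, ir))     # -> degree = 2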
Example #28
def _tabulate_tensor(ir, prefix, parameters):
    "Generate code for a single integral (tabulate_tensor())."

    # Prefetch formatting to speedup code generation
    f_comment = format["comment"]
    f_G = format["geometry constant"]
    f_const_double = format["assign"]
    f_switch = format["switch"]
    f_float = format["float"]
    f_assign = format["assign"]
    f_A = format["element tensor"]
    f_r = format["free indices"][0]
    f_loop = format["generate loop"]
    f_int = format["int"]
    f_facet = format["facet"]

    # Get data
    opt_par = ir["optimise_parameters"]
    integral_type = ir["integral_type"]
    gdim = ir["geometric_dimension"]
    tdim = ir["topological_dimension"]
    num_facets = ir["num_facets"]
    num_vertices = ir["num_vertices"]
    prim_idims = ir["prim_idims"]
    integrals = ir["trans_integrals"]
    geo_consts = ir["geo_consts"]
    oriented = ir["needs_oriented"]
    element_data = ir["element_data"]
    num_cells = ir["num_cells"]

    # Create sets of used variables
    used_weights = set()
    used_psi_tables = set()
    used_nzcs = set()
    trans_set = set()
    sets = [used_weights, used_psi_tables, used_nzcs, trans_set]

    affine_tables = {}  # TODO: This is not populated anywhere, remove?
    quadrature_rules = ir["quadrature_rules"]

    operations = []
    if integral_type == "cell":

        # Generate code for computing element tensor
        tensor_code, mem_code, num_ops = _generate_element_tensor(
            integrals, sets, opt_par, gdim, tdim)
        tensor_code = "\n".join(tensor_code)

        # Set operations equal to num_ops (for printing info on operations).
        operations.append([num_ops])

        # Generate code for basic geometric quantities
        jacobi_code = ""
        jacobi_code += format["compute_jacobian"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += format["compute_jacobian_inverse"](tdim, gdim)
        if oriented:
            jacobi_code += format["orientation"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += format["scale factor snippet"]

        # Generate code for cell volume and circumradius
        jacobi_code += "\n\n" + format["generate cell volume"](tdim, gdim,
                                                               integral_type)
        jacobi_code += "\n\n" + format["generate circumradius"](tdim, gdim,
                                                                integral_type)

    elif integral_type == "exterior_facet":

        # Iterate over facets
        cases = [None for i in range(num_facets)]
        for i in range(num_facets):
            # Update transformer with facets and generate case code +
            # set of used geometry terms.
            c, mem_code, ops = _generate_element_tensor(
                integrals[i], sets, opt_par, gdim, tdim)
            case = [
                f_comment(
                    "Total number of operations to compute element tensor (from this point): %d"
                    % ops)
            ]
            case += c
            cases[i] = "\n".join(case)

            # Save number of operations (for printing info on
            # operations).
            operations.append([i, ops])

        # Generate tensor code for all cases using a switch.
        tensor_code = f_switch(f_facet(None), cases)

        # Generate code for basic geometric quantities
        jacobi_code = ""
        jacobi_code += format["compute_jacobian"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += format["compute_jacobian_inverse"](tdim, gdim)
        if oriented:
            jacobi_code += format["orientation"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += "\n\n" + format["facet determinant"](tdim, gdim)
        jacobi_code += "\n\n" + format["generate normal"](tdim, gdim,
                                                          integral_type)
        jacobi_code += "\n\n" + format["generate facet area"](tdim, gdim)
        if tdim == 3:
            jacobi_code += "\n\n" + format["generate min facet edge length"](
                tdim, gdim)
            jacobi_code += "\n\n" + format["generate max facet edge length"](
                tdim, gdim)

        # Generate code for cell volume and circumradius
        jacobi_code += "\n\n" + format["generate cell volume"](tdim, gdim,
                                                               integral_type)
        jacobi_code += "\n\n" + format["generate circumradius"](tdim, gdim,
                                                                integral_type)

    elif integral_type == "interior_facet":

        # Modify the dimensions of the primary indices because we have
        # a macro element
        prim_idims = [d * 2 for d in prim_idims]

        # Iterate over combinations of facets
        cases = [[None for j in range(num_facets)] for i in range(num_facets)]
        for i in range(num_facets):
            for j in range(num_facets):
                # Update transformer with facets and generate case
                # code + set of used geometry terms.
                c, mem_code, ops = _generate_element_tensor(
                    integrals[i][j], sets, opt_par, gdim, tdim)
                case = [
                    f_comment(
                        "Total number of operations to compute element tensor (from this point): %d"
                        % ops)
                ]
                case += c
                cases[i][j] = "\n".join(case)

                # Save number of operations (for printing info on
                # operations).
                operations.append([i, j, ops])

        # Generate tensor code for all cases using a switch.
        tensor_code = f_switch(
            f_facet("+"),
            [f_switch(f_facet("-"), cases[i]) for i in range(len(cases))])

        # Generate code for basic geometric quantities
        jacobi_code = ""
        for _r in ["+", "-"]:
            jacobi_code += format["compute_jacobian"](tdim, gdim, r=_r)
            jacobi_code += "\n"
            jacobi_code += format["compute_jacobian_inverse"](tdim, gdim, r=_r)
            if oriented:
                jacobi_code += format["orientation"](tdim, gdim, r=_r)
            jacobi_code += "\n"
        jacobi_code += "\n\n" + format["facet determinant"](tdim, gdim, r="+")
        jacobi_code += "\n\n" + format["generate normal"](tdim, gdim,
                                                          integral_type)
        jacobi_code += "\n\n" + format["generate facet area"](tdim, gdim)
        if tdim == 3:
            jacobi_code += "\n\n" + format["generate min facet edge length"](
                tdim, gdim, r="+")
            jacobi_code += "\n\n" + format["generate max facet edge length"](
                tdim, gdim, r="+")

        # Generate code for cell volume and circumradius
        jacobi_code += "\n\n" + format["generate cell volume"](tdim, gdim,
                                                               integral_type)
        jacobi_code += "\n\n" + format["generate circumradius"](tdim, gdim,
                                                                integral_type)

    elif integral_type == "vertex":

        # Iterate over vertices
        cases = [None for i in range(num_vertices)]
        for i in range(num_vertices):
            # Update transformer with vertices and generate case code
            # + set of used geometry terms.
            c, mem_code, ops = _generate_element_tensor(
                integrals[i], sets, opt_par, gdim, tdim)
            case = [
                f_comment(
                    "Total number of operations to compute element tensor (from this point): %d"
                    % ops)
            ]
            case += c
            cases[i] = "\n".join(case)

            # Save number of operations (for printing info on
            # operations).
            operations.append([i, ops])

        # Generate tensor code for all cases using a switch.
        tensor_code = f_switch(format["vertex"], cases)

        # Generate code for basic geometric quantities
        jacobi_code = ""
        jacobi_code += format["compute_jacobian"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += format["compute_jacobian_inverse"](tdim, gdim)
        if oriented:
            jacobi_code += format["orientation"](tdim, gdim)
        jacobi_code += "\n"
        jacobi_code += "\n\n" + format["facet determinant"](
            tdim, gdim)  # FIXME: This is not defined in a point???

    elif integral_type in custom_integral_types:

        # Set number of cells
        if integral_type == "cutcell":
            num_cells = 1
        elif integral_type == "interface":
            num_cells = 2
        elif integral_type == "overlap":
            num_cells = 2

        # Warn that more than two cells is only partly supported.
        # The missing piece is to couple multiple cells to
        # restrictions other than '+' and '-'.
        if num_cells > 2:
            warning(
                "Custom integrals with more than two cells only partly supported."
            )

        # Modify the dimensions of the primary indices because we have a macro element
        if num_cells == 2:
            prim_idims = [d * 2 for d in prim_idims]

        # Check whether we need to generate facet normals
        generate_custom_facet_normal = num_cells == 2

        # Generate code for computing element tensor
        tensor_code, mem_code, num_ops = _generate_element_tensor(
            integrals, sets, opt_par, gdim, tdim, generate_custom_facet_normal)

        tensor_code = "\n".join(tensor_code)

        # Set operations equal to num_ops (for printing info on
        # operations).
        operations.append([num_ops])

        # FIXME: Jacobi code is only needed when we use cell volume or
        # circumradius.
        # FIXME: Does not seem to be removed by remove_unused.

        # Generate code for basic geometric quantities
        jacobi_code = ""
        for i in range(num_cells):
            r = i if num_cells > 1 else None
            jacobi_code += "\n"
            jacobi_code += f_comment(
                "--- Compute geometric quantities on cell %d ---" % i)
            jacobi_code += "\n\n"
            if num_cells > 1:
                jacobi_code += f_comment("Extract vertex coordinates\n")
                jacobi_code += format["extract_cell_coordinates"](
                    (tdim + 1) * gdim * i, r=i)
                jacobi_code += "\n\n"
            jacobi_code += format["compute_jacobian"](tdim, gdim, r=r)
            jacobi_code += "\n"
            jacobi_code += format["compute_jacobian_inverse"](tdim, gdim, r=r)
            jacobi_code += "\n"
            jacobi_code += format["generate cell volume"](
                tdim, gdim, integral_type, r=r if num_cells > 1 else None)
            jacobi_code += "\n"
            jacobi_code += format["generate circumradius"](
                tdim, gdim, integral_type, r=r if num_cells > 1 else None)
            jacobi_code += "\n"

    else:
        error("Unhandled integral type: " + str(integral_type))

    # After we have generated the element code for all facets we can
    # remove the unused transformations.
    common = [remove_unused(jacobi_code, trans_set)]

    # FIXME: After introduction of custom integrals, the common code
    # here is not really common anymore. Think about how to
    # restructure this function.

    # Add common code except for custom integrals
    if integral_type not in custom_integral_types:
        common += _tabulate_weights(
            [quadrature_rules[p] for p in sorted(used_weights)])

        # Add common code for updating tables
        name_map = ir["name_map"]
        tables = ir["unique_tables"]
        tables.update(
            affine_tables)  # TODO: This is not populated anywhere, remove?
        common += _tabulate_psis(tables, used_psi_tables, name_map, used_nzcs,
                                 opt_par, integral_type, gdim)

    # Add special tabulation code for custom integral
    else:
        common += _evaluate_basis_at_quadrature_points(used_psi_tables, gdim,
                                                       element_data, prefix,
                                                       num_vertices, num_cells)

    # Reset the element tensor (array 'A' given as argument to
    # tabulate_tensor() by the assembler); functionals are handled as
    # a special case below.
    common += [f_comment("Reset values in the element tensor.")]
    if prim_idims == []:
        common += [f_assign(f_A(f_int(0)), f_float(0))]
    else:
        dim = functools.reduce(lambda v, u: v * u, prim_idims)
        common += f_loop([f_assign(f_A(f_r), f_float(0))], [(f_r, 0, dim)])

    # Create the constant geometry declarations (only generated if the
    # 'simplify expressions' optimisation is enabled).
    geo_ops, geo_code = generate_aux_constants(geo_consts, f_G, f_const_double)
    if geo_code:
        common += [
            f_comment(
                "Number of operations to compute geometry constants: %d." %
                geo_ops)
        ]
        common += [
            format["declaration"](format["float declaration"],
                                  f_G(len(geo_consts)))
        ]
        common += geo_code

    # Add comments.
    common += [
        "",
        f_comment("Compute element tensor using UFL quadrature representation")
    ]
    common += [
        f_comment(
            "Optimisations: %s" %
            ", ".join([str((k, opt_par[k])) for k in sorted(opt_par.keys())]))
    ]

    for ops in operations:
        # Add geo ops count to integral ops count for writing info.
        if isinstance(ops[-1], int):
            ops[-1] += geo_ops

    return "\n".join(common) + "\n" + tensor_code
Example #29
def _attach_integral_metadata(form_data, parameters):
    "Attach integral metadata"

    # Recognized metadata keys
    metadata_keys = ("representation", "quadrature_degree", "quadrature_rule")

    # Iterate over integral collections
    quad_schemes = []
    for ida in form_data.integral_data:
        # TODO: Is it possible to detach this from IntegralData? It's a bit strange from the ufl side.
        common_metadata = ida.metadata

        # Iterate over integrals
        integral_metadatas = []
        for integral in ida.integrals:

            # Fill in integral metadata with default values
            # NB! This modifies the metadata of the input integral data!
            integral_metadata = integral.metadata() or {}
            for key in metadata_keys:
                if key not in integral_metadata:
                    integral_metadata[key] = parameters[key]

            # Automatic selection of representation
            r = integral_metadata["representation"]

            # Hack to override representation with environment variable
            forced_r = os.environ.get("FFC_FORCE_REPRESENTATION")
            if forced_r:
                r = forced_r
                warning("representation:    forced by $FFC_FORCE_REPRESENTATION to '%s'" % r)
            elif r == "auto":
                r = _auto_select_representation(integral,
                                                form_data.unique_sub_elements,
                                                form_data.function_replace_map)
                info("representation:    auto --> %s" % r)
            elif r in ("quadrature", "tensor", "uflacs"):
                info("representation:    %s" % r)
            else:
                info("Valid choices are 'tensor', 'quadrature', 'uflacs', or 'auto'.")
                error("Illegal choice of representation for integral: " + str(r))
            integral_metadata["representation"] = r

            # Automatic selection of quadrature degree
            qd = integral_metadata["quadrature_degree"]
            # Special case: handling -1 as "auto" for quadrature_degree
            if qd in ("auto", -1):
                qd = _auto_select_quadrature_degree(integral.integrand(),
                                                    r,
                                                    form_data.unique_sub_elements,
                                                    form_data.element_replace_map)
                info("quadrature_degree: auto --> " + str(qd))
            else:
                if isinstance(qd, tuple):
                    qd = tuple(int(q) for q in qd)
                else:
                    qd = int(qd)
                info("quadrature_degree: " + str(qd))
            # Validate degree (qd may be an int or a tuple of ints)
            qd_values = qd if isinstance(qd, tuple) else (qd, )
            if not all(q >= 0 for q in qd_values):
                info("Valid choices are nonnegative integers or 'auto'.")
                error("Illegal quadrature degree for integral: " + str(qd))
            tdim = integral.ufl_domain().topological_dimension()
            _check_quadrature_degree(qd, tdim)

            integral_metadata["quadrature_degree"] = qd
            assert isinstance(qd, (int, tuple))

            # Automatic selection of quadrature rule
            qr = integral_metadata["quadrature_rule"]
            if qr == "auto":
                # Just use default for now.
                qr = "default"
                info("quadrature_rule:   auto --> %s" % qr)
            elif qr in ("default", "canonical", "vertex"):
                info("quadrature_rule:   %s" % qr)
            else:
                info("Valid choices are 'default', 'canonical', 'vertex', and 'auto'.")
                error("Illegal choice of quadrature rule for integral: " + str(qr))
            integral_metadata["quadrature_rule"] = qr
            quad_schemes.append(qr)

            # Append to list of metadata
            integral_metadatas.append(integral_metadata)

        # Extract common metadata for integral collection
        if len(ida.integrals) == 1:
            common_metadata.update(integral_metadatas[0])
        else:
            # Check that representation is the same
            # (Generating code with different representations within a
            # single tabulate_tensor is considered not worth the effort)
            representations = [md["representation"] for md in integral_metadatas]
            if all_equal(representations):
                r = representations[0]
            else:
                r = "quadrature"
                info("Integral representation must be equal within each sub domain, using %s representation." % r)

            # Check that quadrature degree is the same
            # FIXME: Why must the degree within a sub domain be the same?
            #        This makes no sense considering that num_points is
            #        used as a key all over in quadrature representation...
            quadrature_degrees = [md["quadrature_degree"] for md in integral_metadatas]
            if all_equal(quadrature_degrees):
                qd = quadrature_degrees[0]
            else:
                if isinstance(quadrature_degrees[0], tuple):
                    qd = tuple(map(max, zip(*quadrature_degrees)))
                else:
                    qd = max(quadrature_degrees)
                info("Quadrature degree must be equal within each sub domain, using degree " + str(qd))
            assert isinstance(qd, (int, tuple))

            # Check that quadrature rule is the same
            # FIXME: Why must the rule within a sub domain be the same?
            #        To support this would be more work since num_points is used
            #        to identify quadrature rules in the quadrature representation.
            quadrature_rules = [md["quadrature_rule"] for md in integral_metadatas]
            if all_equal(quadrature_rules):
                qr = quadrature_rules[0]
            else:
                qr = "canonical"
                info("Quadrature rule must be equal within each sub domain, using %s rule." % qr)

            # Update common metadata
            assert isinstance(qd, (int, tuple))
            common_metadata["representation"] = r
            common_metadata["quadrature_degree"] = qd
            common_metadata["quadrature_rule"] = qr

    # Update scheme for QuadratureElements
    if quad_schemes and all_equal(quad_schemes):
        scheme = quad_schemes[0]
    else:
        scheme = "canonical"
        info("Quadrature rule must be equal within each sub domain, using %s rule." % scheme)

    # FIXME: This modifies the elements depending on the form compiler parameters,
    #        this is a serious breach of the immutability of ufl objects, since the
    #        element quad scheme is part of the signature and hash of the element...
    for element in form_data.unique_sub_elements:
        if element.family() == "Quadrature":
            element._quad_scheme = scheme
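
When the integrals in one subdomain disagree on quadrature_degree, the code above merges them by taking the maximum (componentwise via zip for tuple-valued degrees). A standalone sketch of that merge; the degrees are invented and all_equal is re-implemented here purely for illustration (the real helper is an FFC utility):

def all_equal(seq):
    return all(x == seq[0] for x in seq)

quadrature_degrees = [(2, 3), (4, 1)]
if all_equal(quadrature_degrees):
    qd = quadrature_degrees[0]
elif isinstance(quadrature_degrees[0], tuple):
    qd = tuple(map(max, zip(*quadrature_degrees)))
else:
    qd = max(quadrature_degrees)
print(qd)  # -> (4, 3)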
Example #30
def tabulate_basis(sorted_integrals, form_data, itg_data):
    "Tabulate the basisfunctions and derivatives."

    # MER: Note to newbies: this code assumes that each integral in
    # the dictionary of sorted_integrals that enters here has a
    # unique number of quadrature points ...

    # Initialise return values.
    quadrature_rules = {}
    psi_tables = {}
    integrals = {}
    avg_elements = { "cell": [], "facet": [] }

    # Get some useful variables in short form
    integral_type = itg_data.integral_type
    cell = itg_data.domain.cell()

    # Create canonical ordering of quadrature rules
    rules = sorted(sorted_integrals.keys())

    # Loop the quadrature points and tabulate the basis values.
    for degree, scheme in rules:

        # --------- Creating quadrature rule
        # Make quadrature rule and get points and weights.
        (points, weights) = create_quadrature_points_and_weights(integral_type, cell, degree, scheme)

        # The TOTAL number of weights/points
        len_weights = None if weights is None else len(weights)

        # Add points and rules to dictionary
        ffc_assert(len_weights not in quadrature_rules,
                   "This number of points is already present in the weight table: " + repr(quadrature_rules))
        quadrature_rules[len_weights] = (weights, points)


        # --------- Store integral
        # Add the integral with the number of points as a key to the return integrals.
        integral = sorted_integrals[(degree, scheme)]
        ffc_assert(len_weights not in integrals, \
                   "This number of points is already present in the integrals: " + repr(integrals))
        integrals[len_weights] = integral


        # --------- Analyse UFL elements in integral

        # Get all unique elements in integral.
        ufl_elements = [form_data.element_replace_map[e]
                        for e in extract_unique_elements(integral)]

        # Insert elements for x and J
        domain = integral.ufl_domain() # FIXME: For all domains to be sure? Better to rewrite though.
        x_element = domain.ufl_coordinate_element()
        if x_element not in ufl_elements:
            if integral_type == "custom":
                # FIXME: Not yet implemented, in progress
                warning("Vector elements not yet supported in custom integrals so element for coordinate function x will not be generated.")
            else:
                ufl_elements.append(x_element)

        # Find all CellAvg and FacetAvg in integrals and extract elements
        for avg, AvgType in (("cell", CellAvg), ("facet", FacetAvg)):
            expressions = extract_type(integral, AvgType)
            avg_elements[avg] = [form_data.element_replace_map[e]
                                 for expr in expressions
                                 for e in extract_unique_elements(expr)]

        # Find the highest number of derivatives needed for each element
        num_derivatives = _find_element_derivatives(integral.integrand(), ufl_elements,
                                                    form_data.element_replace_map)
        # Need at least 1 for the Jacobian
        num_derivatives[x_element] = max(num_derivatives.get(x_element,0), 1)


        # --------- Evaluate FIAT elements in quadrature points and store in tables

        # Add the number of points to the psi tables dictionary.
        ffc_assert(len_weights not in psi_tables, \
                   "This number of points is already present in the psi table: " + repr(psi_tables))
        psi_tables[len_weights] = {}

        # Loop FIAT elements and tabulate basis as usual.
        for ufl_element in ufl_elements:
            fiat_element = create_element(ufl_element)

            # Tabulate table of basis functions and derivatives in points
            psi_table = _tabulate_psi_table(integral_type, cell, fiat_element,
                                            num_derivatives[ufl_element], points)

            # Insert table into dictionary based on UFL elements. (None=not averaged)
            psi_tables[len_weights][ufl_element] = { None: psi_table }


    # Loop over elements found in CellAvg and tabulate basis averages
    len_weights = 1
    for avg in ("cell", "facet"):
        # Doesn't matter if it's exterior or interior
        if avg == "cell":
            avg_integral_type = "cell"
        elif avg == "facet":
            avg_integral_type = "exterior_facet"

        for element in avg_elements[avg]:
            fiat_element = create_element(element)

            # Make quadrature rule and get points and weights.
            (points, weights) = create_quadrature_points_and_weights(avg_integral_type, cell, element.degree(), "default")
            wsum = sum(weights)

            # Tabulate table of basis functions and derivatives in points
            entity_psi_tables = _tabulate_psi_table(avg_integral_type, cell, fiat_element, 0, points)
            rank = len(element.value_shape())

            # Hack, duplicating table with per-cell values for each facet in the case of cell_avg(f) in a facet integral
            actual_entities = _tabulate_entities(integral_type, cell)
            if len(actual_entities) > len(entity_psi_tables):
                assert len(entity_psi_tables) == 1
                assert avg_integral_type == "cell"
                assert "facet" in integral_type
                v, = sorted(entity_psi_tables.values())
                entity_psi_tables = dict((e, v) for e in actual_entities)

            for entity, deriv_table in sorted(entity_psi_tables.items()):
                deriv, = sorted(deriv_table.keys()) # Not expecting derivatives of averages
                psi_table = deriv_table[deriv]

                if rank:
                    # Compute numeric integral
                    num_dofs, num_components, num_points = psi_table.shape
                    ffc_assert(num_points == len(weights), "Weights and table shape do not match.")
                    avg_psi_table = numpy.asarray([[[numpy.dot(psi_table[j,k,:], weights) / wsum]
                                                   for k in range(num_components)]
                                                   for j in range(num_dofs)])
                else:
                    # Compute numeric integral
                    num_dofs, num_points = psi_table.shape
                    ffc_assert(num_points == len(weights), "Weights and table shape do not match.")
                    avg_psi_table = numpy.asarray(
                        [[numpy.dot(psi_table[j, :], weights) / wsum]
                         for j in range(num_dofs)])

                # Insert table into dictionary based on UFL elements.
                insert_nested_dict(psi_tables,
                                   (len_weights, element, avg, entity, deriv),
                                   avg_psi_table)

    return (integrals, psi_tables, quadrature_rules)
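
The averaged tables above are filed under a tuple key path (len_weights, element, avg, entity, deriv) via insert_nested_dict. A minimal re-implementation of such a helper, for illustration only (the real FFC utility may differ in details), together with a toy insertion using placeholder keys:

def insert_nested_dict(d, keys, value):
    "Insert value into nested dicts d, creating intermediate dicts along keys."
    for key in keys[:-1]:
        d = d.setdefault(key, {})
    d[keys[-1]] = value

psi_tables = {}
insert_nested_dict(psi_tables, (1, "P1", "cell", 0, ()), [[0.5], [0.5]])
print(psi_tables[1]["P1"]["cell"][0][()])  # -> [[0.5], [0.5]]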