    def __new__(cls, a, b):
        a = as_ufl(a)
        b = as_ufl(b)

        # Assertions
        # TODO: A workaround for nonscalar division is enabled in __div__,
        # so maybe this assertion can stay. Some algorithms may need updating.
        if not is_ufl_scalar(a):
            error("Expecting scalar numerator in Division.")
        if not is_true_ufl_scalar(b):
            error("Division by non-scalar is undefined.")
        if isinstance(b, Zero):
            error("Division by zero!")

        # Simplifications 0/b -> 0 and a/1 -> a
        if isinstance(a, Zero) or b == 1:
            return a
        # Simplification "literal a / literal b" -> "literal value of a/b"
        # Avoiding integer division by casting to float
        if isinstance(a, ScalarValue) and isinstance(b, ScalarValue):
            return as_ufl(float(a._value) / float(b._value))
        # Simplification "a / a" -> "1"
        if not a.free_indices() and not a.shape() and a == b:
            return as_ufl(1)

        # construct and initialize a new Division object
        self = AlgebraOperator.__new__(cls)
        self._init(a, b)
        return self
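A minimal usage sketch of the simplifications above (a hypothetical standalone demo, assuming as_ufl and Zero are importable from ufl.constantvalue, the same names this snippet relies on):

from ufl.constantvalue import Zero, as_ufl

a = as_ufl(6)
b = as_ufl(4)
print(a / b)                              # literal/literal folds via float cast: 1.5
print(a / as_ufl(1))                      # a/1 -> a
print(b / b)                              # equal literals fold to 1.0
print(isinstance(as_ufl(0.0) / b, Zero))  # True: 0/b simplifies to Zero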
    def division(self, o, a, b):
        f, fp = a
        g, gp = b
        o = self.reuse_if_possible(o, f, g)

        ufl_assert(is_ufl_scalar(f), "Not expecting nonscalar numerator")
        ufl_assert(is_true_ufl_scalar(g), "Not expecting nonscalar denominator")

        # Quotient rule: d(f/g)/df = 1/g and d(f/g)/dg = -f/g**2 = -o/g,
        # so op = fp/g - (o/g)*gp = (fp - o*gp) / g

        # Get o and gp as scalars, multiply, then wrap as a tensor again
        so, oi = as_scalar(o)
        sgp, gi = as_scalar(gp)
        o_gp = so*sgp
        if oi or gi:
            o_gp = as_tensor(o_gp, oi + gi)
        op = (fp - o_gp) / g

        return (o, op)
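A quick plain-Python sanity check (no UFL involved) that the rearranged update above, op = (fp - o*gp)/g with o = f/g, agrees with the textbook quotient rule (fp*g - f*gp)/g**2; the values are arbitrary placeholders:

f, g = 2.0, 3.0       # arbitrary numerator/denominator values
fp, gp = 0.5, -1.25   # arbitrary values for their derivatives
o = f / g
op = (fp - o * gp) / g
assert abs(op - (fp * g - f * gp) / g**2) < 1e-12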
Example #3
    def division(self, o, fp, gp):
        f, g = o.ufl_operands

        if not is_ufl_scalar(f):
            error("Not expecting nonscalar nominator")
        if not is_true_ufl_scalar(g):
            error("Not expecting nonscalar denominator")

        # Quotient rule: d(f/g)/df = 1/g and d(f/g)/dg = -f/g**2 = -o/g,
        # so op = fp/g - (o/g)*gp = (fp - o*gp) / g

        # Get o and gp as scalars, multiply, then wrap as a tensor
        # again
        so, oi = as_scalar(o)
        sgp, gi = as_scalar(gp)
        o_gp = so * sgp
        if oi or gi:
            o_gp = as_tensor(o_gp, oi + gi)
        op = (fp - o_gp) / g

        return op
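The as_scalar/as_tensor round trip used above can be tried in isolation; this is a hypothetical sketch, assuming as_scalar lives in ufl.tensors (the names v, s, ii, prod, back are illustrative only):

from ufl import as_tensor, as_vector
from ufl.constantvalue import as_ufl
from ufl.tensors import as_scalar  # assumed location of the helper used above

v = as_vector((as_ufl(1.0), as_ufl(2.0)))  # a rank-1 (vector-valued) expression
s, ii = as_scalar(v)           # s = v[i], ii = (i,): scalar view with a free index
prod = s * as_ufl(3.0)         # scalar arithmetic, the free index is carried along
back = as_tensor(prod, ii)     # wrap the indexed scalar back into a rank-1 tensor
print(back.ufl_shape)          # expected: (2,)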
    def division(self, o, a, b):
        f, fp = a
        g, gp = b
        o = self.reuse_if_possible(o, f, g)

        ufl_assert(is_ufl_scalar(f), "Not expecting nonscalar numerator")
        ufl_assert(is_true_ufl_scalar(g),
                   "Not expecting nonscalar denominator")

        # Quotient rule: d(f/g)/df = 1/g and d(f/g)/dg = -f/g**2 = -o/g,
        # so op = fp/g - (o/g)*gp = (fp - o*gp) / g

        # Get o and gp as scalars, multiply, then wrap as a tensor again
        so, oi = as_scalar(o)
        sgp, gi = as_scalar(gp)
        o_gp = so * sgp
        if oi or gi:
            o_gp = as_tensor(o_gp, oi + gi)
        op = (fp - o_gp) / g

        return (o, op)