Beispiel #1
0
    def visit(self, op, x):
        """
        Fold the negation of a scalar constant.

        When x is a constant scalar, replace op with the precomputed
        constant -x.const instead of leaving a runtime negate in the graph.

        Arguments:
          op: The op being visited (presumably a negate — confirm at caller).
          x: The op's input tensor.
        """
        foldable = x.is_scalar and x.is_constant
        if foldable:
            negated = constant(-x.const)
            self.replace_op(op, negated)
Beispiel #2
0
    def visit(self, op, x):
        """
        Fold a reduction over a scalar constant.

        When x is a constant scalar, the reduction over
        ``op.reduction_axes`` is ``power(x.const, reduction_axes.size)``,
        so replace op with that precomputed constant.

        Arguments:
          op: The reduction op being visited.
          x: The op's input tensor.
        """
        if not (x.is_scalar and x.is_constant):
            return
        folded = power(x.const, op.reduction_axes.size)
        self.replace_op(op, constant(folded))
Beispiel #3
0
    def visit(self, op):
        """
        Fold a reduction over a scalar constant.

        Unpacks the op's single argument; when it is a constant scalar,
        the reduction equals x.const * reduction_axes.size (a sum of
        ``size`` copies of the scalar), so op is replaced with that
        precomputed constant.

        Arguments:
          op: The reduction op being visited; must have exactly one argument.
        """
        (x,) = op.args
        if not (x.is_scalar and x.is_constant):
            return
        folded = x.const * op.reduction_axes.size
        self.replace_op(op, constant(folded))
Beispiel #4
0
    def visit(self, op, x):
        """
        If x is filled with the same value, then replace the prod op
        with `power`.

        The product of ``reduction_axes.size`` copies of the scalar is
        ``power(x.const, reduction_axes.size)``, so the op is replaced
        with that precomputed constant.

        Arguments:
          op: The prod op being visited.
          x: The op's input tensor; folded only when it is a scalar constant.
        """
        if not (x.is_scalar and x.is_constant):
            return
        product = power(x.const, op.reduction_axes.size)
        self.replace_op(op, constant(product))
Beispiel #5
0
    def visit(self, op, x):
        """
        If x is filled with the same value, then replace the prod op
        with x.const ** reduction_axes.size.

        The replacement constant keeps op's axes so the graph shape is
        unchanged.

        Arguments:
          op: The prod op being visited.
          x: The op's input tensor; folded only when it is a scalar constant.
        """
        if not (x.is_scalar and x.is_constant):
            return
        product = x.const ** op.reduction_axes.size
        self.replace_op(op, constant(product, op.axes))
Beispiel #6
0
    def _deriv(dependent, independent, error=constant(1)):
        """
        Build the op computing [dDependent/dIndependent] applied to error.

        The derivative is a multi-linear function, so it is fully
        determined by its action on an error tensor.

        Args:
            dependent (TensorOp): Op whose derivative is taken.
            independent (TensorOp): Op the derivative is taken with
                respect to.
            error (TensorOp, optional): Error tensor at which the
                derivative is evaluated; must have the same axes as
                dependent. Defaults to a scalar constant 1 (note: the
                default is built once at function-definition time, so
                all calls share that one op — presumably intentional
                for a graph framework).

        Returns:
            TensorOp: Derivative applied to error, broadcast to
            independent's axes.

        Raises:
            ValueError: If error and dependent do not share the same
                set of axes.
        """
        if not error.axes.has_same_axes(dependent.axes):
            raise ValueError(
                "Dependent and error must have the same set of axes")

        grads = dependent.forwarded.adjoints(error)
        key = independent.forwarded.tensor
        if key not in grads:
            # independent does not influence dependent: derivative is zero.
            return constant(0, independent.axes)

        return broadcast(grads[key].forwarded, axes=independent.axes)