Exemplo n.º 1
0
    def graph_implementation(arg_objs, size, data=None):
        """Reduces the atom to an affine expression and list of constraints.

        Parameters
        ----------
        arg_objs : list
            LinExpr for each argument.
        size : tuple
            The size of the resulting expression.
        data :
            Additional data required by the atom.

        Returns
        -------
        tuple
            (LinOp for objective, list of constraints)
        """
        x, y = (Elementwise._promote(arg, size) for arg in arg_objs[:2])
        t = lu.create_var(size)
        # Objective is y - (x + t), i.e. -t - x + y.
        objective = lu.sub_expr(y, lu.sum_expr([x, t]))
        exp_cone = ExpCone(t, x, y)
        nonneg_y = lu.create_geq(y)  # enforces 0 <= y
        return (objective, [exp_cone, nonneg_y])
Exemplo n.º 2
0
def min_elemwise(*args):
    """Elementwise minimum of >= 2 expressions, or of a single list of them.

    Implemented via the identity min(x) == -max(-x).
    """
    if len(args) == 1 and isinstance(args[0], list):
        # A single list argument supplies the operands.
        args = args[0]
    elif len(args) < 2:
        raise TypeError(
            "min_elemwise requires at least two arguments or a list.")
    return -max_elemwise([-Elementwise.cast_to_const(a) for a in args])
Exemplo n.º 3
0
    def _grad(self, values):
        """Gives the (sub/super)gradient of the atom w.r.t. each argument.

        Matrix expressions are vectorized, so the gradient is a matrix.

        Args:
            values: A list of numeric values for the arguments.

        Returns:
            A list of SciPy CSC sparse matrices or None.
        """
        rows = self.args[0].size[0]*self.args[0].size[1]
        cols = self.size[0]*self.size[1]
        if self.p == 0:
            # x^0 is constant, so the gradient is all zeros.
            return [sp.csc_matrix((rows, cols), dtype='float64')]
        # Work on a local reference so we never mutate the caller's list
        # (the original code assigned back into values[0]).
        arg_val = values[0]
        # Outside domain or on boundary.
        if not is_power2(self.p) and np.min(arg_val) <= 0:
            if self.p < 1:
                # Non-differentiable at/below zero.
                return [None]
            else:
                # Round up to zero locally; caller's data is untouched.
                arg_val = np.maximum(arg_val, 0)

        # d/dx x^p = p * x^(p-1), taken elementwise.
        grad_vals = self.p*np.power(arg_val, self.p-1)
        return [Elementwise.elemwise_grad_to_diag(grad_vals, rows, cols)]
Exemplo n.º 4
0
    def _grad(self, values):
        """Gives the (sub/super)gradient of the atom w.r.t. each argument.

        Matrix expressions are vectorized, so the gradient is a matrix.

        Args:
            values: A list of numeric values for the arguments.

        Returns:
            A list of SciPy CSC sparse matrices or None.
        """
        rows = self.args[0].size[0] * self.args[0].size[1]
        cols = self.size[0] * self.size[1]
        if self.p == 0:
            # x^0 is constant, so the gradient is all zeros.
            return [sp.csc_matrix((rows, cols), dtype='float64')]
        # Use a local reference instead of writing back into values[0],
        # so the caller's argument list is never mutated.
        arg_val = values[0]
        # Outside domain or on boundary.
        if not is_power2(self.p) and np.min(arg_val) <= 0:
            if self.p < 1:
                # Non-differentiable at/below zero.
                return [None]
            else:
                # Clip up to zero locally; caller's data is untouched.
                arg_val = np.maximum(arg_val, 0)

        # d/dx x^p = p * x^(p-1), taken elementwise.
        grad_vals = self.p * np.power(arg_val, self.p - 1)
        return [Elementwise.elemwise_grad_to_diag(grad_vals, rows, cols)]
Exemplo n.º 5
0
    def graph_implementation(arg_objs, size, data=None):
        """Reduces the atom to an affine expression and list of constraints.

        Parameters
        ----------
        arg_objs : list
            LinExpr for each argument.
        size : tuple
            The size of the resulting expression.
        data :
            Additional data required by the atom.

        Returns
        -------
        tuple
            (LinOp for objective, list of constraints)
        """
        x = Elementwise._promote(arg_objs[0], size)
        y = Elementwise._promote(arg_objs[1], size)
        t = lu.create_var(size)
        obj = lu.sub_expr(y, lu.sum_expr([x, t]))  # equals -t - x + y
        # Exponential-cone membership plus nonnegativity of y (0 <= y).
        return (obj, [ExpCone(t, x, y), lu.create_geq(y)])
Exemplo n.º 6
0
Arquivo: log.py Projeto: giserh/cvxpy
    def _grad(self, values):
        """Gives the (sub/super)gradient of the atom w.r.t. each argument.

        Matrix expressions are vectorized, so the gradient is a matrix.

        Args:
            values: A list of numeric values for the arguments.

        Returns:
            A list of SciPy CSC sparse matrices or None.
        """
        arg_size = self.args[0].size
        n_rows = arg_size[0] * arg_size[1]
        n_cols = self.size[0] * self.size[1]
        # log is not differentiable at or below zero.
        if np.min(values[0]) <= 0:
            return [None]
        # d/dx log(x) = 1/x, placed on the diagonal.
        diag_entries = 1.0 / values[0]
        return [Elementwise.elemwise_grad_to_diag(diag_entries, n_rows, n_cols)]
Exemplo n.º 7
0
Arquivo: log.py Projeto: giserh/cvxpy
    def _grad(self, values):
        """Gives the (sub/super)gradient of the atom w.r.t. each argument.

        Matrix expressions are vectorized, so the gradient is a matrix.

        Args:
            values: A list of numeric values for the arguments.

        Returns:
            A list of SciPy CSC sparse matrices or None.
        """
        rows = self.args[0].size[0] * self.args[0].size[1]
        cols = self.size[0] * self.size[1]
        val = values[0]
        if np.min(val) <= 0:
            # Outside the domain of log (or on its boundary): no gradient.
            return [None]
        # Elementwise derivative 1/x on the diagonal.
        return [Elementwise.elemwise_grad_to_diag(1.0 / val, rows, cols)]
Exemplo n.º 8
0
    def graph_implementation(arg_objs, shape, data=None):
        """Reduces the atom to an affine expression and list of constraints.

        Parameters
        ----------
        arg_objs : list
            LinExpr for each argument.
        shape : tuple
            The shape of the resulting expression.
        data :
            Additional data required by the atom.

        Returns
        -------
        tuple
            (LinOp for objective, list of constraints)
        """
        t = lu.create_var(shape)
        # t upper-bounds every (promoted) argument elementwise.
        constraints = [
            lu.create_leq(Elementwise._promote(arg, shape), t)
            for arg in arg_objs
        ]
        return (t, constraints)
Exemplo n.º 9
0
    def graph_implementation(arg_objs, size, data=None):
        """Reduces the atom to an affine expression and list of constraints.

        Parameters
        ----------
        arg_objs : list
            LinExpr for each argument.
        size : tuple
            The size of the resulting expression.
        data :
            Additional data required by the atom.

        Returns
        -------
        tuple
            (LinOp for objective, list of constraints)
        """
        upper_bound = lu.create_var(size)
        # One elementwise inequality per argument: promoted_arg <= upper_bound.
        ineqs = [lu.create_leq(Elementwise._promote(a, size), upper_bound)
                 for a in arg_objs]
        return (upper_bound, ineqs)
Exemplo n.º 10
0
def minimum(*args):
    """Elementwise minimum, expressed as -maximum of the negated arguments."""
    negations = [-Elementwise.cast_to_const(a) for a in args]
    return -maximum(*negations)
Exemplo n.º 11
0
def minimum(*args):
    """Elementwise minimum of a sequence of Expressions."""
    # min(x) == -max(-x), applied elementwise.
    return -maximum(*(-Elementwise.cast_to_const(a) for a in args))
Exemplo n.º 12
0
def min_elemwise(*args):
    """Elementwise minimum via the identity min(x) == -max(-x)."""
    negated = tuple(-Elementwise.cast_to_const(a) for a in args)
    return -max_elemwise(*negated)
Exemplo n.º 13
0
def min_elemwise(*args):
    """Elementwise minimum of the given expressions."""
    # Negate each argument, take the elementwise max, and negate the result.
    return -max_elemwise(*map(lambda e: -Elementwise.cast_to_const(e), args))
Exemplo n.º 14
0
def min_elemwise(*args):
    """Elementwise minimum of >= 2 expressions, or of a single list of them."""
    single_list = len(args) == 1 and isinstance(args[0], list)
    if not single_list and len(args) < 2:
        raise TypeError("min_elemwise requires at least two arguments or a list.")
    if single_list:
        # Unpack the lone list so its entries become the operands.
        args = args[0]
    negations = [-Elementwise.cast_to_const(item) for item in args]
    return -max_elemwise(negations)