def graph_implementation(arg_objs, size, data=None):
    """Reduces the atom to an affine expression and list of constraints.

    Introduces a variable t of the output size and enforces
    sum(exp(x - t)) <= 1 along the reduced axis.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    x = arg_objs[0]
    axis = data[0]
    t = lu.create_var(size)
    # Broadcast t up to the size of x so x - t is well defined.
    if axis is None:
        t_bcast = lu.promote(t, x.size)
    elif axis == 0:
        ones_size = (x.size[0], 1)
        ones = lu.create_const(np.ones(ones_size), ones_size)
        t_bcast = lu.mul_expr(ones, t, x.size)
    else:  # axis == 1
        ones_size = (1, x.size[1])
        ones = lu.create_const(np.ones(ones_size), ones_size)
        t_bcast = lu.rmul_expr(t, ones, x.size)
    # Canonicalize exp(x - t) elementwise.
    shifted = lu.sub_expr(x, t_bcast)
    obj, constraints = exp.graph_implementation([shifted], x.size)
    # Collapse the exponentials over the reduced axis.
    if axis is None:
        obj = lu.sum_entries(obj)
    elif axis == 0:
        row_size = (1, x.size[0])
        row_ones = lu.create_const(np.ones(row_size), row_size)
        obj = lu.mul_expr(row_ones, obj, size)
    else:  # axis == 1
        col_size = (x.size[1], 1)
        col_ones = lu.create_const(np.ones(col_size), col_size)
        obj = lu.rmul_expr(obj, col_ones, size)
    # sum(exp(x - t)) <= 1 for each entry of the output.
    upper_bound = lu.create_const(np.ones(size), size)
    constraints += [lu.create_leq(obj, upper_bound)]
    return (t, constraints)
def graph_implementation(arg_objs, size, data=None):
    """Reduces the atom to an affine expression and list of constraints.

    Creates a variable t of the reduced size and constrains
    x <= t with t broadcast back to the size of x.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    x = arg_objs[0]
    axis = data[0]
    if axis is None:
        # Scalar bound over all entries.
        t = lu.create_var((1, 1))
        t_bcast = lu.promote(t, x.size)
    elif axis == 0:
        # One bound per column; replicate down the rows.
        t = lu.create_var((1, x.size[1]))
        ones_size = (x.size[0], 1)
        ones = lu.create_const(np.ones(ones_size), ones_size)
        t_bcast = lu.mul_expr(ones, t, x.size)
    else:  # axis == 1
        # One bound per row; replicate across the columns.
        t = lu.create_var((x.size[0], 1))
        ones_size = (1, x.size[1])
        ones = lu.create_const(np.ones(ones_size), ones_size)
        t_bcast = lu.rmul_expr(t, ones, x.size)
    return (t, [lu.create_leq(x, t_bcast)])
def graph_implementation(self, arg_objs, shape, data=None):
    """Multiply the linear expressions.

    Exactly one operand must be constant for the product to be DCP.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    shape : tuple
        The shape of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)

    Raises
    ------
    DCPError
        If neither operand is constant.
    """
    lhs, rhs = arg_objs[0], arg_objs[1]
    # Dispatch on which side is constant (CVXCanon compatibility).
    if self.args[0].is_constant():
        return (lu.mul_expr(lhs, rhs, shape), [])
    if self.args[1].is_constant():
        return (lu.rmul_expr(lhs, rhs, shape), [])
    raise DCPError("Product of two non-constant expressions is not DCP.")
def graph_implementation(arg_objs, size, data=None):
    """Sum the linear expression's entries.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    x = arg_objs[0]
    axis = data[0]
    if axis is None:
        # Full reduction to a scalar.
        return (lu.sum_entries(x), [])
    if axis == 1:
        # Right-multiply by a ones column vector: sums each row.
        ones_size = (x.size[1], 1)
        ones = lu.create_const(np.ones(ones_size), ones_size)
        summed = lu.rmul_expr(x, ones, size)
    else:  # axis == 0
        # Left-multiply by a ones row vector: sums each column.
        ones_size = (1, x.size[0])
        ones = lu.create_const(np.ones(ones_size), ones_size)
        summed = lu.mul_expr(ones, x, size)
    return (summed, [])
def graph_implementation(self, arg_objs, shape, data=None):
    """Cumulative sum via difference matrix.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    shape : tuple
        The shape of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    # Implicit O(n) definition: introduce Y and require its first-order
    # difference along `axis` to equal the argument,
    # i.e. X = Y[1:,:] - Y[:-1, :].
    Y = lu.create_var(shape)
    axis = data[0]
    dim = shape[axis]
    D = lu.create_const(get_diff_mat(dim, axis), (dim, dim), sparse=True)
    if axis == 0:
        differenced = lu.mul_expr(D, Y)
    else:
        differenced = lu.rmul_expr(Y, D)
    return (Y, [lu.create_eq(arg_objs[0], differenced)])
def graph_implementation(arg_objs, size, data=None):
    """Cumulative sum via difference matrix.

    NOTE(review): the original docstring read "Convolve two vectors",
    but the body is the cumulative-sum canonicalization (identical to
    the sibling cumsum implementation); the docstring and the inline
    difference relation have been corrected accordingly.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    # Implicit O(n) definition:
    # X = Y[1:,:] - Y[:-1,:]
    Y = lu.create_var(size)
    axis = data[0]
    dim = size[axis]
    diff_mat = get_diff_mat(dim, axis)
    diff_mat = lu.create_const(diff_mat, (dim, dim), sparse=True)
    if axis == 0:
        diff = lu.mul_expr(diff_mat, Y, size)
    else:
        diff = lu.rmul_expr(Y, diff_mat, size)
    return (Y, [lu.create_eq(arg_objs[0], diff)])
def graph_implementation(arg_objs, size, data=None):
    """Cumulative sum via difference matrix.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    # Implicit O(n) definition: introduce Y whose first-order difference
    # along `axis` equals the argument, i.e. X = Y[1:,:] - Y[:-1,:].
    Y = lu.create_var(size)
    axis = data[0]
    dim = size[axis]
    D = lu.create_const(get_diff_mat(dim, axis), (dim, dim), sparse=True)
    if axis == 0:
        differenced = lu.mul_expr(D, Y, size)
    else:
        differenced = lu.rmul_expr(Y, D, size)
    return (Y, [lu.create_eq(arg_objs[0], differenced)])
def canonicalize(self):
    """Canonicalize with row sums bounded by 1 and column sums equal to 1.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    obj, constraints = super(Assign, self).canonicalize()
    col_vec_size = (self.size[1], 1)
    ones_col = lu.create_const(np.ones(col_vec_size), col_vec_size)
    row_vec_size = (1, self.size[0])
    ones_row = lu.create_const(np.ones(row_vec_size), row_vec_size)
    # Each row sums to at most 1.
    row_sum = lu.rmul_expr(obj, ones_col, (self.size[0], 1))
    constraints.append(lu.create_leq(row_sum, lu.transpose(ones_row)))
    # Each column sums to exactly 1.
    col_sum = lu.mul_expr(ones_row, obj, (1, self.size[1]))
    constraints.append(lu.create_eq(col_sum, lu.transpose(ones_col)))
    return (obj, constraints)
def graph_implementation(self, arg_objs, shape: Tuple[int, ...],
                         data=None) -> Tuple[lo.LinOp, List[Constraint]]:
    """Sum the linear expression's entries.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    shape : tuple
        The shape of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    x = arg_objs[0]
    axis, keepdims = data[0], data[1]
    if axis is None:
        # Full reduction over every entry.
        return (lu.sum_entries(x, shape=shape), [])
    if axis == 1:
        # Sum each row via a ones column vector on the right;
        # keepdims controls whether the ones constant is 2-D or 1-D.
        const_shape = (x.shape[1], 1) if keepdims else (x.shape[1],)
        ones = lu.create_const(np.ones(const_shape), const_shape)
        obj = lu.rmul_expr(x, ones, shape)
    else:  # axis == 0
        # Sum each column via a ones row vector on the left.
        const_shape = (1, x.shape[0]) if keepdims else (x.shape[0],)
        ones = lu.create_const(np.ones(const_shape), const_shape)
        obj = lu.mul_expr(ones, x, shape)
    return (obj, [])
def graph_implementation(arg_objs, size, data=None):
    """Multiply the linear expressions.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    # A scalar left operand is lifted to a diagonal matrix so the
    # right-multiplication has compatible dimensions.
    if size[0] != 1 and arg_objs[0].size == (1, 1):
        promoted = lu.promote(arg_objs[0], (size[0], 1))
        arg_objs[0] = lu.diag_vec(promoted)
    return (lu.rmul_expr(arg_objs[0], arg_objs[1], size), [])