示例#1
0
 def A(self, x):
     """Apply the stacked linear operator to the packed vector x.

     Unpacks x into one matrix per variable using the offsets/sizes in
     self.sym_data, evaluates every constraint expression in
     self.A_exprs against that variable map, and stacks the vectorized
     results back into a single column.
     """
     # Per-variable matrix views of the packed input, keyed by variable id.
     var_mats = {}
     for var_id, size in self.sym_data.var_sizes.items():
         start = self.sym_data.var_offsets[var_id]
         stop = start + size[0] * size[1]
         var_mats[var_id] = vector_ops.mat(x[start:stop, :], size)
     # Evaluate each expression and re-stack the vectorized outputs.
     rows = [
         vector_ops.vec(expressions.tensor(expr, var_mats))
         for expr in self.A_exprs
     ]
     return vector_ops.vstack(rows)
示例#2
0
def get_constraint_tensors(constraints):
    """Get expression for Ax + b.

    Returns the constant-pruned expressions (the "A" part) alongside the
    stacked constant offsets (the "b" part, negated, as a float32 tensor).
    """
    # "A": constraint expressions with constant subtrees pruned away.
    pruned = tree_mat.prune_constants(constraints)
    A_exprs = [c.expr for c in pruned]

    # "b": evaluating each original expression on an empty variable map
    # yields its constant term; negate and vectorize, then stack.
    b_parts = []
    for constr in constraints:
        const_term = tf.constant(
            -tree_mat.mul(constr.expr, {}), dtype=tf.float32)
        b_parts.append(vector_ops.vec(const_term))
    b = vector_ops.vstack(b_parts)

    return A_exprs, b
示例#3
0
 def AT(self, y):
     """Apply the adjoint of the stacked operator to the packed vector y.

     Splits y into one matrix block per constraint, pushes each block
     through the adjoint of the matching expression in self.A_exprs,
     sums the per-variable contributions, and stacks the result in
     self.var_ids order.
     """
     # Slice y into per-constraint matrix blocks.
     blocks = []
     offset = 0
     for constr in self.constraints:
         n_entries = constr.size[0] * constr.size[1]
         blocks.append(
             vector_ops.mat(y[offset:offset + n_entries, :], constr.size))
         offset += n_entries
     # Merge adjoint contributions across constraints, per variable id.
     x_map = sum_dicts(
         expressions.adjoint_tensor(expr, blocks[i])
         for i, expr in enumerate(self.A_exprs))
     # Stack in canonical variable order.
     return vector_ops.vstack(
         [vector_ops.vec(x_map[var_id]) for var_id in self.var_ids])
示例#4
0
def get_objective_tensor(var_ids, sym_data):
    """Get objective tensor via gradient of c'x.

    Builds one zero-initialized tf.Variable per variable id, evaluates
    the symbolic objective on them, and differentiates with respect to
    each variable.

    Args:
        var_ids: ordered sequence of variable ids (indexable, since it is
            re-indexed when filling in missing gradients).
        sym_data: problem data exposing `var_sizes` (id -> size tuple) and
            `objective` (symbolic objective expression).

    Returns:
        The per-variable gradients of the objective, vectorized and
        stacked in `var_ids` order, with explicit zero blocks where
        tf.gradients returns None.
    """
    xs = [
        tf.Variable(tf.zeros(sym_data.var_sizes[var_id], dtype=tf.float32))
        for var_id in var_ids
    ]
    # Fix: zip over var_ids directly — the original wrapped it in a
    # redundant identity generator expression.
    xs_map = dict(zip(var_ids, xs))
    obj_t = expressions.tensor(sym_data.objective, xs_map)

    # tf.gradients yields None for variables the objective never touches;
    # substitute a matching zero block so the stacked output stays aligned.
    grads = tf.gradients(obj_t, xs)
    return vector_ops.vstack([
        vector_ops.vec(g) if g is not None else vector_ops.vec(
            tf.zeros(sym_data.var_sizes[var_ids[i]], dtype=tf.float32))
        for i, g in enumerate(grads)
    ])