def _min_matrix_dim(self):
   """Minimum of domain/range dimension, if statically available, else None."""
   # Resolve both dimensions statically; bail out if either is unknown.
   dims = [tensor_shape.dimension_value(d)
           for d in (self.domain_dimension, self.range_dimension)]
   if None in dims:
      return None
   return min(dims)
# --- Example 2 ---
 def _set_diag_operators(self, diag_update, is_diag_update_positive):
     """Set attributes self._diag_update and self._diag_operator.

     Args:
       diag_update: Optional diagonal-update `Tensor`; when `None`, an
         identity diagonal of size `r` (last dim of `self.u`) is used.
       is_diag_update_positive: Optional Python bool, forwarded as
         `is_positive_definite` to the diagonal operators.
     """
     if diag_update is not None:
         # NOTE(review): builds the operators from self._diag_update, not the
         # diag_update argument being tested — assumes the caller assigned
         # self._diag_update beforehand; confirm against the call site.
         self._diag_operator = linear_operator_diag.LinearOperatorDiag(
             self._diag_update,
             is_positive_definite=is_diag_update_positive)
         self._diag_inv_operator = linear_operator_diag.LinearOperatorDiag(
             1. / self._diag_update,
             is_positive_definite=is_diag_update_positive)
     else:
         # Prefer the statically-known dimension; compute it once instead of
         # once in the test and once in the assignment as before.
         r = tensor_shape.dimension_value(
             _ops.TensorShape(self.u.shape)[-1])
         if r is None:
             # Fall back to the dynamic shape at graph-execution time.
             r = array_ops.shape(self.u)[-1]
         self._diag_operator = linear_operator_identity.LinearOperatorIdentity(
             num_rows=r, dtype=self.dtype)
         # Identity is its own inverse, so reuse the same operator.
         self._diag_inv_operator = self._diag_operator
# --- Example 3 ---
def _static_check_for_same_dimensions(operators):
    """ValueError if operators determined to have different dimensions.

    Args:
      operators: Iterable of operators exposing `name`, `domain_dimension`
        and `range_dimension`.

    Raises:
      ValueError: If two operators have different statically-known domain
        (or range) dimensions. Operators whose dimension is not statically
        known are skipped.
    """
    if len(operators) < 2:
        return

    def _check(get_dim, label):
        # Collect (name, static_dim) pairs, evaluating the static dimension
        # once per operator (the old filtered comprehension computed it twice).
        named_dims = []
        for op in operators:
            value = tensor_shape.dimension_value(get_dim(op))
            if value is not None:
                named_dims.append((op.name, value))
        if len(set(value for _, value in named_dims)) > 1:
            raise ValueError(
                "Operators must have the same %s dimension. Found: %s" %
                (label, named_dims))

    _check(lambda op: op.domain_dimension, "domain")
    _check(lambda op: op.range_dimension, "range")
# --- Example 4 ---
    def _to_dense(self):
        """Generic and often inefficient implementation.  Override often."""
        # Prefer statically-known shapes; fall back to dynamic tensors.
        if self.batch_shape.is_fully_defined():
            batch = self.batch_shape
        else:
            batch = self.batch_shape_tensor()

        num_cols = tensor_shape.dimension_value(self.domain_dimension)
        if num_cols is None:
            num_cols = self.domain_dimension_tensor()

        # Multiplying by an identity matrix materializes the operator densely.
        identity = linalg_ops.eye(
            num_rows=num_cols, batch_shape=batch, dtype=self.dtype)
        return self.matmul(identity)
# --- Example 5 ---
    def range_dimension_tensor(self, name="range_dimension_tensor"):
        """Dimension (in the sense of vector spaces) of the range of this operator.

    Determined at runtime.

    If this operator acts like the batch matrix `A` with
    `_ops.TensorShape(A.shape) = [B1,...,Bb, M, N]`, then this returns `M`.

    Args:
      name:  A name for this `Op`.

    Returns:
      `int32` `Tensor`
    """
        # Derived classes get this "for free" once .shape() is implemented.
        with self._name_scope(name):
            # Guard-clause form: fall back to the dynamic shape only when the
            # static dimension is unknown.
            static_dim = tensor_shape.dimension_value(self.range_dimension)
            if static_dim is None:
                return self.shape_tensor()[-2]
            return ops.convert_to_tensor(static_dim)