def _to_dense(self):
    """Return the dense Householder matrix `I - 2 v v^H` for unit vector v.

    The reflection axis is L2-normalized first, so the result is always a
    valid reflection even if `self.reflection_axis` is not unit length.
    """
    axis = ops.convert_to_tensor(self.reflection_axis)
    unit_axis = nn.l2_normalize(axis, axis=-1)
    # Column vector [..., n, 1] so matmul forms the outer product v v^H.
    col = unit_axis[..., _ops.newaxis]
    dense = -2 * _linalg.matmul(col, col, adjoint_b=True)
    # Add the identity by shifting the diagonal of -2 v v^H up by one.
    return _linalg.set_diag(dense, 1. + _linalg.diag_part(dense))
def _matmul(self, x, adjoint=False, adjoint_arg=False):
    """Apply the Householder reflection to `x`.

    A reflection about the hyperplane through the origin orthogonal to `v`
    maps `x` to `x - 2 v <v, x> / <v, v>`: the component of `x` along `v`
    flips sign, while the component orthogonal to `v` (which lies in the
    invariant hyperplane) is unchanged. With `v` normalized, this is
    `x - 2 v (v^H x)`.

    Note: a reflection lies in O(n) (real) or U(n) (complex) and is its own
    adjoint, so the `adjoint` flag requires no special handling.
    """
    axis = ops.convert_to_tensor(self.reflection_axis)
    if adjoint_arg:
        x = linalg.adjoint(x)
    unit_axis = nn.l2_normalize(axis, axis=-1)
    # Column vector [..., n, 1]; adjoint_a=True yields v^H x below.
    col = unit_axis[..., _ops.newaxis]
    axis_dot_x = _linalg.matmul(col, x, adjoint_a=True)
    return x - 2 * col * axis_dot_x
def _diag_part(self):
    """Return the diagonal of `I - 2 v v^H` for the normalized axis `v`.

    Entry i is `1 - 2 * v_i * conj(v_i)`, i.e. `1 - 2 |v_i|^2`, computed
    directly without materializing the dense matrix.
    """
    axis = ops.convert_to_tensor(self.reflection_axis)
    unit_axis = nn.l2_normalize(axis, axis=-1)
    return 1. - 2 * unit_axis * math_ops.conj(unit_axis)