def logical_xor(x, y, name="LogicalXor"):
  """Return the truth value of ``x XOR y`` element-wise.

  There is no dedicated element-wise XOR kernel, so this is composed
  from the existing logical ops as ``(x | y) & ~(x & y)``.
  """
  # TODO(alemi) Make this a cwise op if people end up relying on it.
  either = gen_math_ops.logical_or(x, y)
  both = gen_math_ops.logical_and(x, y)
  return gen_math_ops.logical_and(
      either, gen_math_ops.logical_not(both), name=name)
def __body(w_, e_, mask, b):
  """One rejection-sampling iteration for a `tf.while_loop` body.

  For entries whose `mask` is still set, draws fresh proposals and, on
  acceptance, writes them into `w_`/`e_` and clears the corresponding
  mask entries. NOTE(review): looks like a von Mises-Fisher rejection
  sampler (Wood 1994) — confirm against the enclosing method. Relies on
  `self`, `shape` and `seed` captured from the enclosing scope.
  """
  # Symmetric Beta((m-1)/2, (m-1)/2) proposal, cast to the target dtype.
  e = math_ops.cast(distributions.Beta((self.__mf - 1.0) / 2.0,
                                       (self.__mf - 1.0) / 2.0).
                    sample(shape, seed=seed), dtype=self.dtype)
  # Uniform variates driving the accept/reject test below.
  u = random_ops.random_uniform(shape, dtype=self.dtype, seed=seed)
  # Candidate sample and envelope constant c.
  w = (1.0 - (1.0 + b) * e) / (1.0 - (1.0 - b) * e)
  x = (1.0 - b) / (1.0 + b)
  c = self.scale * x + (self.__mf - 1) * math_ops.log1p(-x**2)
  # Clamp x*w strictly below 1 so log(1 - tmp) stays finite.
  # NOTE(review): uses `tf.clip_by_value` while the rest of the block
  # uses the raw op modules — presumably both are in scope; verify.
  tmp = tf.clip_by_value(x * w, 0, 1 - 1e-16)
  reject = gen_math_ops.less(((self.__mf - 1.0) * math_ops.log(1.0 - tmp) +
                              self.scale * w - c),
                             math_ops.log(u))
  accept = gen_math_ops.logical_not(reject)
  # Only entries that are both still masked and accepted are updated;
  # their mask entry becomes `reject` (False there), so they no longer
  # change on later iterations.
  w_ = array_ops.where(gen_math_ops.logical_and(mask, accept), w, w_)
  e_ = array_ops.where(gen_math_ops.logical_and(mask, accept), e, e_)
  mask = array_ops.where(gen_math_ops.logical_and(mask, accept), reject, mask)
  return w_, e_, mask, b
def __body(w_, e_, bool_mask, b, a, d):
  """One rejection-sampling iteration for a `tf.while_loop` body.

  For entries whose `bool_mask` is still set, draws fresh proposals and,
  on acceptance, writes them into `w_`/`e_` and clears the corresponding
  mask entries. `b`, `a`, `d` are loop-invariant constants computed by
  the caller. NOTE(review): looks like a von Mises-Fisher rejection
  sampler (Wood 1994) — confirm against the enclosing method. Relies on
  `self`, `shape` and `seed` captured from the enclosing scope.
  """
  # Symmetric Beta((m-1)/2, (m-1)/2) proposal, cast to the target dtype.
  e = math_ops.cast(Beta((self.__mf - 1) / 2,
                         (self.__mf - 1) / 2).sample(shape, seed=seed),
                    dtype=self.dtype)
  # Uniform variates driving the accept/reject test below.
  u = random_ops.random_uniform(shape, dtype=self.dtype, seed=seed)
  # Candidate sample and the statistic tested against log(u).
  w = (1 - (1 + b) * e) / (1 - (1 - b) * e)
  t = (2 * a * b) / (1 - (1 - b) * e)
  # Accept where the log acceptance ratio exceeds log(u).
  accept = gen_math_ops.greater(
      ((self.__mf - 1) * math_ops.log(t) - t + d), math_ops.log(u))
  reject = gen_math_ops.logical_not(accept)
  # Only entries that are both still masked and accepted are updated;
  # their mask entry becomes `reject` (False there), so they no longer
  # change on later iterations.
  w_ = array_ops.where(gen_math_ops.logical_and(bool_mask, accept), w, w_)
  e_ = array_ops.where(gen_math_ops.logical_and(bool_mask, accept), e, e_)
  bool_mask = array_ops.where(
      gen_math_ops.logical_and(bool_mask, accept), reject, bool_mask)
  return w_, e_, bool_mask, b, a, d
def get_switch_op():
  """Return an op that flips `self._is_row_sweep_var` in place."""
  toggled = gen_math_ops.logical_not(self._is_row_sweep_var)
  return state_ops.assign(self._is_row_sweep_var, toggled).op
def not_(a): """Functional form of "not".""" if tensor_util.is_tensor(a): return gen_math_ops.logical_not(a) return not a
def _tf_not(a): """Implementation of the "not_" operator for TensorFlow.""" return gen_math_ops.logical_not(a)