from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops


def _CoshGrad(op, grad):
  """Returns grad * sinh(x)."""
  x = op.inputs[0]
  with ops.control_dependencies([grad]):
    x = math_ops.conj(x)
    return grad * math_ops.sinh(x)
def _AcoshGrad(op, grad):
  """Returns grad * 1/sinh(y)."""
  y = op.outputs[0]
  with ops.control_dependencies([grad]):
    y = math_ops.conj(y)
    return grad / math_ops.sinh(y)
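# Both gradients above follow from d/dx cosh(x) = sinh(x) and, for
# y = acosh(x), dy/dx = 1 / sinh(y). A minimal numerical check -- a sketch
# assuming TensorFlow 2.x eager mode; tf.GradientTape is the public API,
# not the internal gradient registration used above:
import tensorflow as tf

x = tf.constant(1.5)
with tf.GradientTape() as tape:
  tape.watch(x)
  y = tf.cosh(x)
# Gradient of cosh(x) equals sinh(x).
print(tape.gradient(y, x).numpy(), tf.sinh(x).numpy())

with tf.GradientTape() as tape:
  tape.watch(x)
  y = tf.acosh(x)
# Gradient of acosh(x) equals 1 / sinh(acosh(x)).
print(tape.gradient(y, x).numpy(), (1.0 / tf.sinh(tf.acosh(x))).numpy())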
def _forward(self, x):
  """Y = sinh((asinh(X) + skewness) * tailweight)."""
  return math_ops.sinh(
      (math_ops.asinh(x) + self.skewness) * self.tailweight)


def _inverse(self, y):
  """X = sinh(asinh(Y) / tailweight - skewness)."""
  return math_ops.sinh(
      math_ops.asinh(y) / self.tailweight - self.skewness)
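# _forward and _inverse implement the sinh-arcsinh transform
# Y = sinh((asinh(X) + skewness) * tailweight), so inverse(forward(x)) == x.
# A quick NumPy round-trip check -- a sketch with arbitrary parameter values:
import numpy as np

skewness, tailweight = 0.5, 2.0
x = np.linspace(-3.0, 3.0, 7)
y = np.sinh((np.arcsinh(x) + skewness) * tailweight)     # forward
x_back = np.sinh(np.arcsinh(y) / tailweight - skewness)  # inverse
assert np.allclose(x, x_back)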
def exp_map(self, v, x):
  """Exponential map at x in the Lorentz model; v is the gradient (eq. 9)."""
  # Lorentzian norm of the tangent vector; its self-product is positive for
  # tangent vectors on the hyperboloid, so the square root is real.
  vnorm = math_ops.sqrt(self.lorentz_scalar_product(v, v))
  return math_ops.cosh(vnorm) * x + math_ops.sinh(vnorm) * v / vnorm
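# exp_map relies on a Lorentzian (Minkowski) inner product. A minimal NumPy
# sketch of that product and the map, assuming the time-like coordinate is
# stored first; lorentz_scalar_product here is a hypothetical stand-in for
# the method referenced above, whose sign convention may differ:
import numpy as np

def lorentz_scalar_product(u, v):
  # <u, v>_L = -u_0 * v_0 + sum_i u_i * v_i.
  return -u[0] * v[0] + np.dot(u[1:], v[1:])

def exp_map(v, x):
  # v must be a nonzero tangent vector to avoid dividing by zero.
  vnorm = np.sqrt(lorentz_scalar_product(v, v))
  return np.cosh(vnorm) * x + np.sinh(vnorm) * v / vnorm

# Point on the hyperboloid <x, x>_L = -1 and a tangent vector at x.
x = np.array([1.0, 0.0])
v = np.array([0.0, 0.5])  # satisfies <x, v>_L = 0
y = exp_map(v, x)
print(lorentz_scalar_product(y, y))  # stays on the hyperboloid: ~ -1.0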