from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops


@ops.RegisterGradient("Cosh")
def _CoshGrad(op, grad):
  """Returns grad * sinh(x)."""
  x = op.inputs[0]
  with ops.control_dependencies([grad]):
    x = math_ops.conj(x)  # conjugate for correct complex-valued gradients
    return grad * math_ops.sinh(x)
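As a quick sanity check of this rule via the public API (a sketch, assuming TF 2.x eager execution): the gradient of cosh should match sinh pointwise.

import tensorflow as tf

x = tf.constant([0.5, 1.0, 2.0])
with tf.GradientTape() as tape:
  tape.watch(x)
  y = tf.cosh(x)
print(tf.reduce_max(tf.abs(tape.gradient(y, x) - tf.sinh(x))))  # ~0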
@ops.RegisterGradient("Acosh")
def _AcoshGrad(op, grad):
  """Returns grad * 1/sinh(y)."""
  y = op.outputs[0]  # reuse the forward output: y = acosh(x)
  with ops.control_dependencies([grad]):
    y = math_ops.conj(y)  # conjugate for correct complex-valued gradients
    return grad / math_ops.sinh(y)
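The same tape-based check works for the Acosh rule, since d/dx acosh(x) = 1/sqrt(x^2 - 1) = 1/sinh(acosh(x)):

x = tf.constant([1.5, 2.0, 5.0])  # acosh requires x > 1
with tf.GradientTape() as tape:
  tape.watch(x)
  y = tf.acosh(x)
print(tf.reduce_max(tf.abs(tape.gradient(y, x) - 1.0 / tf.sinh(y))))  # ~0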
  def _forward(self, x):
    # SinhArcsinh transform: y = sinh((asinh(x) + skewness) * tailweight).
    return math_ops.sinh((math_ops.asinh(x) + self.skewness) * self.tailweight)

  def _inverse(self, y):
    # Inverse transform: x = sinh(asinh(y) / tailweight - skewness).
    return math_ops.sinh(math_ops.asinh(y) / self.tailweight - self.skewness)
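These two methods invert each other; a quick round-trip check using the public tf.sinh/tf.asinh equivalents (the skewness and tailweight values below are illustrative, not from the source):

skewness, tailweight = 0.5, 1.5  # illustrative parameters

def forward(x):
  return tf.sinh((tf.asinh(x) + skewness) * tailweight)

def inverse(y):
  return tf.sinh(tf.asinh(y) / tailweight - skewness)

x = tf.constant([-2.0, 0.0, 3.0])
print(tf.reduce_max(tf.abs(inverse(forward(x)) - x)))  # ~0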
  def exp_map(self, v, x):
    # Exponential map on the Lorentz model, Eq. (9): v is a tangent vector
    # at x (here the Riemannian gradient) and vnorm its Lorentzian norm.
    # Note: vnorm must be nonzero, since the last term divides by it.
    vnorm = math_ops.sqrt(self.lorentz_scalar_product(v, v))
    return math_ops.cosh(vnorm) * x + math_ops.sinh(vnorm) * v / vnorm
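lorentz_scalar_product is not shown in this snippet; a minimal sketch of it, assuming the common convention that the time-like coordinate sits at index 0 of the last axis (an assumption, not confirmed by the source):

def lorentz_scalar_product(u, v):
  # Assumed Minkowski inner product: <u, v>_L = -u_0 v_0 + sum_{i>=1} u_i v_i,
  # computed along the last axis.
  uv = u * v
  return tf.reduce_sum(uv[..., 1:], axis=-1) - uv[..., 0]

For x on the hyperboloid and v tangent at x, <v, v>_L is non-negative, so the square root in exp_map is real; it vanishes only for v = 0, where the map should simply return x.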