Example #1
0
    def jacobian_det(self, rv_var, rv_value):
        """Return the log-Jacobian-determinant term for the interval transform.

        The bounds are recovered via ``self.param_extract_fn``; when either
        bound is missing the value is passed through unchanged (original
        identity behaviour preserved).
        """
        lower, upper = self.param_extract_fn(rv_var)

        # Guard clause: without both bounds there is nothing to transform.
        if lower is None or upper is None:
            return rv_value

        # log(upper - lower) - 2*softplus(-v) - v, the log-det of the
        # scaled-logistic mapping onto (lower, upper).
        neg_softplus = at.softplus(-rv_value)
        return at.log(upper - lower) - 2 * neg_softplus - rv_value
Example #2
0
 def make_uw(self, u, w):
     """Adjust ``u`` so its inner product with ``w`` is at least -1.

     The raw inner product ``w . u`` is replaced by ``-1 + softplus(w . u)``
     (which is always >= -1) along the direction of ``w`` — presumably the
     planar-flow invertibility constraint; confirm against caller.
     ``w`` is returned unchanged.
     """
     if self.batched:
         # u, w: (batch, d); reduce over the feature axis, keeping dims
         # so the correction below broadcasts per batch element.
         inner = (u * w).sum(-1, keepdims=True)
         constrained = -1.0 + at.softplus(inner)
         norm_sq = (w**2).sum(-1, keepdims=True) + 1e-10
         u_hat = u + (constrained - inner) * w / norm_sq
     else:
         # u, w: (d,) vectors.
         inner = u.dot(w)
         constrained = -1.0 + at.softplus(inner)
         norm_sq = (w**2).sum() + 1e-10
         u_hat = u + (constrained - inner) * w / norm_sq
     return u_hat, w
Example #3
0
def rho2sigma(rho):
    """``rho -> sigma`` Aesara converter.

    Maps an unconstrained ``rho`` to a positive scale via the softplus:
    :math:`mu + sigma*e = mu + log(1+exp(rho))*e`
    """
    sigma = at.softplus(rho)
    return sigma
Example #4
0
 def test_accuracy(self):
     """Check softplus against the naive ``log1p(exp(x))`` reference.

     The points straddle the implementation's piecewise cutoffs
     (-37, 18, 33.3), so every approximation regime is exercised.
     """
     points = np.array([-40.0, -17.5, 17.5, 18.5, 40.0])
     actual = aet.softplus(points).eval()
     expected = np.log1p(np.exp(points))
     np.testing.assert_allclose(actual, expected, rtol=10e-10)
Example #5
0
 def jacobian_det(self, rv_var, rv_value):
     """Log-Jacobian-determinant: ``-softplus(-v)``, i.e. ``log(sigmoid(v))``."""
     softplus_of_neg = at.softplus(-rv_value)
     return -softplus_of_neg
Example #6
0
 def backward(self, rv_var, rv_value):
     """Map an unconstrained value to the positive reals via softplus."""
     transformed = at.softplus(rv_value)
     return transformed
Example #7
0
 def log_jac_det(self, value, *inputs):
     """Log-Jacobian-determinant: ``-softplus(-value)``, i.e. ``log(sigmoid(value))``."""
     softplus_of_neg = at.softplus(-value)
     return -softplus_of_neg
Example #8
0
 def backward(self, value, *inputs):
     """Map an unconstrained value to the positive reals via softplus."""
     transformed = at.softplus(value)
     return transformed
Example #9
0
 def make_ab(self, a, b):
     """Constrain raw parameters: ``a -> exp(a)`` (positive) and
     ``b -> softplus(b) - exp(a)`` (so that ``b + a > 0``)."""
     a = at.exp(a)
     b = at.softplus(b) - a
     return a, b
Example #10
0
def log1pexp(x):
    """Return ``log(1 + exp(x))``, also called softplus.

    Delegates to ``at.softplus``, which is numerically more stable than
    evaluating the naive expression directly.
    """
    result = at.softplus(x)
    return result