Example #1
import unittest
from pickle import PickleError

from GPy.util.caching import Cacher


class Test(unittest.TestCase):
    def setUp(self):
        def op(x):
            return x

        self.cache = Cacher(op, 1)

    def test_pickling(self):
        self.assertRaises(PickleError, self.cache.__getstate__)
        self.assertRaises(PickleError, self.cache.__setstate__)

    def test_copy(self):
        tmp = self.cache.__deepcopy__()
        assert (tmp.operation is self.cache.operation)
        self.assertEqual(tmp.limit, self.cache.limit)

    def test_reset(self):
        self.cache.reset()
        self.assertDictEqual(
            self.cache.cached_input_ids,
            {},
        )
        self.assertDictEqual(
            self.cache.cached_outputs,
            {},
        )
        self.assertDictEqual(
            self.cache.inputs_changed,
            {},
        )

    def test_name(self):
        assert (self.cache.__name__ == self.cache.operation.__name__)
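The tests above exercise the public surface of `Cacher`: the wrapped callable is exposed as `operation`, the maximum cache size as `limit`, `reset()` empties `cached_input_ids`, `cached_outputs` and `inputs_changed`, and `__name__` mirrors the wrapped function's name. A minimal usage sketch of that call pattern follows; the `square` function is illustrative only and not part of the original examples:

from GPy.util.caching import Cacher

def square(x):
    # stand-in for an expensive computation
    return x * x

cached_square = Cacher(square, limit=1)   # keep at most one cached result
y = cached_square(3)                      # a Cacher is called exactly like the wrapped function
assert cached_square.__name__ == square.__name__   # see test_name above
cached_square.reset()                     # clears cached_input_ids, cached_outputs, inputs_changed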
Example #2
import unittest
from pickle import PickleError

from GPy.util.caching import Cacher


class Test(unittest.TestCase):
    def setUp(self):
        def op(x):
            return x
        self.cache = Cacher(op, 1)

    def test_pickling(self):
        self.assertRaises(PickleError, self.cache.__getstate__)
        self.assertRaises(PickleError, self.cache.__setstate__)

    def test_copy(self):
        tmp = self.cache.__deepcopy__()
        assert(tmp.operation is self.cache.operation)
        self.assertEqual(tmp.limit, self.cache.limit)

    def test_reset(self):
        self.cache.reset()
        self.assertDictEqual(self.cache.cached_input_ids, {})
        self.assertDictEqual(self.cache.cached_outputs, {})
        self.assertDictEqual(self.cache.inputs_changed, {})

    def test_name(self):
        assert(self.cache.__name__ == self.cache.operation.__name__)
Example #3
        }
        """
        weave.inline(code,
                     support_code=support_code,
                     arg_names=[
                         'psi1', 'psi2n', 'N', 'M', 'Q', 'variance', 'l2', 'Z',
                         'mu', 'S', 'gamma', 'log_denom1', 'log_denom2',
                         'log_gamma', 'log_gamma1'
                     ],
                     type_converters=weave.converters.blitz)

        psi2 = psi2n.sum(axis=0)
        return psi0, psi1, psi2, psi2n

    from GPy.util.caching import Cacher
    psicomputations = Cacher(_psicomputations, limit=1)

    def psiDerivativecomputations(dL_dpsi0, dL_dpsi1, dL_dpsi2, variance,
                                  lengthscale, Z, variational_posterior):
        ARD = (len(lengthscale) != 1)

        _, psi1, _, psi2n = psicomputations(variance, lengthscale, Z,
                                            variational_posterior)

        mu = variational_posterior.mean
        S = variational_posterior.variance
        gamma = variational_posterior.binary_prob
        N, M, Q = mu.shape[0], Z.shape[0], mu.shape[1]
        l2 = np.square(lengthscale)
        log_denom1 = np.log(S / l2 + 1)
        log_denom2 = np.log(2 * S / l2 + 1)
Example #4
    def setUp(self):
        def op(x):
            return x

        self.cache = Cacher(op, 1)
Example #5
    def setUp(self):
        def op(x):
            return x
        self.cache = Cacher(op, 1)
Example #6
    lengthscale2 = np.square(lengthscale)
    denom = 1. / (2 * S + lengthscale2)
    denom2 = np.square(denom)

    _psi2 = _psi2computations(variance, lengthscale, Z, mu, S)  # NxMxM
    Lpsi2 = dL_dpsi2 * _psi2  # dL_dpsi2 is MxM, using broadcast to multiply N out
    Lpsi2sum = np.einsum('nmo->n', Lpsi2)  #N
    Lpsi2Z = np.einsum('nmo,oq->nq', Lpsi2, Z)  #NxQ
    Lpsi2Z2 = np.einsum('nmo,oq,oq->nq', Lpsi2, Z, Z)  #NxQ
    Lpsi2Z2p = np.einsum('nmo,mq,oq->nq', Lpsi2, Z, Z)  #NxQ
    Lpsi2Zhat = Lpsi2Z
    Lpsi2Zhat2 = (Lpsi2Z2 + Lpsi2Z2p) / 2

    _dL_dvar = Lpsi2sum.sum() * 2 / variance
    _dL_dmu = (-2 * denom) * (mu * Lpsi2sum[:, None] - Lpsi2Zhat)
    _dL_dS = (2 * np.square(denom)) * (np.square(mu) * Lpsi2sum[:, None] -
                                       2 * mu * Lpsi2Zhat +
                                       Lpsi2Zhat2) - denom * Lpsi2sum[:, None]
    _dL_dZ = (-np.einsum('nmo,oq->oq', Lpsi2, Z) / lengthscale2
              + np.einsum('nmo,oq->mq', Lpsi2, Z) / lengthscale2
              + 2 * np.einsum('nmo,nq,nq->mq', Lpsi2, mu, denom)
              - np.einsum('nmo,nq,mq->mq', Lpsi2, denom, Z)
              - np.einsum('nmo,oq,nq->mq', Lpsi2, Z, denom))
    _dL_dl = 2 * lengthscale * (
        (S / lengthscale2 * denom + np.square(mu * denom)) * Lpsi2sum[:, None]
        + (Lpsi2Z2 - Lpsi2Z2p) / (2 * np.square(lengthscale2)) -
        (2 * mu * denom2) * Lpsi2Zhat + denom2 * Lpsi2Zhat2).sum(axis=0)

    return _dL_dvar, _dL_dl, _dL_dZ, _dL_dmu, _dL_dS


_psi1computations = Cacher(__psi1computations, limit=5)
_psi2computations = Cacher(__psi2computations, limit=5)
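The `np.einsum` index strings in the snippet above compress several contractions of the NxMxM tensor `Lpsi2` with `Z`. As a quick sanity check, two of them can be rewritten with plain NumPy reductions; the shapes below are arbitrary and chosen only for illustration:

import numpy as np

N, M, Q = 4, 3, 2
Lpsi2 = np.random.rand(N, M, M)
Z = np.random.rand(M, Q)

# 'nmo->n' sums out both trailing axes
assert np.allclose(np.einsum('nmo->n', Lpsi2), Lpsi2.sum(axis=(1, 2)))

# 'nmo,oq->nq' sums over m, then contracts the remaining o axis with Z
assert np.allclose(np.einsum('nmo,oq->nq', Lpsi2, Z),
                   Lpsi2.sum(axis=1).dot(Z))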