Example #1
    def test_seed(self):
        with self.test_context() as session:
            length = 10
            arr = np.random.randn(length, 2)
            batch_size = 2
            m1 = gpflow.Minibatch(arr, seed=1, batch_size=batch_size)
            m2 = gpflow.Minibatch(arr, seed=1, batch_size=batch_size)

            self.assertEqual(m1.is_built_coherence(), gpflow.Build.YES)
            self.assertEqual(m1.seed, 1)
            with self.assertRaises(gpflow.GPflowError):
                m1.seed = 10

            self.assertEqual(m2.is_built_coherence(), gpflow.Build.YES)
            self.assertEqual(m2.seed, 1)
            with self.assertRaises(gpflow.GPflowError):
                m2.seed = 10

            self.assertEqual(m1.seed, 1)
            self.assertEqual(m2.seed, 1)
            for i in range(length):
                m1_value = m1.read_value(session=session)
                m2_value = m2.read_value(session=session)
                self.assertEqual(m1_value.shape[0],
                                 batch_size,
                                 msg='Index range "{}"'.format(i))
                self.assertEqual(m2_value.shape[0],
                                 batch_size,
                                 msg='Index range "{}"'.format(i))
                assert_allclose(m1_value, m2_value)
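
As the assertRaises blocks above verify, the seed is fixed at construction time and cannot be reassigned once the minibatch tensor has been built. A minimal standalone sketch of the construction-time form, assuming the same GPflow 1.x API used in these examples (variable names are illustrative):

    import numpy as np
    import gpflow

    arr = np.random.randn(10, 2)
    mb = gpflow.Minibatch(arr, batch_size=2, seed=1)  # seed must be chosen here
    # mb.seed = 10  # would raise gpflow.GPflowError on a built Minibatch
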
Example #2
    def test_change_batch_size(self):
        with self.test_context() as session:
            length = 10
            arr = np.random.randn(length, 2)
            m = gpflow.Minibatch(arr, shuffle=False)
            for i in range(length):
                assert_allclose(m.read_value(session=session), [arr[i]])

            def check_batch_size(m, length, batch_size):
                self.assertEqual(m.batch_size, batch_size)
                for i in range(length // batch_size):
                    value = m.read_value(session=session)
                    self.assertEqual(value.shape[0],
                                     batch_size,
                                     msg='Index range "{}"'.format(i))

            batch_size = 2
            m.set_batch_size(batch_size)
            check_batch_size(m, length, batch_size)

            batch_size = 5
            m.batch_size = batch_size
            check_batch_size(m, length, batch_size)

            batch_size = 10
            m.set_batch_size(batch_size)
            check_batch_size(m, length, batch_size)
Example #3
 def __init__(self,
              obs_weight,
              X,
              Y,
              kern,
              likelihood,
              feat=None,
              mean_function=None,
              num_latent=None,
              q_diag=False,
              whiten=True,
              minibatch_size=None,
              Z=None,
              num_data=None,
              **kwargs):
     # Pass minibatch_size through to the parent SVGP so that X and Y are
     # minibatched with the same batch size as obs_weight below.
     super(WeightedSVGP, self).__init__(X,
                                        Y,
                                        kern,
                                        likelihood,
                                        feat=feat,
                                        mean_function=mean_function,
                                        num_latent=num_latent,
                                        q_diag=q_diag,
                                        whiten=whiten,
                                        minibatch_size=minibatch_size,
                                        Z=Z,
                                        num_data=num_data,
                                        **kwargs)
     if minibatch_size is None:
         self.obs_weight = gp.DataHolder(obs_weight)
     else:
         # seed=0 matches the seed SVGP uses for its own X/Y minibatches,
         # keeping the weight batches aligned with the data batches.
         self.obs_weight = gp.Minibatch(obs_weight,
                                        batch_size=minibatch_size,
                                        seed=0)
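
A hedged instantiation sketch for the class above (not from the original source; the data, kernel, and likelihood choices are illustrative, and the deferred-build/compile pattern assumes the GPflow 1.x workflow):

    import numpy as np
    import gpflow as gp

    X = np.random.randn(200, 1)
    Y = np.random.randn(200, 1)
    Z = X[::20].copy()                  # inducing inputs
    obs_weight = np.ones((200, 1))      # per-observation likelihood weights

    with gp.defer_build():
        model = WeightedSVGP(obs_weight, X, Y,
                             kern=gp.kernels.RBF(1),
                             likelihood=gp.likelihoods.Gaussian(),
                             Z=Z,
                             minibatch_size=20)
    model.compile()
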
Example #4
 def __init__(self, weights, X, Y, Z, kernels, likelihood,
              num_outputs=None,
              mean_function=gp.mean_functions.Zero(),  # the final layer mean function
              minibatch_size=128,
              **kwargs):
     super(WeightedDGP, self).__init__(X, Y, Z, kernels, likelihood,
                                       num_outputs=num_outputs,
                                       mean_function=mean_function,
                                       **kwargs)
     if minibatch_size:
         # Minibatch the per-observation weights; the fixed seed makes the
         # batch order reproducible across runs.
         self.weights = gp.Minibatch(weights, batch_size=minibatch_size, seed=0)
     else:
         self.weights = gp.DataHolder(weights)
Example #5
    def test_change_variable_size(self):
        with self.test_context() as session:
            m = gpflow.Parameterized()
            length = 10
            arr = np.random.randn(length, 2)
            m.X = gpflow.Minibatch(arr, shuffle=False)
            for i in range(length):
                assert_allclose(m.X.read_value(session=session), [arr[i]])

            length = 20
            arr = np.random.randn(length, 2)
            m.X = arr
            for i in range(length):
                assert_allclose(m.X.read_value(session=session), [arr[i]])
Example #6
    def test_clear(self):
        with self.test_context() as session:
            length = 10
            seed = 10
            arr = np.random.randn(length, 2)
            m = gpflow.Minibatch(arr, shuffle=False)
            self.assertEqual(m.is_built_coherence(), gpflow.Build.YES)
            self.assertEqual(m.seed, None)
            with self.assertRaises(gpflow.GPflowError):
                m.seed = seed
            self.assertEqual(m.seed, None)
            for i in range(length):
                assert_allclose(m.read_value(session=session), [arr[i]])

            m.clear()
            self.assertEqual(m.seed, None)
            m.seed = seed
            self.assertEqual(m.seed, seed)
            self.assertEqual(m.is_built_coherence(), gpflow.Build.NO)
            self.assertEqual(m.parameter_tensor, None)
Example #7
 def test_create(self):
     with self.test_context():
         values = [tf.get_variable('test', shape=()), "test", None]
         for v in values:
             with self.assertRaises(ValueError):
                 gpflow.Minibatch(v)
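
For contrast with the rejected inputs above, a minimal sketch (same assumed GPflow 1.x API, illustrative names) of what Minibatch does accept: a NumPy array, optionally with the batch_size, shuffle, and seed arguments seen in the earlier examples.

    import numpy as np
    import gpflow

    data = np.random.randn(100, 2)
    mb = gpflow.Minibatch(data, batch_size=10, shuffle=True, seed=0)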