Example #1
 def testFloat32Fallback(self):
     # Should be OK (float32 fallback).
     self.polynomial = psd_kernels.Polynomial(bias_variance=0,
                                              slope_variance=1,
                                              exponent=1)
     # Should be OK.
     psd_kernels.Polynomial(bias_variance=np.float32(1.),
                            slope_variance=1.,
                            exponent=1.)
Example #2
 def testValidateArgsNonPositiveAreBad(self):
     with self.assertRaisesOpError('Condition x > 0 did not hold'):
         k = psd_kernels.Polynomial(bias_variance=-1., validate_args=True)
         self.evaluate(k.bias_variance)
     with self.assertRaisesOpError('Condition x > 0 did not hold'):
         k = psd_kernels.Polynomial(slope_variance=-1., validate_args=True)
         self.evaluate(k.slope_variance)
     with self.assertRaisesOpError('Condition x > 0 did not hold'):
         k = psd_kernels.Polynomial(exponent=-1., validate_args=True)
         self.evaluate(k.exponent)
Example #3
 def testFloat32Fallback(self):
   # Should be OK (float32 fallback).
   self.polynomial = tfpk.Polynomial(
       bias_variance=0,
       slope_variance=1,
       shift=0,
       exponent=1)
   # Should be OK.
   tfpk.Polynomial(
       bias_variance=np.float32(1.),
       slope_variance=1.,
       shift=0.,
       exponent=1.)
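The fallback can be observed directly on the kernel's dtype property. A minimal sketch, assuming tfpk refers to tfp.math.psd_kernels and TF2 eager execution:

  import tensorflow_probability as tfp

  tfpk = tfp.math.psd_kernels

  # Plain Python numbers carry no float type of their own, so every
  # parameter falls back to float32.
  k = tfpk.Polynomial(bias_variance=0, slope_variance=1, shift=0, exponent=1)
  print(k.dtype)  # expected: tf.float32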
Example #4
 def testValidateArgsNoneIsOk(self):
     # No exception expected
     k = psd_kernels.Polynomial(bias_variance=None,
                                slope_variance=None,
                                exponent=None,
                                validate_args=True)
     self.evaluate(k.apply([[1.]], [[1.]]))
Example #5
 def testBatchShape(self, bias_variance, slope_variance, exponent, shape):
     k = psd_kernels.Polynomial(bias_variance=bias_variance,
                                slope_variance=slope_variance,
                                exponent=exponent,
                                validate_args=True)
     self.assertAllEqual(k.batch_shape.as_list(), shape)
     self.assertAllEqual(self.evaluate(k.batch_shape_tensor()), shape)
Example #6
 def test_mismatched_float_types_are_bad(self):
   with self.assertRaises(TypeError):
     psd_kernels.Polynomial(
         bias_variance=np.float32(1.),
         slope_variance=np.float64(1.),
         exponent=1.
     )
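For contrast, keeping every parameter in one float type is accepted. A minimal sketch, assuming psd_kernels refers to TensorFlow Probability's PSD-kernel module (tfp.math.psd_kernels in current releases):

  import numpy as np
  import tensorflow_probability as tfp

  psd_kernels = tfp.math.psd_kernels

  # Casting all parameters to the same dtype (here float64) avoids the
  # TypeError raised above for mixed float32/float64 parameters.
  psd_kernels.Polynomial(
      bias_variance=np.float64(1.),
      slope_variance=np.float64(1.),
      exponent=np.float64(1.))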
Example #7
 def testValidateArgsNonPositiveAreBad(self):
   with self.assertRaisesOpError('Condition x > 0 did not hold'):
     k = tfpk.Polynomial(
         bias_variance=-1.,
         validate_args=True)
     self.evaluate(k.apply([1.], [1.]))
   with self.assertRaisesOpError('Condition x > 0 did not hold'):
     k = tfpk.Polynomial(
         slope_variance=-1.,
         validate_args=True)
     self.evaluate(k.apply([1.], [1.]))
   with self.assertRaisesOpError('Condition x > 0 did not hold'):
     k = tfpk.Polynomial(
         exponent=-1.,
         validate_args=True)
     self.evaluate(k.apply([1.], [1.]))
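These assertions only fire because validate_args=True; with the default of False the same negative parameter is accepted without a runtime check. A minimal sketch, assuming tfpk refers to tfp.math.psd_kernels:

  import tensorflow_probability as tfp

  tfpk = tfp.math.psd_kernels

  # validate_args defaults to False, so no assertion op guards the
  # parameter; construction and application succeed, but invalid values
  # go undetected.
  k = tfpk.Polynomial(bias_variance=-1.)
  k.apply([1.], [1.])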
Example #8
 def testShapesAreCorrectBroadcast(self):
     k = psd_kernels.Polynomial(bias_variance=np.ones([2, 1, 1],
                                                      np.float32),
                                slope_variance=np.ones([1, 3, 1],
                                                       np.float32))
     self.assertAllEqual(
         k.matrix(np.ones([2, 4, 3], np.float32),
                  np.ones([2, 5, 3], np.float32)).shape, [2, 3, 2, 4, 5])
Example #9
 def testShapesAreCorrectMatrix(self, feature_ndims, x_shape, y_shape,
                                matrix_shape):
     k = psd_kernels.Polynomial(bias_variance=0.,
                                slope_variance=1.,
                                exponent=1.,
                                feature_ndims=feature_ndims)
     x = np.ones(x_shape, np.float32)
     y = np.ones(y_shape, np.float32)
     self.assertAllEqual(k.matrix(x, y).shape, matrix_shape)
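As a concrete instance of the parameterization above, feature_ndims=2 treats the trailing two dimensions as one feature block. A minimal sketch, assuming psd_kernels refers to tfp.math.psd_kernels:

  import numpy as np
  import tensorflow_probability as tfp

  psd_kernels = tfp.math.psd_kernels

  # x holds 5 examples and y holds 4 examples, each a [3, 2] feature block,
  # so the pairwise kernel matrix has shape [5, 4].
  k = psd_kernels.Polynomial(
      bias_variance=0., slope_variance=1., exponent=1., feature_ndims=2)
  x = np.ones([5, 3, 2], np.float32)
  y = np.ones([4, 3, 2], np.float32)
  print(k.matrix(x, y).shape)  # expected: (5, 4)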
Example #10
 def testFloat64(self):
     # No exception expected
     k = psd_kernels.Polynomial(bias_variance=np.float64(0.),
                                slope_variance=np.float64(1.),
                                exponent=np.float64(1.),
                                feature_ndims=1)
     x = np.ones([5, 3], np.float64)
     y = np.ones([5, 3], np.float64)
     k.apply(x, y)
Example #11
 def testFloat32(self):
     # No exception expected
     k = psd_kernels.Polynomial(bias_variance=0.,
                                slope_variance=1.,
                                exponent=1.,
                                feature_ndims=1)
     x = np.ones([5, 3], np.float32)
     y = np.ones([5, 3], np.float32)
     k.apply(x, y)
Example #12
 def testBatchShape(self, bias_variance, slope_variance,
                    shift, exponent, shape):
   k = tfpk.Polynomial(
       bias_variance=bias_variance,
       slope_variance=slope_variance,
       shift=shift,
       exponent=exponent,
       validate_args=True)
   self.assertAllEqual(shape, k.batch_shape.as_list())
   self.assertAllEqual(shape, self.evaluate(k.batch_shape_tensor()))
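The batch shape asserted here is the broadcast of the parameter batch shapes. A minimal sketch, assuming tfpk refers to tfp.math.psd_kernels:

  import numpy as np
  import tensorflow_probability as tfp

  tfpk = tfp.math.psd_kernels

  # A [2, 1] bias_variance broadcast against a [1, 3] slope_variance gives a
  # kernel batch shape of [2, 3]; the scalar shift and exponent add nothing.
  k = tfpk.Polynomial(
      bias_variance=np.ones([2, 1], np.float32),
      slope_variance=np.ones([1, 3], np.float32),
      shift=0.,
      exponent=1.)
  print(k.batch_shape)  # expected: (2, 3)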
Example #13
 def testValuesAreCorrect(self):
     bias_variance = 1.5
     slope_variance = 0.5
     exponent = 2
     k = psd_kernels.Polynomial(bias_variance=bias_variance,
                                slope_variance=slope_variance,
                                exponent=exponent)
     x = np.random.uniform(-1, 1, size=[5, 3]).astype(np.float32)
     y = np.random.uniform(-1, 1, size=[4, 3]).astype(np.float32)
     self.assertAllClose(
         self.evaluate(k.matrix(x, y)),
         bias_variance**2 + slope_variance**2 * (x.dot(y.T))**exponent)
Example #14
 def testShapesAreCorrectApply(self, feature_ndims,
                               x_shape, y_shape, apply_shape):
   k = tfpk.Polynomial(
       bias_variance=0.,
       slope_variance=1.,
       shift=0.,
       exponent=1.,
       feature_ndims=feature_ndims)
   x = np.ones(x_shape, np.float32)
   y = np.ones(y_shape, np.float32)
   self.assertAllEqual(
       apply_shape, k.apply(x, y).shape)
Example #15
 def testShapesAreCorrectBroadcast(self):
   k = tfpk.Polynomial(
       bias_variance=np.ones([2, 1, 1], np.float32),
       slope_variance=np.ones([1, 3, 1], np.float32))
   self.assertAllEqual(
       [2, 3, 2, 4, 5],
       #`--'  |  `--'
       #  |   |    `- matrix shape
       #  |   `- from input batch shapes
       #  `- from broadcasting kernel params
       k.matrix(
           np.ones([2, 4, 3], np.float32),
           np.ones([2, 5, 3], np.float32)
       ).shape)
Example #16
 def testValuesAreCorrect(self):
   bias_variance = 1.5
   slope_variance = 0.5
   shift = 1.
   exponent = 2
   k = tfpk.Polynomial(
       bias_variance=bias_variance,
       slope_variance=slope_variance,
       shift=shift,
       exponent=exponent
   )
   x = np.random.uniform(-1, 1, size=[5, 3]).astype(np.float32)
   y = np.random.uniform(-1, 1, size=[4, 3]).astype(np.float32)
   self.assertAllClose(
       (bias_variance ** 2 + slope_variance ** 2 *
        ((x - shift).dot((y - shift).T)) ** exponent),
       self.evaluate(k.matrix(x, y))
   )
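The closed form exercised by this test can be checked without TensorFlow. A NumPy-only sketch of bias_variance**2 + slope_variance**2 * ((x - shift).dot(y - shift))**exponent for a single pair of feature vectors:

  import numpy as np

  bias_variance, slope_variance, shift, exponent = 1.5, 0.5, 1., 2

  x = np.array([0.3, -0.2, 0.1], np.float32)
  y = np.array([0.5, 0.4, -0.6], np.float32)

  # Same closed form as the assertAllClose above, restricted to one (x, y) pair.
  value = (bias_variance**2 +
           slope_variance**2 * ((x - shift).dot(y - shift))**exponent)
  print(value)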
Example #17
 def testNoneShapes(self):
   k = tfpk.Polynomial(
       bias_variance=np.reshape(np.arange(12.), [2, 3, 2]))
   self.assertEqual([2, 3, 2], k.batch_shape.as_list())
Example #18
 def testShifttNonPositiveIsOk(self):
   # No exception expected
   k = tfpk.Polynomial(
       shift=-1.,
       validate_args=True)
   self.evaluate(k.apply([1.], [1.]))
Example #19
 def testShifttNonPositiveIsOk(self):
     # No exception expected
     k = psd_kernels.Polynomial(shift=-1., validate_args=True)
     self.evaluate(k.shift)