def kernel(self, points1, points2=None, degree=0, depth=1):
    """Evaluate an ArcCosine kernel matrix eagerly and return it as a NumPy array.

    Args:
        points1: Array-like set of input points; converted to float32.
        points2: Optional second set of points. When given, the cross
            kernel K(points1, points2) is evaluated; otherwise
            K(points1, points1).
        degree: Degree passed to ``kernels.ArcCosine``.
        depth: Depth passed to ``kernels.ArcCosine``.

    Returns:
        The evaluated kernel matrix (NumPy array).
    """
    arc_cosine = kernels.ArcCosine(degree, depth, white=0.0)
    # Build the operand list once instead of duplicating the run call
    # across the two branches.
    operands = [np.array(points1, dtype=np.float32)]
    if points2 is not None:
        operands.append(np.array(points2, dtype=np.float32))
    # Fix: the original created a brand-new tf.Session() on every call and
    # never closed it, leaking the session's resources. A context manager
    # guarantees the session is closed after the single run.
    with tf.Session() as sess:
        return sess.run(arc_cosine.kernel(*operands))
def kernel(cls, points1, points2=None, degree=0, depth=1):
    """Evaluate an ArcCosine kernel matrix using the shared class session.

    Args:
        points1: Array-like set of input points; converted to float32.
        points2: Optional second point set; when given, the cross kernel
            K(points1, points2) is computed instead of K(points1, points1).
        degree: Degree passed to ``kernels.ArcCosine``.
        depth: Depth passed to ``kernels.ArcCosine``.

    Returns:
        The evaluated kernel matrix (NumPy array).
    """
    kern = kernels.ArcCosine(degree, depth, white=0.0)
    # Variables must be initialized in the shared session before running.
    cls.session.run(tf.global_variables_initializer())
    first = np.array(points1, dtype=np.float32)
    if points2 is None:
        kernel_op = kern.kernel(first)
    else:
        kernel_op = kern.kernel(first, np.array(points2, dtype=np.float32))
    return cls.session.run(kernel_op)
def diag_kernel(self, points, degree=0, depth=1):
    """Evaluate the diagonal of an ArcCosine kernel matrix eagerly.

    Args:
        points: Array-like set of input points; converted to float32.
        degree: Degree passed to ``kernels.ArcCosine``.
        depth: Depth passed to ``kernels.ArcCosine``.

    Returns:
        The evaluated kernel diagonal (NumPy array).
    """
    arc_cosine = kernels.ArcCosine(degree, depth, white=0.0)
    # Fix: the original opened a fresh tf.Session() and never closed it,
    # leaking the session on every call. The context manager closes it
    # as soon as the single run completes.
    with tf.Session() as sess:
        return sess.run(
            arc_cosine.diag_kernel(np.array(points, dtype=np.float32)))
test_data = pd.read_csv(TEST_PATH, sep=r"\s+", header=None) train_X = train_data.values[:, :-1] train_Y = train_data.values[:, -1:] test_X = test_data.values[:, :-1] test_Y = test_data.values[:, -1:] data = datasets.DataSet(train_X, train_Y) test = datasets.DataSet(test_X, test_Y) Z = init_z(data.X, NUM_INDUCING) likelihood = likelihoods.Logistic() # Setup initial values for the model. if KERNEL == 'arccosine': kern = [ kernels.ArcCosine(data.X.shape[1], degree=DEGREE, depth=DEPTH, lengthscale=LENGTHSCALE, std_dev=1.0, input_scaling=IS_ARD) for i in range(1) ] else: kern = [ kernels.RadialBasis(data.X.shape[1], lengthscale=LENGTHSCALE, input_scaling=IS_ARD) for i in range(1) ] print("Using Kernel " + KERNEL) m = autogp.GaussianProcess(likelihood, kern, Z,
def diag_kernel(cls, points, degree=0, depth=1):
    """Evaluate the diagonal of an ArcCosine kernel via the shared class session.

    Args:
        points: Array-like set of input points; converted to float32.
        degree: Degree passed to ``kernels.ArcCosine``.
        depth: Depth passed to ``kernels.ArcCosine``.

    Returns:
        The evaluated kernel diagonal (NumPy array).
    """
    kern = kernels.ArcCosine(degree, depth, white=0.0)
    # Variables must be initialized in the shared session before running.
    cls.session.run(tf.global_variables_initializer())
    diag_op = kern.diag_kernel(np.array(points, dtype=np.float32))
    return cls.session.run(diag_op)