def __init__(self):
    """Set up a 2D -> 1D parabola fixture (5 samples) and a t_WOptimizer.

    Also precomputes ``tau_arr``, the tau grid used by the plotting code.
    """
    self.real_dim = 2
    self.active_dim = 1
    self.no_samples = 5
    self.kernel = TripathyMaternKernel(self.real_dim, self.active_dim)

    # Parameters
    self.sn = 2.
    self.W = self.kernel.sample_W()
    self.function = Parabola()

    # Ground-truth embedding: both input dims weighted equally, normalized
    # to unit norm so the projection is a proper direction vector.
    self.real_W = np.asarray([
        [1],
        [1]
    ])
    self.real_W = self.real_W / np.linalg.norm(self.real_W)

    self.X = np.random.rand(self.no_samples, self.real_dim)
    Z = np.dot(self.X, self.real_W).reshape((-1, 1))
    self.Y = self.function.f(Z.T).squeeze()

    self.w_optimizer = t_WOptimizer(
        self.kernel,  # TODO: does the kernel take over the W?
        self.sn,
        # float(...) replaces np.asscalar, which was removed in NumPy 1.23.
        float(self.kernel.inner_kernel.variance),
        self.kernel.inner_kernel.lengthscale,
        self.X,
        self.Y
    )

    # Define the plotting variables
    self.tau_arr = np.linspace(0., self.w_optimizer.tau_max, 100)
def __init__(self):
    """Set up a 2D -> 1D parabola fixture (50 samples) and a t_WOptimizer."""
    self.real_dim = 2
    self.active_dim = 1
    self.no_samples = 50
    self.kernel = TripathyMaternKernel(self.real_dim, self.active_dim)

    # Parameters
    self.sn = 0.1
    self.W = self.kernel.sample_W()
    self.function = Parabola()

    # Ground-truth embedding: both input dims weighted equally, normalized
    # to unit norm.
    self.real_W = np.asarray([
        [1],
        [1]
    ])
    self.real_W = self.real_W / np.linalg.norm(self.real_W)

    self.X = np.random.rand(self.no_samples, self.real_dim)
    Z = np.dot(self.X, self.real_W)
    self.Y = self.function.f(Z.T).reshape(-1, 1)

    self.w_optimizer = t_WOptimizer(
        self.kernel,  # TODO: does the kernel take over the W?
        self.sn,
        # float(...) replaces np.asscalar, which was removed in NumPy 1.23.
        float(self.kernel.inner_kernel.variance),
        self.kernel.inner_kernel.lengthscale,
        self.X,
        self.Y
    )

    # Number of random restarts used by the test body.
    self.no_tries = 1000
def init(self):
    """Set up the fixture for the configured (real_dim, active_dim) pair.

    Chooses the benchmark function and its ground-truth embedding ``real_W``
    based on the dimensionality, builds the training data, and constructs
    the t_WOptimizer plus the Metrics helper.
    """
    self.real_dim = 2
    self.active_dim = 1
    self.no_samples = 75
    self.kernel = TripathyMaternKernel(self.real_dim, self.active_dim)

    # Hide the matrix over here!
    if self.real_dim == 3 and self.active_dim == 2:
        self.function = Camelback()
        self.real_W = np.asarray([
            [0, 1],
            [1, 0],
            [0, 0]
        ])
    elif self.real_dim == 2 and self.active_dim == 1:
        self.function = Parabola()
        self.real_W = np.asarray([
            [1],
            [1],
        ])
        self.real_W = self.real_W / np.linalg.norm(self.real_W)
    else:
        assert False, "W was not set!"

    self.sn = 0.1
    self.X = np.random.rand(self.no_samples, self.real_dim)
    Z = np.dot(self.X, self.real_W)
    self.Y = self.function.f(Z.T).reshape(-1, 1)

    self.w_optimizer = t_WOptimizer(
        self.kernel,
        self.sn,
        # float(...) replaces np.asscalar, which was removed in NumPy 1.23.
        float(self.kernel.inner_kernel.variance),
        self.kernel.inner_kernel.lengthscale,
        self.X,
        self.Y
    )

    # We create the following kernel just to have access to the sample_W function!
    # TripathyMaternKernel(self.real_dim)

    self.tries = 10
    self.max_iter = 1  # 150

    # NOTE: a stray bare `assert False` previously sat here, which made this
    # setup unconditionally crash and left the Metrics construction
    # unreachable. It has been removed.
    self.metrics = Metrics(self.no_samples)
def init(self):
    """Set up a 3D -> 2D Rosenbrock fixture and a t_WOptimizer."""
    self.real_dim = 3
    self.active_dim = 2
    self.no_samples = 5
    self.kernel = TripathyMaternKernel(self.real_dim, self.active_dim)
    self.W = self.kernel.sample_W()
    self.sn = 2.

    # Ground-truth embedding: permutes/selects input dims 1 and 2 into the
    # active subspace; dim 0 of the second column maps to input dim 2.
    self.function = Rosenbrock()  # (was assigned twice; duplicate removed)
    self.real_W = np.asarray([
        [0, 0],
        [0, 1],
        [1, 0]
    ])

    self.X = np.random.rand(self.no_samples, self.real_dim)
    Z = np.dot(self.X, self.real_W)
    self.Y = self.function.f(Z.T)

    self.w_optimizer = t_WOptimizer(
        self.kernel,
        self.sn,
        # float(...) replaces np.asscalar, which was removed in NumPy 1.23.
        float(self.kernel.inner_kernel.variance),
        self.kernel.inner_kernel.lengthscale,
        self.X,
        self.Y
    )
def __init__(self):
    """Set up a 2D -> 1D augmented-sinusoidal fixture on a grid of inputs.

    Inputs are laid out on a sqrt(no_samples) x sqrt(no_samples) cartesian
    grid rather than sampled randomly.
    """
    self.real_dim = 2
    self.active_dim = 1
    self.no_samples = 100
    self.kernel = TripathyMaternKernel(self.real_dim, self.active_dim)

    # Parameters
    self.sn = 0.1  # 1e-7 # 0.1
    self.W = self.kernel.sample_W()
    self.function = AugmentedSinusoidal()

    # Ground-truth embedding, normalized to unit norm:
    # [[0.9486833]
    #  [0.31622777]]
    self.real_W = np.asarray([
        [3],
        [1]
    ])
    self.real_W = self.real_W / np.linalg.norm(self.real_W)

    # Regular grid over the unit square (instead of random sampling).
    x_range = np.linspace(0., 1., int(np.sqrt(self.no_samples)))
    y_range = np.linspace(0., 1., int(np.sqrt(self.no_samples)))
    self.X = cartesian([x_range, y_range])
    # self.X = np.random.rand(self.no_samples, self.real_dim)

    Z = np.dot(self.X, self.real_W).reshape(-1, 1)
    self.Y = self.function.f(Z.T).reshape(-1, 1)
    # (Removed leftover debug prints of X.shape and Z.shape.)

    self.w_optimizer = t_WOptimizer(
        self.kernel,  # TODO: does the kernel take over the W?
        self.sn,
        # float(...) replaces np.asscalar, which was removed in NumPy 1.23.
        float(self.kernel.inner_kernel.variance),
        self.kernel.inner_kernel.lengthscale,
        self.X,
        self.Y
    )

    self.no_tries = 1000
    self.PLOT_MEAN = True