import numpy as np
import scipy.optimize
import scipy.stats


def pp_ackley(xi, x):
    # Return the scalar alpha that minimizes the Ackley function along the
    # projection direction xi. `ackley` and `alpha_bounds` are assumed to be
    # defined elsewhere in this module.
    lower = [-32.768] * len(xi)
    upper = [32.768] * len(xi)
    # Project the variable bounds onto xi to obtain scalar bounds for alpha
    lower, upper = alpha_bounds(xi, lower, upper)
    bounds = [(lower, upper)]  # a single bound for the scalar variable alpha
    res = scipy.optimize.differential_evolution(ackley,
                                                bounds,
                                                args=(xi, x),
                                                updating='immediate',
                                                disp=False,
                                                tol=0.001)
    return float(res.x[0])
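
# Usage sketch (illustration only, not part of the original module): since
# `ackley` is called as ackley(alpha, xi, x), pp_ackley presumably evaluates the
# Ackley function along the ray alpha * xi (offset by x) and returns the optimal
# alpha. The helper below is hypothetical and only demonstrates the call pattern.
def _pp_ackley_usage_example():
    xi = np.array([1.0, 0.0])   # vary only the first coordinate
    x = np.array([0.0, 0.5])    # fixed values for the remaining coordinates
    alpha_opt = pp_ackley(xi, x)
    return alpha_opt
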
def xi_grid(self,
            xi,
            x=None,
            alpha_grid_distribution=None,
            alpha_star=None,
            m=None,
            is_scaled=False):
    '''
    Creates a grid of m points along the projection direction xi.
    If a grid distribution concentrated on the feedback ('cauchy' or 'TGN') is
    selected, then a point alpha_star should also be provided; grid points will
    be concentrated around that point.
    '''
    if alpha_grid_distribution is None:
        alpha_grid_distribution = self.alpha_grid_distribution
    if m is None:
        m = self.m
    if is_scaled:
        alpha_min = 0
        alpha_max = 1
    else:
        lower = np.array([i[0] for i in self.original_bounds])
        upper = np.array([i[1] for i in self.original_bounds])
        alpha_min, alpha_max = alpha_bounds(xi, lower, upper)
    if alpha_grid_distribution == 'evenly':
        # Add random noise to the grid points to avoid singularity issues in the
        # data matrix. The number is the noise standard deviation as a fraction
        # of the length of the variable-bounds interval; 0.01 is a good
        # starting point.
        noise_level_gridpoints = 0.01
        epsilon_boundary = (alpha_max - alpha_min) * (
            noise_level_gridpoints / 2)  # offset away from the boundary
        epsilon_noise = np.abs(
            alpha_max - alpha_min
        ) * noise_level_gridpoints  # noise level scales with the interval length
        alpha = []
        while len(alpha) != m:
            alpha = np.linspace(alpha_min + epsilon_boundary,
                                alpha_max - epsilon_boundary,
                                num=m) + np.random.normal(0, epsilon_noise, m)
            alpha = np.clip(alpha, alpha_min, alpha_max)
            alpha = np.unique(alpha)  # delete duplicates
    elif alpha_grid_distribution == 'cauchy':
        # Grid points are drawn from a Cauchy distribution with location
        # alpha_star. The scale is a fraction of the variable-bounds interval
        # length; 0.07 is a good starting point.
        noise_level_gridpoints = 0.07
        alpha = []
        while len(alpha) != m:
            alpha = scipy.stats.cauchy.rvs(
                loc=float(alpha_star),
                scale=np.abs(alpha_max - alpha_min) * noise_level_gridpoints,
                size=m)
            alpha = np.clip(alpha, alpha_min, alpha_max)
            alpha = np.unique(alpha)  # delete duplicates
    elif alpha_grid_distribution == 'TGN':
        # Grid points are drawn from a Truncated Generalized Normal (TGN)
        # distribution with location parameter alpha_star and shape parameter
        # gamma. The speed parameter s in (0, 1] controls how fast the
        # distribution moves from uniform towards normal as
        # iteration_number ---> infinity.
        s = self.TGN_speed
        gamma = 3 / np.power(np.max([self.iter_number + 1 - self.D, 1]), s) + 2
        alpha = []
        while len(alpha) != m:
            alpha = TGN_sample(size=m,
                               gamma=gamma,
                               alpha=float(alpha_star),
                               x_min=alpha_min,
                               x_max=alpha_max)
            alpha = np.clip(alpha, alpha_min, alpha_max)
            alpha = np.unique(alpha)  # delete duplicates
    else:
        raise ValueError(
            'Unknown alpha_grid_distribution: {}'.format(alpha_grid_distribution))
    alpha.shape = (m, 1)
    xi = np.array(xi).reshape(1, self.D)
    xi_grid = np.matmul(alpha, xi)
    if x is None:
        # Remove zero columns, i.e. columns where xi_d = 0
        xi_column_indices = [
            j for j in range(self.D)
            if j not in np.where((xi_grid == 0).all(0))[0]
        ]
        xi_grid = xi_grid[:, xi_column_indices]
    else:
        # x_column_indices = np.where((xi_grid == 0).all(0))[0]
        # xi_grid[:, x_column_indices] = np.tile(x, (m, 1))
        xi_grid = xi_grid + np.tile(x, (m, 1))
    return xi_grid
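
# Minimal sketch (illustration only, not part of the original class) of the core
# operation xi_grid performs when x is given: an (m, D) matrix whose rows are the
# points alpha_i * xi + x, with the alphas spread over the projection interval.
# The helper name and the evenly spaced alphas are assumptions for illustration.
def _xi_grid_sketch(xi, x, alpha_min, alpha_max, m=5):
    alpha = np.linspace(alpha_min, alpha_max, num=m).reshape(m, 1)  # (m, 1) column
    xi = np.array(xi).reshape(1, -1)                                # (1, D) row
    return np.matmul(alpha, xi) + np.tile(x, (m, 1))                # (m, D) grid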