import numpy as np
from scipy.stats import levy
from sklearn.preprocessing import MinMaxScaler
from mlrose import ContinuousOpt, GeomDecay, simulated_annealing
from mlrose.activation import relu
from mlrose.neural import NetworkWeights

# `predict`, `X_train`, `Y_train`, `X_test` and `Y_test` are assumed to be
# defined elsewhere in the project.


def EagleStrategyWithSA(model, iters, rate):
    """Eagle Strategy: Levy-flight global exploration, with simulated
    annealing as the local search on promising candidates."""
    mn = MinMaxScaler(feature_range=(-3, 3))
    r = levy.rvs(size=iters)  # Levy-distributed step sizes
    max_accuracy = 0
    final_weights = []
    count = 1
    for i in list(r):
        # Random candidate weights, scaled by the Levy step and
        # normalised to (-3, 3); flattened back to 1-D, since mlrose
        # expects a flat weight vector (fit_transform returns 2-D).
        weights = np.random.uniform(-1, 1, 120)
        weights = weights * i
        weights = mn.fit_transform(weights.reshape(-1, 1)).flatten()
        acc = predict(weights, X_test, Y_test, model)
        if acc > 0.5:  # promising region found: refine with SA
            clip_max = 5
            fitness = NetworkWeights(X_train, Y_train, [25, 4, 4, 1],
                                     relu, True, True, rate)
            problem = ContinuousOpt(120, fitness, maximize=False,
                                    min_val=-1 * clip_max,
                                    max_val=clip_max, step=rate)
            # print(problem.get_length(), len(weights))
            fitted_weights, loss = simulated_annealing(
                problem, schedule=GeomDecay(), max_attempts=50,
                max_iters=300, init_state=weights, curve=False)
            acc = predict(fitted_weights, X_test, Y_test, model)
            if acc > max_accuracy:
                max_accuracy = acc
                final_weights = fitted_weights
            if max_accuracy > 0.95:  # good enough: stop searching
                break
        count += 1
    return max_accuracy, count, final_weights
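# Usage sketch, not part of the original code: `model` and the data splits
# are assumed to already exist, and the argument values are illustrative.
best_acc, n_candidates, best_weights = EagleStrategyWithSA(
    model, iters=100, rate=0.1)
print(f'Best accuracy {best_acc:.3f} after {n_candidates} Levy candidates')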
import copy
import datetime

# `scan_file`, `greedy`, `get_solution_to_optimize`, the neighbour functions,
# `simulated_annealing`, `get_elapsed_time` and `write_output` are assumed to
# be defined elsewhere in the project.


def book_scanning(inputfile, algorithm, greedy_injection):
    # Scan the input file and save all the necessary info
    n_days, scores, libraries = scan_file("input/" + inputfile)
    t = datetime.datetime.now()  # start the timer

    solution = None
    if algorithm == 1:  # greedy algorithm
        all_libraries = copy.deepcopy(libraries)
        # Execute the greedy algorithm to get a solution
        solution = greedy(all_libraries, n_days, scores)
    else:
        solution = get_solution_to_optimize(inputfile, libraries, scores,
                                            n_days, greedy_injection)
        found_better = True
        if algorithm == 2:  # local search - first neighbour
            while found_better:
                found_better, solution = find_first_neighbour(
                    solution, libraries, scores)
        elif algorithm == 3:  # local search - best neighbour
            while found_better:
                found_better, solution = find_best_neighbour(
                    solution, libraries, scores)
        elif algorithm == 4:  # local search - random neighbour
            # Run random neighbour 30 times to give it a chance to find
            # a better solution
            for _ in range(30):
                new_solution = random_neighbour(solution, libraries,
                                                scores, n_days, True)
                # Accept the new solution if its score is higher than
                # the previous solution's
                if new_solution.score > solution.score:
                    print("Found better:", new_solution.score)
                    solution = new_solution
        elif algorithm == 5:  # simulated annealing
            solution = simulated_annealing(solution, libraries, scores,
                                           n_days)

    # Calculate the elapsed time and print the solution
    solution.print_solution(get_elapsed_time(t))
    write_output(inputfile, solution)  # write it to the respective output file
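# Usage sketch, not part of the original code: the input file name below is
# hypothetical. Algorithm codes: 1 = greedy, 2 = first-neighbour local search,
# 3 = best-neighbour, 4 = random-neighbour, 5 = simulated annealing.
book_scanning("a_example.txt", algorithm=5, greedy_injection=True)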
nb_coords = 25
nb_experiments = 1

print('Permutation\tTransport\tReverse\tAnnealing\tGenetic')

stopping_iteration = 500
# np.random.seed(0)
# sum_delta = 0
for i in range(nb_experiments):
    coords = generate_random_coords(nb_coords)
    random_solution_permutation, _, permutation_steps = randomized_improvement(
        coords, permutation, stopping_iteration)
    random_solution_transport, _, transport_steps = randomized_improvement(
        coords, transport, stopping_iteration)
    random_solution_reverse, _, reverse_steps = randomized_improvement(
        coords, reverse, stopping_iteration)
    annealing_solution, _, annealing_steps = simulated_annealing(
        coords, stopping_iteration)
    genetic_solution, _, genetic_steps = genetic(coords, stopping_iteration)
    # delta = (random_solution_reverse - annealing_solution) / random_solution_reverse
    # sum_delta += delta
    print(f'{random_solution_permutation:.0f}', end='\t\t')
    print(f'{random_solution_transport:.0f}', end='\t\t')
    print(f'{random_solution_reverse:.0f}', end='\t\t')
    print(f'{annealing_solution:.0f}', end='\t\t')
    print(f'{genetic_solution:.0f}', end='\n')
    # print(f'{delta:.2f}', end='\n')

# print(f'Average delta: {sum_delta/nb_experiments:.3f}')

# Draw plots for last experiment
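# A possible shape for the plotting step the comment above refers to,
# assuming each `*_steps` history holds one tour length per iteration
# (the original plotting code is not shown in this snippet):
import matplotlib.pyplot as plt

for steps, label in [(permutation_steps, 'Permutation'),
                     (transport_steps, 'Transport'),
                     (reverse_steps, 'Reverse'),
                     (annealing_steps, 'Annealing'),
                     (genetic_steps, 'Genetic')]:
    plt.plot(steps, label=label)
plt.xlabel('Iteration')
plt.ylabel('Tour length')
plt.legend()
plt.show()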
def fit(self, X, y=None, init_weights=None):
    """Fit neural network to data.

    Parameters
    ----------
    X: array
        Numpy array containing feature dataset with each row
        representing a single observation.

    y: array
        Numpy array containing data labels. Length must be same as
        length of X.

    init_weights: array, default: None
        Numpy array containing starting weights for algorithm.
        If :code:`None`, then a random state is used.
    """
    self._validate()

    # Make sure y is an array and not a list
    y = np.array(y)

    # Convert y to 2D if necessary
    if len(np.shape(y)) == 1:
        y = np.reshape(y, [len(y), 1])

    # Verify X and y are the same length
    if not np.shape(X)[0] == np.shape(y)[0]:
        raise Exception('The length of X and y must be equal.')

    # Determine number of nodes in each layer
    input_nodes = np.shape(X)[1] + self.bias
    output_nodes = np.shape(y)[1]
    node_list = [input_nodes] + self.hidden_nodes + [output_nodes]

    num_nodes = 0
    for i in range(len(node_list) - 1):
        num_nodes += node_list[i] * node_list[i + 1]

    if init_weights is not None and len(init_weights) != num_nodes:
        raise Exception('init_weights must be None or have length %d'
                        % (num_nodes,))

    # Set random seed
    if isinstance(self.random_state, int) and self.random_state > 0:
        np.random.seed(self.random_state)

    # Initialize optimization problem
    fitness = NetworkWeights(X, y, node_list,
                             self.activation_dict[self.activation],
                             self.bias, self.is_classifier,
                             learning_rate=self.learning_rate)

    problem = ContinuousOpt(num_nodes, fitness, maximize=False,
                            min_val=-1*self.clip_max,
                            max_val=self.clip_max,
                            step=self.learning_rate)

    if self.algorithm == 'random_hill_climb':
        fitted_weights = None
        loss = np.inf

        # Can't use the restart feature of the random_hill_climb function,
        # since we want to keep the initial weights in the range -1 to 1.
        for _ in range(self.restarts + 1):
            if init_weights is None:
                init_weights = np.random.uniform(-1, 1, num_nodes)

            if self.curve:
                current_weights, current_loss, fitness_curve = \
                    random_hill_climb(
                        problem,
                        max_attempts=self.max_attempts
                        if self.early_stopping else self.max_iters,
                        max_iters=self.max_iters,
                        restarts=0, init_state=init_weights,
                        curve=self.curve)
            else:
                current_weights, current_loss = random_hill_climb(
                    problem,
                    max_attempts=self.max_attempts
                    if self.early_stopping else self.max_iters,
                    max_iters=self.max_iters,
                    restarts=0, init_state=init_weights,
                    curve=self.curve)

            if current_loss < loss:
                fitted_weights = current_weights
                loss = current_loss

    elif self.algorithm == 'simulated_annealing':
        if init_weights is None:
            init_weights = np.random.uniform(-1, 1, num_nodes)

        if self.curve:
            fitted_weights, loss, fitness_curve = simulated_annealing(
                problem,
                schedule=self.schedule,
                max_attempts=self.max_attempts
                if self.early_stopping else self.max_iters,
                max_iters=self.max_iters,
                init_state=init_weights,
                curve=self.curve)
        else:
            fitted_weights, loss = simulated_annealing(
                problem,
                schedule=self.schedule,
                max_attempts=self.max_attempts
                if self.early_stopping else self.max_iters,
                max_iters=self.max_iters,
                init_state=init_weights,
                curve=self.curve)

    elif self.algorithm == 'genetic_alg':
        if self.curve:
            fitted_weights, loss, fitness_curve = genetic_alg(
                problem,
                pop_size=self.pop_size,
                mutation_prob=self.mutation_prob,
                max_attempts=self.max_attempts
                if self.early_stopping else self.max_iters,
                max_iters=self.max_iters,
                curve=self.curve)
        else:
            fitted_weights, loss = genetic_alg(
                problem,
                pop_size=self.pop_size,
                mutation_prob=self.mutation_prob,
                max_attempts=self.max_attempts
                if self.early_stopping else self.max_iters,
                max_iters=self.max_iters,
                curve=self.curve)

    else:  # Gradient descent case
        if init_weights is None:
            init_weights = np.random.uniform(-1, 1, num_nodes)

        if self.curve:
            fitted_weights, loss, fitness_curve = gradient_descent(
                problem,
                max_attempts=self.max_attempts
                if self.early_stopping else self.max_iters,
                max_iters=self.max_iters,
                curve=self.curve,
                init_state=init_weights)
        else:
            fitted_weights, loss = gradient_descent(
                problem,
                max_attempts=self.max_attempts
                if self.early_stopping else self.max_iters,
                max_iters=self.max_iters,
                curve=self.curve,
                init_state=init_weights)

    # Save fitted weights and node list
    self.node_list = node_list
    self.fitted_weights = fitted_weights
    self.loss = loss
    self.output_activation = fitness.get_output_activation()

    if self.curve:
        self.fitness_curve = fitness_curve

    return self
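# Usage sketch, not part of the original code: it assumes the `fit` method
# above belongs to an mlrose-style NeuralNetwork class; the synthetic dataset
# and the hyperparameter values are illustrative only.
import numpy as np
from mlrose import NeuralNetwork

X = np.random.uniform(size=(100, 4))
y = (X.sum(axis=1) > 2).astype(int)  # simple binary labels

nn = NeuralNetwork(hidden_nodes=[8], activation='relu',
                   algorithm='simulated_annealing', max_iters=500,
                   learning_rate=0.1, early_stopping=True,
                   max_attempts=50, random_state=3)
nn.fit(X, y)    # runs the method shown above
print(nn.loss)  # final training loss found by the optimizer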