def ga_fitted():
    np.random.seed(10)
    clf = RandomForestClassifier(max_depth=3, random_state=2222)
    ga = GeneticAlgorithm(clf, 5, duration=0.5)
    iris = load_iris()
    ga.fit(iris.data, iris.target)
    return ga
def __init__(self, neural_network, population_size=100, number_of_generations=10,
             mutation_probability=0.01, number_of_elites=0):
    """
    Initialization function of the class ...

    Parameters
    ----------
    Specified in the class docstring

    Returns
    -------
    None

    Raises
    ------
    None
    """
    # Set the Neural Network
    self.neural_network = neural_network

    # Set the default range
    self.output_range = [-5, 5]

    # Chromosome length is derived from Neural Network
    chromosome_length = self.neural_network.number_of_parameters

    GeneticAlgorithm.__init__(self, population_size, number_of_generations,
                              mutation_probability, chromosome_length,
                              number_of_elites)

    # Generate some class variables
    self._chromosome_setting()
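# Illustrative only (not from the source): the __init__ above reads a single
# attribute from its neural_network argument, so a minimal stand-in could be:
#
#     class _StubNetwork:
#         number_of_parameters = 42
#
#     ga = NeuralNetworkGA(_StubNetwork(), population_size=50,
#                          number_of_elites=2)
#
# where NeuralNetworkGA is a hypothetical name for the subclass whose
# __init__ is shown above.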
def __init__(self, args):
    super().__init__(args)
    self._name = "q_MX_minfit_p15"
    self.max_uid = 0
    self._num_simulations = 5
    population_size = 30
    self.ga = GeneticAlgorithm(population_size, r_mutation=0.05, apex_stddev=0.25)
    # self.ga.generation = 60
    self.result_map = {}
    self.points_map = {}
    self.alive_map = {}
    # population_size = args.population_size
    # args.r_mutation = 0.4
    self._generation_info = []
    for i in range(self.ga.population_size):
        self._add_snake()
    self._load_genomes()
def simulate(params, target_sentence="Hello World"):
    np.random.seed(123)
    random.seed(123)

    MAX_GEN = params['max_gen']          # termination
    MAX_SUCCESS = params['max_success']  # termination
    POPULATION_SIZE = params['population_size']
    NUM_ATTRIBUTES = len(target_sentence)
    MUTATION_PROB = params['mutation_prob']
    CROSSOVER_PROB = params['crossover_prob']

    GA = GeneticAlgorithm(fitness_function,
                          num_attributes=NUM_ATTRIBUTES,
                          population_size=POPULATION_SIZE,
                          mutation_prob=MUTATION_PROB,
                          crossover_prob=CROSSOVER_PROB)
    GA.initialize_population()

    scores = []
    generation_counter = 0
    success_counter = 0
    while generation_counter < MAX_GEN and success_counter < MAX_SUCCESS:
        GA.compute_fitness_score()
        scores.append(np.mean(GA.fitness_scores))
        print('Generation', generation_counter,
              ", avg score:", scores[generation_counter],
              ", best:", GA.get_best())
        if GA.get_best() == target_sentence:
            success_counter += 1
        GA.run()
        generation_counter += 1
    return scores, generation_counter
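# The snippet above expects a `fitness_function` defined elsewhere. A minimal
# sketch of what such a function could look like for sentence matching (an
# assumption, not the original implementation): score a candidate string by
# the fraction of characters that already match the target.
def example_fitness_function(candidate, target_sentence="Hello World"):
    # Count position-wise character matches between candidate and target.
    matches = sum(1 for c, t in zip(candidate, target_sentence) if c == t)
    # Normalize so a perfect match scores 1.0.
    return matches / len(target_sentence)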
def __init__(self, clf, fold, duration=None, max_iter=None, base_included=True):
    """Init method.

    Args:
        clf : classifier object implementing 'fit'
            Classifier used for scoring new features.
        fold : int, cross-validation generator or an iterable
            Determines the cross-validation splitting strategy, see also
            http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.cross_val_score.html.
        duration : int
            Determines how many minutes the genetic algorithm runs.
        max_iter : int
            Determines how many iterations the genetic algorithm runs.
        base_included : bool
            Determines whether or not the base dataset is included during the
            evaluation of newly created features.
        ga : GeneticAlgorithm
            Object used for creating new sets of features.
    """
    self.clf = clf
    self.fold = fold
    self.duration = duration
    self.max_iter = max_iter
    self.base_included = base_included
    self.ga = GeneticAlgorithm(clf, fold, duration, max_iter, base_included)
def main():
    parser = argparse.ArgumentParser(
        description='Poorly solve the general assignment problem.',
        fromfile_prefix_chars='@')
    parser.add_argument('filename', metavar='filename', type=unicode,
                        help='a data file to parse and run on')
    parser.add_argument('--population', metavar='population', type=int,
                        default=100, help='how large to keep population')
    parser.add_argument('--evaluations', metavar='evaluations', type=int,
                        default=100000,
                        help='how many evaluations to run between restarts')
    parser.add_argument('--restarts', metavar='restarts', type=int, default=10,
                        help='how often to restart from a random point')
    args = parser.parse_args()

    POPULATION = args.population
    NUMBER_OF_EVALUATIONS = args.evaluations
    RESTARTS = args.restarts

    total_evaluations = 0
    top_genotypes = []
    for n in xrange(RESTARTS):
        print "restart", n, "out of {0}:".format(RESTARTS)
        print
        with open(args.filename) as f:
            ga = GeneticAlgorithm.from_file(f, fittest_found_callback)
        ga.generate_random_solutions(POPULATION)
        evolutions = NUMBER_OF_EVALUATIONS / POPULATION
        for _ in xrange(evolutions):
            ga.evolve_population()
        indexes = agent_indexes(ga.agents)
        top_genotypes.extend(assignment_indexes(solution)
                             for solution in ga.solution_pool)
        total_evaluations += ga.evaluations

    with open(args.filename) as f:
        ga = GeneticAlgorithm.from_file(f, fittest_found_callback)
    ga.solution_pool = [Solution(ga.agents, assignment_agents(genotype, ga.agents))
                        for genotype in top_genotypes]
    while len(ga.solution_pool) > 10:
        ga.double_population()
        ga.halve_population()
        ga.halve_population()
    print "population:", len(ga.solution_pool)

    print "all done:"
    print "total_evaluations: ", (ga.evaluations + total_evaluations)
    best_solution = sorted(ga.solution_pool, key=lambda s: s.total_cost)[0]
    fittest_found_callback(best_solution, ga)
def run():
    ''' Main program. '''
    args = get_args()
    rospy.init_node('pf', anonymous=True)

    # Load input
    sampling_points = np.genfromtxt(args['rewards'], delimiter=' ', dtype=float)

    print 'Starting Genetic Algorithm...'
    # Run the GA heuristic to get robot path.
    ga_op = GeneticAlgorithm(sampling_points, args['max_cost'],
                             args['gen'], args['pop'])
    path, cost, rewards = ga_op.run(
        (args['start_x'], args['start_y']),
        (args['end_x'], args['end_y']),
    )
    print 'Done.'

    dim = sampling_points.shape
    filename = 'path_{}x{}_maxc{}.txt'.format(dim[0], dim[1], args['max_cost'])
    with open(filename, 'w') as output_file:
        output_file.write('{}\n'.format(args['max_cost']))
        output_file.write('{}\n'.format(round(cost, 2)))
        output_file.write('{}\n'.format(round(rewards, 2)))
        for coord in path:
            output_file.write('{}\n'.format(coord))

    print_path(dim[0], dim[1], sampling_points, path, cost, rewards,
               args['max_cost'])

    if args['navigate'] == 0:
        return

    robot = Robot(RATE, QUEUE_SIZE)
    while not robot.is_ready():
        pass

    print 'Starting navigation through path:', path
    goal = None
    # Control robot through the path.
    while not rospy.is_shutdown() and len(path) > 0:
        if goal is None or not robot.bug2(goal[0], goal[1]):
            goal = path.pop(0)
    print 'Done.'
    return
def test_ga_init():
    clf = RandomForestClassifier(max_depth=3, random_state=2222)
    with pytest.raises(ValueError):
        ga = GeneticAlgorithm(clf, 5)
    with pytest.raises(ValueError):
        ga = GeneticAlgorithm(clf, 5, duration='s')
    with pytest.raises(ValueError):
        ga = GeneticAlgorithm(clf, 5, max_iter=0.5)
    with pytest.raises(ValueError):
        ga = GeneticAlgorithm(clf, 5, 1, 1)
    with pytest.raises(ValueError):
        ga = GeneticAlgorithm(clf, 5, base_included='s')
def train_ga(self, generations=90):
    flattened = self.weights_flattened()
    initial_solutions = np.random.uniform(low=-1.0, high=1.0,
                                          size=(20, len(flattened)))
    initial_solutions[0] = np.array(flattened, dtype=np.float64)
    ga = GeneticAlgorithm(solutions=initial_solutions,
                          num_parents_for_mating=4,
                          generations=generations,
                          fitness_func=self.fitness_func,
                          offspring_sz=4)
    ga.start()
    self.weights = self.weights_unflattened(ga.solutions[0])  # best solution
def main():
    # agent params
    params = {
        "size_pop": 100,
        "crossover_rate": 0.9,
        "mutation_rate": 0.03,
        "generations": 100,
        "agent_dimension": 10,
        "agent": Rastrigin
    }
    genetic_algorithm = GeneticAlgorithm(**params)
    solution = genetic_algorithm.start()
    print(solution)
    genetic_algorithm.plotGraphic()
def iniciar():
    calcular = CalcFitness()
    calcular.precision = 10
    # The coefficients of z
    calcular.get_limites()
    calcular.get_mj()
    poblacion = Poblacion(10, True, calcular)
    algorithm = GeneticAlgorithm(True)
    i = 0
    print(calcular.mj)
    print(calcular.aj)
    print(calcular.bj)
    print(calcular.longitud)
    for i in range(5000):
        print('Population: ' + str(i + 1))
        print(poblacion)
        poblacion = algorithm.get_next_generation(poblacion)
    print('Population: ' + str(i + 1))
    print(poblacion)
def testWithFakeFitness(table_len=1000, pop_size=100, chr_size=10,
                        generations_to_run=200, random_seed=None):
    np.random.seed(random_seed)
    fake_fitness_table = np.random.randn(table_len)

    def fake_fitness_function(chromosome):
        return np.mean(fake_fitness_table[chromosome])

    gene_set = list(range(table_len))
    selection = Selection(0.5)
    crossover = Crossover(kind=1)
    mutation = Mutation(0.5)

    optimizer = GeneticAlgorithm(gene_set, selection, crossover, mutation)
    optimizer.initializePopulation(chr_size, pop_size)

    random_optimizer = RandomSearch(gene_set)
    random_optimizer.initializePopulation(chr_size, pop_size)

    plt.grid()
    for i in range(generations_to_run):
        optimizer.step(fake_fitness_function)
        random_optimizer.step(fake_fitness_function)
        plt.scatter([i] * pop_size,
                    [fake_fitness_function(x) for x in optimizer.population],
                    c='b')
        plt.scatter(i, random_optimizer.best_fitness, c='g')
    plt.show()
def ga():
    np.random.seed(10)
    iris = load_iris()
    clf = RandomForestClassifier(max_depth=3, random_state=2222)
    ga = GeneticAlgorithm(clf, cv=5, duration=0.5)
    ga.X = np.asarray(iris.data)
    ga.y = np.asarray(iris.target)
    ga.y = ga.y.reshape(ga.y.shape[0], )
    ga.n_features = np.random.random_integers(10)
    return ga
class GeneticAlgorithmTestCase(unittest.TestCase):
    def setUp(self):
        self.ga = GeneticAlgorithm(threads=1, env_name="BipedalWalker-v3",
                                   max_episode_len=100, seed=42)

    def test_smoke_mutate(self):
        sigma = 0.002
        ind = self.ga.init_population(1)[0]
        mutated = self.ga.mutate(ind, sigma)

    def test_smoke_evaluate_fitness(self):
        ind = self.ga.init_population(1)[0]
        fitness = self.ga.evaluate_fitness(ind)

    @async_test
    async def test_smoke_async_evaluate_fitness(self):
        ind = self.ga.init_population(1)[0]
        task = asyncio.gather(
            asyncio.ensure_future(self.ga.evaluate_fitness(ind)),
            asyncio.ensure_future(self.ga.evaluate_fitness(ind)))
        await task
def testWithRealFitness(fitness_function, table_len, pop_size=100, chr_size=10,
                        generations_to_run=200, random_seed=None):
    gene_set = list(range(table_len))
    selection = Selection(0.5)
    crossover = Crossover(kind=1)
    mutation = Mutation(0.5)

    optimizer = GeneticAlgorithm(gene_set, selection, crossover, mutation)
    optimizer.initializePopulation(chr_size, pop_size)

    random_optimizer = RandomSearch(gene_set)
    random_optimizer.initializePopulation(chr_size, pop_size)

    plt.grid()
    for i in range(generations_to_run):
        optimizer.step(fitness_function)
        random_optimizer.step(fitness_function)
        plt.scatter([i] * pop_size,
                    [fitness_function(x) for x in optimizer.population],
                    c='b')
        plt.scatter(i, random_optimizer.best_fitness, c='g')
    plt.show()
    return optimizer
from ga import GeneticAlgorithm

ga = GeneticAlgorithm(elitism=False)
ga.run(1500)
class FeatureConstructor:
    """Create new features using a genetic algorithm."""

    def __init__(self, clf, fold, duration=None, max_iter=None, base_included=True):
        """Init method.

        Args:
            clf : classifier object implementing 'fit'
                Classifier used for scoring new features.
            fold : int, cross-validation generator or an iterable
                Determines the cross-validation splitting strategy, see also
                http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.cross_val_score.html.
            duration : int
                Determines how many minutes the genetic algorithm runs.
            max_iter : int
                Determines how many iterations the genetic algorithm runs.
            base_included : bool
                Determines whether or not the base dataset is included during
                the evaluation of newly created features.
            ga : GeneticAlgorithm
                Object used for creating new sets of features.
        """
        self.clf = clf
        self.fold = fold
        self.duration = duration
        self.max_iter = max_iter
        self.base_included = base_included
        self.ga = GeneticAlgorithm(clf, fold, duration, max_iter, base_included)

    def fit(self, X, y):
        """Fit estimator.

        Args:
            X : array-like
                The data to fit.
            y : array-like
                The target variable.
        """
        self.ga.fit(X, y)

    def get_params(self, ind='best'):
        """Print the best or most frequent set of new features.

        Args:
            ind : string, 'best' or 'most_freq'
                Determines which set of features to print.
        """
        self.ga.get_params(ind)

    def save(self, filename, ind='best'):
        """Save the best or most frequent set of features to a file.

        Args:
            filename : string
            ind : string, 'best' or 'most_freq'
                Determines which set of features to save to a file.
        """
        if ind == 'best' or ind == 'most_freq':
            self.ga.save(filename, ind)
        else:
            raise ValueError("ind must be 'best' or 'most_freq'.")

    def load(self, filename):
        """Load a set of features from a file.

        Args:
            filename : string

        Returns:
            Tuple with a set of features.
        """
        return self.ga.load(filename)

    def transform(self, X, individual):
        """Transform the dataset into a new one using the created features.

        Args:
            X : array-like
                The data to transform.
            individual : tuple
                Tuple with a set of features.

        Returns:
            New dataset, array-like.
        """
        return self.ga.transform(X, individual)

    def plot(self):
        """Plot data from the genetic algorithm."""
        self.ga.plot()
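# A minimal end-to-end sketch of driving FeatureConstructor, mirroring the
# iris/RandomForest setup used in the fixtures earlier in this collection.
# The file name 'features_iris.txt' is illustrative, not from the source.
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier

iris = load_iris()
clf = RandomForestClassifier(max_depth=3, random_state=2222)
fc = FeatureConstructor(clf, fold=5, duration=0.5)
fc.fit(iris.data, iris.target)           # evolve new feature sets
fc.get_params(ind='best')                # print the best set found
fc.save('features_iris.txt', 'best')     # persist it
best = fc.load('features_iris.txt')      # reload the tuple of features
X_new = fc.transform(iris.data, best)    # build the transformed dataset
fc.plot()                                # inspect GA progress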
def main():
    GA = GeneticAlgorithm(Settings.POPULATION_SIZE)
    GA.evolve(Settings.MAX_GENERATIONS)
        break
    try:
        chromosome = re.findall(r'[0-9,]+', line)[0]
    except IndexError:
        logger2.error("Check your {} chromosome!!".format(string))
        sys.exit(0)
    chromosome_bits = chromosome.split(',')
    # convert str to int
    buy_chromosome_bits = [int(x) for x in chromosome_bits]

    # (4.) create solution
    s = Solution()
    s.chromosome_bits = buy_chromosome_bits

    # (5.) compute fitness
    ga = GeneticAlgorithm(parameter_dict, testing_data_dict)
    # -(a) translate
    s.translate_chromosome_bits(ga.feature_pos_dict)
    # -(c) get the classified result in each day
    classification_result = s.get_classification_result(ga)
    if not classification_result:
        print("No stocks returned for {} chromosome".format(string))
        continue
    # -(d) compute fitness
    american_stock_fitness = AmericanStockFitness(parameter_dict)
    american_stock_fitness(testing_data_dict, s)

    testing_output_path = "test_data_result/{}_chromosome_testing.txt".format(string)
    print("testing_output_path: ", testing_output_path)

    Solution.compute_profit()
import time

from tqdm import tqdm
import matplotlib.pyplot as plt

from ga import GeneticAlgorithm

if __name__ == "__main__":
    phrase = "fire ball"  # The phrase we want the computer to match
    search_space = list(
        "abcdefghijklmnopqrstuvwxyz "
    )  # Our search space (to tell the computer its possible options)

    g_a = GeneticAlgorithm(search_space)

    # Perform experiment.
    run_iters = []
    run_time = []
    exp_start = time.time()
    for i in tqdm(range(10)):
        start_time = time.time()
        curr_iter = g_a.run(phrase)
        time_taken = time.time() - start_time
        # Append iterations taken.
        run_iters.append(curr_iter)
        # Append time taken.
        run_time.append(time_taken)

    print(
        f"Total time taken to run experiment: {time.time() - exp_start:.2f} seconds"
    )
# seed radius
IS = parameter_dict['DSGA']['IS']
# radius delta
SD = parameter_dict['DSGA']['SD']
seed_radius = SeedRadius(parameter_dict)

# (2.) put data into dict
formatter1 = Formatter(parameter_dict)
input_data_dict = formatter1.format_and_create_dict(
    formatter1.path, formatter1.feature_choice_list)
logger1.info("create input_data_dict successful")
# get the range of the feature value
formatter1.compute_chosen_feature_value_range()

# (3.) create initial parents
ga = GeneticAlgorithm(parameter_dict, input_data_dict)
ga.seed_radius = seed_radius
ga.create_initial_parents()

off_spring_generation = OffspringGeneration(parameter_dict)
big_loop = 100
while not ga.END:
    RLC = parameter_dict['DSGA']['RLC']
    for i in range(50):
        # (4.) offspring generation, return target, compute fitness
        current_solution_pool = off_spring_generation(Solution.all())
        ga.process_new_solutions(current_solution_pool)
        # (5.) compute shared fitness
        Solution.compute_shared_fitness(ga)
        # (6.) find seed solution
        Solution.find_seed_solution(ga)
            pipes.pipe_up) == 1:
        return True
    elif bird.rect.y < 0 or bird.rect.y > cs.HEIGHT:
        return True
    return False


def get_closest_pipe(pipes, x):
    for pipe in pipes:
        if pipe.pipe_down.x - x + cs.P_WIDTH > 0:
            return pipe


firstStart = True
gen = GeneticAlgorithm()


def play(daemon, firstStart=False, generation=1):
    counter = 0
    if firstStart:
        population = gen.population
    else:
        population = gen.crossover()
    fitness = 0
    deadcounter = 0
    pipes = [Pipe(randint(120, 400))]
from env_ones import Ones
from ga import GeneticAlgorithm

e = Ones()
par = {"crossover.type": "single",
       "elitism": False,
       "n.generations": 100,
       "n.individuals": 100,
       "p.crossover": 0.8,
       "p.mutation": 0.01,
       "selection.type": "tournament.selection",
       "tournament.size": 2,
       "type": "simple"}
g = GeneticAlgorithm(e, par)
print(g.population)
g.evolve()
print(g.population)
prob_weight_reset = 0.001
max_gen = 500
weight_range = 50.0
mut_range = 5.0
target_values = [10.0, 20.0]

ga = GeneticAlgorithm(len_indv=len_indv, pop_size=pop_size, num_par=num_par,
                      prob_mut=prob_mut, prob_xover=prob_xover,
                      prob_survival=prob_survival,
                      prob_weight_reset=prob_weight_reset, max_gen=max_gen,
                      weight_range=weight_range, mut_range=mut_range,
                      target_values=target_values, node_types=node_types,
                      n_nodes_input=n_nodes_input, n_nodes_hidden=n_nodes_hidden,
                      n_nodes_output=n_nodes_output, add_bias=add_bias,
                      out_ma_len=out_ma_len, tau=tau,
                      weight_epsilon=weight_epsilon, max_ticks=max_ticks)
ga.evolve()

# Housekeeping to save all pertinent experiment results:
script_path = os.getcwd()
exp_path = script_path + '\\' + exp_id
} """ """ par = {"crossover.type" : "single", "elitism" : False, "n.generations" : 100, "n.individuals" : 100, "p.crossover" : 0.8, "p.mutation" : 0.01, "selection.type" : "tournament.selection", "tournament.size" : 2, "type" : "simple", "sharing.domain" : "phenotype", "alpha.share" : 1, "theta.share" : 0.1, "fitness.sharing" : True } """ prom = tsp.energiaPromedio() sa = SimulatedAnnealing(tsp) best = list(sa.search(0.999, prom, prom / 1000, 100)[0]) tsp.showSA(best) tsp = TSP(20, True) ga = GeneticAlgorithm(tsp, par) tsp.show(ga.population) print(ga.population) ga.evolve() tsp.show(ga.population)
features_cols = train_x[0].shape[0]
features_rows = train_x[0].shape[1]
parameter_size = train_y[0].shape[0]

features = tf.placeholder(tf.float32, [None, features_cols, features_rows])
patches = tf.placeholder(tf.float32, [None, parameter_size])
prob_keep_input = tf.placeholder(tf.float32)
prob_keep_hidden = tf.placeholder(tf.float32)
batch_size = tf.placeholder(tf.int32)

warnings.simplefilter("ignore")

lstm = LSTM(features=features, labels=patches, batch_size=batch_size)
ga = GeneticAlgorithm(extractor=extractor, population_size=200,
                      percent_elitism_elites=5, percent_elitist_parents=5,
                      dna_length=(parameter_size), target_features=test_x,
                      feature_size=(features_cols * features_rows),
                      mutation_rate=0.01, mutation_size=0.1)
hill_climber = HillClimber(extractor=extractor, target_features=test_x,
                           feature_size=(features_cols * features_rows),
                           parameter_size=parameter_size, averaging_amount=4)
mlp = MLP(features=features, labels=patches, parameters=[50, 40, 30],
          prob_keep_input=prob_keep_input, prob_keep_hidden=prob_keep_hidden)
if parameter_size == 155:
    hier_mlp = RecursiveMLP(features=features, labels=patches,
                            parameters=[50, 40, 30],
target_values[0, 0] = 5.0
target_values[0, 1] = 15.0
#target_values = np.array([5.0, 10.0])

ga = GeneticAlgorithm(len_indv=len_indv, pop_size=pop_size, num_par=num_par,
                      prob_mut=prob_mut, prob_xover=prob_xover,
                      prob_survival=prob_survival,
                      prob_weight_reset=prob_weight_reset, max_gen=max_gen,
                      weight_range=weight_range, mut_range=mut_range,
                      target_values=target_values, node_types=node_types,
                      n_nodes_input=n_nodes_input, n_nodes_cortex=n_nodes_cortex,
                      n_nodes_hidden=n_nodes_hidden, n_nodes_output=n_nodes_output,
                      intralayer_connections_flag=intralayer_connections_flag,
                      add_bias=add_bias, out_ma_len=out_ma_len, tau=tau,
                      max_ticks=max_ticks, switch_tick=switch_tick,
                      bg_nodes=bg_nodes, bg_sync_freq=bg_sync_freq)
ga.evolve()

archive_results(exp_id, ga)
            table = randtable(n)
            tc = trueclauses(table, formula)
            if tc > local:
                local = tc
        if local > randsearch:
            randsearch = local
        print(randsearch)
    return randsearch


formula, n = readsat("uf100-01.cnf")
#print(formula, len(formula))

popsize = 60
epochs = 10000

print("# Global Optimum:", len(formula))
#print("randsearch", randomsearch(popsize, epochs, formula, n))

ga = GeneticAlgorithm(n, crossoverrate=0.99, mutationrate=(0.05, 0.01),
                      popsize=popsize, elitism=5, epochs=epochs)
ga.setfitness(fitness, formula)
ga.setgenometophenome(gtop)
ga.setphenometogenome(ptog)
solucao = ga.evolve()
print(solucao)
#Add a column of random numbers to the data
rand_col_verif = 100 * np.random.rand(len(x_verif))
x_verif = np.insert(x_verif, 2, rand_col_verif, axis=1)

"""
Segment 3: GA, I guess.
"""
#Run GA to find best weights
N_init_pop = 50
N_crossover = 100
N_selection = 50
improv_thresh = 1e-3

print("Step 1.")
weight_ga = GeneticAlgorithm(3, N_init_pop, mu=0.1)
weight_pop = weight_ga.get_population()
metric_array = np.empty(N_init_pop)
for i in range(len(weight_pop)):
    #Scale input data
    scaled_x_train = np.multiply(x_train, weight_pop[i])
    #Scale verification data
    scaled_x_verif = np.multiply(x_verif, weight_pop[i])
    #Method 1
    reg = KNNRegressor(scaled_x_train, y_train, 5)
    neighbors = reg.find_all_neighbors(scaled_x_verif)
    nbh_std = reg.find_neighborhood_std(neighbors)
    metric_array[i] = nbh_std
#Update fitness in GA object
# ====================format ga_unittest_dict end=======================

# -----------------------get targets returned and fitness
# (1.) read para
reader1 = ReadParameters()
parameter_dict = reader1.read_parameters(para_file_path)

# (2.) put data into dict
formatter1 = Formatter(parameter_dict, path=data_file_path)
input_data_dict = formatter1.format_and_create_dict(
    formatter1.path, formatter1.feature_choice_list)
formatter1.compute_chosen_feature_value_range()

# (3.) create initial parents
ga = GeneticAlgorithm(parameter_dict, input_data_dict)

# (4.) create solution
s = Solution()
s.chromosome_bits = ga_unittest_dict['chromosome_bits']
# (a) translate chromosome bits list to decimal value
s.translate_chromosome_bits(ga.feature_pos_dict)
# (b) get the classified result in each day
s.get_classification_result(ga)
# (c) compute the fitness for solution
american_stock_fitness = AmericanStockFitness(parameter_dict)
american_stock_fitness(ga.input_data_dict, s)

# TESTING CODE
golden_return = ga_unittest_dict['golden_result']
golden_fitness = ga_unittest_dict['golden_fitness']
def ga_run(x_train, y_train, x_test, y_test, x_verif, y_verif, k):
    # Run GA to find best weights.
    N_init_pop = 50
    N_crossover = 50
    N_selection = 20
    improv_thresh = 1e-3

    _, nFeats = np.shape(x_train)
    weight_ga = GeneticAlgorithm(nFeats, N_init_pop, mu=0.1)
    weight_pop = weight_ga.get_population()
    metric_array = np.empty(N_init_pop)

    # Create the initial population.
    for i in range(len(weight_pop)):
        # Scale input data
        scaled_x_train = np.multiply(x_train, weight_pop[i])
        # Scale verification data
        scaled_x_verif = np.multiply(x_verif, weight_pop[i])
        # Regressor.
        reg = KNNRegressor(scaled_x_train, y_train, k)
        neighbors = reg.find_all_neighbors(scaled_x_verif)
        nbh_std = reg.find_neighborhood_std(neighbors)
        metric_array[i] = nbh_std

    # Update fitness in GA object.
    weight_ga.set_fitness(metric_array)
    weight_ga.selection(N_selection)

    new_best_metric = 2.5
    # while (best_metric - new_best_metric) > improv_thresh:
    count = 0
    while (count < 20):
        count += 1
        best_metric = new_best_metric
        # Crossover.
        weight_ga.crossover(N_crossover)
        # Get new population.
        weight_pop = weight_ga.get_population()
        metric_array = np.empty(N_crossover)
        # Evaluate and set fitness.
        for i in range(len(weight_pop)):
            # Scale input data
            scaled_x_train = np.multiply(x_train, weight_pop[i])
            # Scale verification data
            scaled_x_verif = np.multiply(x_verif, weight_pop[i])
            # Regressor.
            reg = KNNRegressor(scaled_x_train, y_train, k)
            neighbors = reg.find_all_neighbors(scaled_x_verif)
            nbh_std = reg.find_neighborhood_std(neighbors)
            metric_array[i] = nbh_std
        # Update fitness in GA object
        weight_ga.set_fitness(metric_array)
        # get_best_sol
        best_weights, new_best_metric = weight_ga.best_sol()
        #print("Metric of this iteration are: ", new_best_metric)
        weight_ga.selection(N_selection)

    # print("Best weights = ", best_weights, "\tBest metric = ", new_best_metric)

    # Test with scaling after GA
    # Concatenate training and verification sets.
    x_train = np.concatenate((x_train, x_verif), axis=0)
    y_train = np.concatenate([y_train, y_verif])

    # Print the results of KNN.
    reg = KNNRegressor(np.multiply(x_train, best_weights), y_train, k)
    y_pred = reg.predict(np.multiply(x_test, best_weights))
    mse_iter = skmse(y_test, y_pred)
    print("ga,knn,", k, ",", mse_iter)

    # Print the results of distance-weighted KNN.
    reg = DwKNNRegressor(np.multiply(x_train, best_weights), y_train, k)
    y_pred = reg.predict(np.multiply(x_test, best_weights))
    mse_iter = skmse(y_test, y_pred)
    print("ga,dknn,", k, ",", mse_iter)