def run(test_problem,
        max_iterations: int,
        number_of_runs: int,
        file_prefix: str,
        tol=-1,
        visualisation=False,
        aPreCallback=None,
        aPostCallback=None):

    global g_test_problem
    global g_iterations

    g_test_problem = test_problem

    # Store the results for each optimisation method
    columns = ['Run', 'Methods']
    for i in range(test_problem.number_of_dimensions):
        columns.append("X_" + str(i))
    columns.append("Objective value")
    columns.append("Euclidean distance")
    columns.append("Evaluations")

    df = pd.DataFrame(columns=columns)

    for run_id in range(number_of_runs):
        print("Run #", run_id)

        # Create a random guess common to all the optimisation methods
        initial_guess = g_test_problem.initialRandomGuess()

        # Optimisation methods implemented in scipy.optimize
        methods = [
            'Nelder-Mead',
            'Powell',
            'CG',
            'BFGS',
            'L-BFGS-B',
            'TNC',
            'COBYLA',
            'SLSQP'
        ]

        for method in methods:
            g_test_problem.number_of_evaluation = 0

            optimiser = ScipyMinimize(g_test_problem,
                                      method,
                                      tol=tol,
                                      initial_guess=initial_guess)
            print("\tOptimiser:", optimiser.full_name)

            if not isinstance(aPreCallback, (str, type(None))):
                aPreCallback(optimiser, file_prefix, run_id)

            optimiser.setMaxIterations(max_iterations)

            if run_id == 0 and visualisation:
                optimiser.plotAnimation(
                    aNumberOfIterations=max_iterations,
                    aCallback=None,
                    aFileName=(file_prefix + "_" + optimiser.short_name + "_%d.png"))
            else:
                optimiser.run()

            df = appendResultToDataFrame(run_id, optimiser, df, columns, file_prefix)

            if not isinstance(aPostCallback, (str, type(None))):
                aPostCallback(optimiser, file_prefix, run_id)

        # Parameters for EA
        g_iterations = int(max_iterations / g_number_of_individuals)

        # Optimisation and visualisation
        g_test_problem.number_of_evaluation = 0
        optimiser = EvolutionaryAlgorithm(g_test_problem,
                                          g_number_of_individuals,
                                          initial_guess=initial_guess)
        print("\tOptimiser:", optimiser.full_name)

        if not isinstance(aPreCallback, (str, type(None))):
            aPreCallback(optimiser, file_prefix, run_id)

        # Set the selection operator
        #optimiser.setSelectionOperator(TournamentSelection(3));
        #optimiser.setSelectionOperator(RouletteWheel());
        optimiser.setSelectionOperator(RankSelection())

        # Create the genetic operators
        gaussian_mutation = GaussianMutationOperator(0.1, 0.3)
        elitism = ElitismOperator(0.1)
        new_blood = NewBloodOperator(0.0)
        blend_cross_over = BlendCrossoverOperator(0.6, gaussian_mutation)

        # Add the genetic operators to the EA
        optimiser.addGeneticOperator(new_blood)
        optimiser.addGeneticOperator(gaussian_mutation)
        optimiser.addGeneticOperator(blend_cross_over)
        optimiser.addGeneticOperator(elitism)

        if run_id == 0 and visualisation:
            optimiser.plotAnimation(
                aNumberOfIterations=g_iterations,
                aCallback=visualisationCallback,
                aFileName=(file_prefix + "_" + optimiser.short_name + "_%d.png"))
        else:
            for _ in range(1, g_iterations):
                optimiser.runIteration()
                visualisationCallback()

        df = appendResultToDataFrame(run_id, optimiser, df, columns, file_prefix)

        if not isinstance(aPostCallback, (str, type(None))):
            aPostCallback(optimiser, file_prefix, run_id)

        # Parameters for PSO
        # Optimisation and visualisation
        g_test_problem.number_of_evaluation = 0
        optimiser = PSO(g_test_problem,
                        g_number_of_individuals,
                        initial_guess=initial_guess)
        print("\tOptimiser:", optimiser.full_name)

        if not isinstance(aPreCallback, (str, type(None))):
            aPreCallback(optimiser, file_prefix, run_id)

        if run_id == 0 and visualisation:
            optimiser.plotAnimation(
                aNumberOfIterations=g_iterations,
                aCallback=visualisationCallback,
                aFileName=(file_prefix + "_" + optimiser.short_name + "_%d.png"))
        else:
            for _ in range(1, g_iterations):
                optimiser.runIteration()
                visualisationCallback()

        df = appendResultToDataFrame(run_id, optimiser, df, columns, file_prefix)

        if not isinstance(aPostCallback, (str, type(None))):
            aPostCallback(optimiser, file_prefix, run_id)

        # Optimisation and visualisation
        optimiser = PureRandomSearch(g_test_problem,
                                     max_iterations,
                                     initial_guess=initial_guess)
        print("\tOptimiser:", optimiser.full_name)

        if not isinstance(aPreCallback, (str, type(None))):
            aPreCallback(optimiser, file_prefix, run_id)

        g_test_problem.number_of_evaluation = 0

        if run_id == 0 and visualisation:
            optimiser.plotAnimation(
                aNumberOfIterations=max_iterations,
                aCallback=None,
                aFileName=(file_prefix + "_" + optimiser.short_name + "_%d.png"))
        else:
            for _ in range(max_iterations):
                optimiser.runIteration()

        df = appendResultToDataFrame(run_id, optimiser, df, columns, file_prefix)

        if not isinstance(aPostCallback, (str, type(None))):
            aPostCallback(optimiser, file_prefix, run_id)

        # Optimisation and visualisation
        g_test_problem.number_of_evaluation = 0
        optimiser = SimulatedAnnealing(g_test_problem,
                                       5000,
                                       0.04,
                                       initial_guess=initial_guess)
        print("\tOptimiser:", optimiser.full_name)

        optimiser.cooling_schedule = cooling

        if not isinstance(aPreCallback, (str, type(None))):
            aPreCallback(optimiser, file_prefix, run_id)

        if run_id == 0 and visualisation:
            optimiser.plotAnimation(
                aNumberOfIterations=max_iterations,
                aCallback=None,
                aFileName=(file_prefix + "_" + optimiser.short_name + "_%d.png"))
        else:
            for _ in range(1, max_iterations):
                optimiser.runIteration()
                #print(optimiser.current_temperature)

        df = appendResultToDataFrame(run_id, optimiser, df, columns, file_prefix)

        if not isinstance(aPostCallback, (str, type(None))):
            aPostCallback(optimiser, file_prefix, run_id)

    title_prefix = ""

    if g_test_problem.name != "":
        if g_test_problem.flag == 1:
            title_prefix = "Minimisation of " + g_test_problem.name + "\n"
        else:
            title_prefix = "Maximisation of " + g_test_problem.name + "\n"

    boxplot(df,
            'Evaluations',
            title_prefix + 'Number of evaluations',
            file_prefix + 'evaluations.pdf',
            False)

    boxplot(df,
            'Euclidean distance',
            title_prefix + 'Euclidean distance between\nsolution and ground truth',
            file_prefix + 'distance.pdf',
            False)

    plt.show()
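# Usage sketch (illustrative addition, kept commented out; not part of the original
# script). It assumes a test-problem class exposing the interface used by run() above
# (number_of_dimensions, initialRandomGuess(), number_of_evaluation, name, flag);
# "AckleyFunction" is only a hypothetical placeholder for whichever test problem the
# surrounding code base provides.
#
# if __name__ == '__main__':
#     run(AckleyFunction(2),      # hypothetical 2-D test problem
#         max_iterations=200,     # evaluation budget shared by all the optimisers
#         number_of_runs=15,      # number of repeats used for the box plots
#         file_prefix="ackley_",  # prefix of the generated PNG/PDF files
#         visualisation=False)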
# Make sure the mutation variance is up-to-date
gaussian_mutation.setMutationVariance(g_current_sigma)


# Optimisation and visualisation
optimiser = EvolutionaryAlgorithm(test_problem, g_number_of_individuals)

# Set the selection operator
#optimiser.setSelectionOperator(TournamentSelection(2));
#optimiser.setSelectionOperator(RouletteWheel());
optimiser.setSelectionOperator(RankSelection())

# Create the genetic operators
elitism = ElitismOperator(0.1)
new_blood = NewBloodOperator(0.1)
gaussian_mutation = GaussianMutationOperator(0.1, 0.2)
blend_cross_over = BlendCrossoverOperator(0.6, gaussian_mutation)

# Add the genetic operators to the EA
optimiser.addGeneticOperator(new_blood)
optimiser.addGeneticOperator(gaussian_mutation)
optimiser.addGeneticOperator(blend_cross_over)
optimiser.addGeneticOperator(elitism)

test_problem.number_of_evaluation = 0
optimiser.plotAnimation(g_iterations, visualisationCallback)
EA_number_of_evaluation = test_problem.number_of_evaluation
EA_solution = optimiser.best_solution

# Optimisation and visualisation
g_min_mutation_sigma = 0.01
g_current_sigma = g_max_mutation_sigma

# Set the selection operator
tournament_selection = TournamentSelection(2)
threshold_selection = ThresholdSelection(0.0,
                                         tournament_selection,
                                         round(0.25 * g_number_of_individuals))
optimiser.setSelectionOperator(threshold_selection)
#optimiser.setSelectionOperator(tournament_selection);
#optimiser.setSelectionOperator(RouletteWheel());
#optimiser.setSelectionOperator(RankSelection());

# Create the genetic operators
new_blood = NewBloodOperator(0.5)
gaussian_mutation = GaussianMutationOperator(0.5, 0.2)

# Add the genetic operators to the EA
optimiser.addGeneticOperator(new_blood)
optimiser.addGeneticOperator(gaussian_mutation)

g_iterations = round(max_iterations / g_number_of_individuals)

for i in range(g_iterations):
    print(i + 1, '/', g_iterations)

    # Compute the value of the mutation variance
    sigma = g_min_mutation_sigma + (g_iterations - 1 - i) / (
        g_iterations - 1) * (g_max_mutation_sigma - g_min_mutation_sigma)

    # When i increases, new_blood.probability decreases
# Set the selection operator
selection_operator = None

if args.selection[0] == "dual" or args.selection[0] == "tournament":
    selection_operator = tournament_selection
elif args.selection[0] == "threshold":
    selection_operator = ThresholdSelection(0, tournament_selection, 10)
else:
    raise ValueError(
        'Invalid selection operator "%s". Choose "threshold", "tournament" or "dual".'
        % (args.selection[0]))

optimiser.setSelectionOperator(selection_operator)

# Create the genetic operators
new_blood = NewBloodOperator(args.initial_new_blood_probability[0])
gaussian_mutation = GaussianMutationOperator(
    1.0 - args.initial_new_blood_probability[0],
    args.initial_mutation_variance[0])

# Add the genetic operators to the EA
optimiser.addGeneticOperator(new_blood)
optimiser.addGeneticOperator(gaussian_mutation)

# Show the visualisation
if args.visualisation:
    fig, ax = plt.subplots(7, 2)
    global_fitness_function.plot(fig, ax, 0, number_of_generation)

# Create a progress bar
bar = MyBar('Generation', max=number_of_generation)
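# Hypothetical sketch of the command-line options this fragment expects (the original
# argparse declarations are not shown here). Each option is assumed to use nargs=1,
# which is why the values are read with [0] above; names, types and defaults are
# guesses for illustration only.
#
# parser = argparse.ArgumentParser()
# parser.add_argument('--selection', nargs=1, type=str, default=['threshold'])
# parser.add_argument('--initial_new_blood_probability', nargs=1, type=float, default=[0.5])
# parser.add_argument('--initial_mutation_variance', nargs=1, type=float, default=[0.2])
# parser.add_argument('--visualisation', action='store_true')
# args = parser.parse_args()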
g_number_of_individuals = args.individuals
g_iterations = args.generations

g_max_mutation_sigma = args.max_mutation_sigma
g_min_mutation_sigma = args.min_mutation_sigma

objective_function = HandFunction(target_image, number_of_params)

optimiser = EvolutionaryAlgorithm(objective_function,
                                  g_number_of_individuals,
                                  initial_guess=initial_guess)

optimiser.setSelectionOperator(RankSelection())

# Create the genetic operators
elitism = ElitismOperator(args.elitism)
new_blood = NewBloodOperator(args.new_blood)
gaussian_mutation = GaussianMutationOperator(args.gaussian_mutation[0],
                                             args.gaussian_mutation[1])
blend_cross_over = BlendCrossoverOperator(args.blend_cross_over, gaussian_mutation)

# Add the genetic operators to the EA
optimiser.addGeneticOperator(new_blood)
optimiser.addGeneticOperator(gaussian_mutation)
optimiser.addGeneticOperator(blend_cross_over)
optimiser.addGeneticOperator(elitism)

for i in range(g_iterations):
    # Compute the value of the mutation variance
    sigma = g_min_mutation_sigma + (g_iterations - 1 - i) / (
        g_iterations - 1) * (g_max_mutation_sigma - g_min_mutation_sigma)
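# Worked example of the linear mutation-variance schedule used above (illustrative
# values only; the actual bounds come from the command-line arguments). Assuming
# g_min_mutation_sigma = 0.01, g_max_mutation_sigma = 0.5 and g_iterations = 5:
#
#   i = 0:  sigma = 0.01 + 4/4 * 0.49 = 0.5
#   i = 1:  sigma = 0.01 + 3/4 * 0.49 = 0.3775
#   i = 2:  sigma = 0.01 + 2/4 * 0.49 = 0.255
#   i = 3:  sigma = 0.01 + 1/4 * 0.49 = 0.1325
#   i = 4:  sigma = 0.01 + 0/4 * 0.49 = 0.01
#
# i.e. sigma decays linearly from g_max_mutation_sigma at the first generation down to
# g_min_mutation_sigma at the last one, so exploration shrinks as the run progresses.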