Example #1
def main():
    params = load_params()
    width = int(params[0])
    height = int(params[1])
    radius = float(params[2])
    min_coverage = int(params[3])
    rest_areas = eval("[%s]" % params[4])  # restriction areas
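    # Each restriction area is expected to be an (x_min, x_max, y_min, y_max) tuple of
    # integers, which is validated in the loop below.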
    if width < 2 or height < 2 or radius < 1 or min_coverage < 0 or min_coverage > 100:
        raise Exception("Wrong arguments")

    for (x_min, x_max, y_min, y_max) in rest_areas:
        if not (isinstance(x_min, int) and isinstance(x_max, int)
                and isinstance(y_min, int) and isinstance(y_max, int)):
            raise Exception('Input arguments should be integers')
        if x_min >= x_max or y_min >= y_max:
            raise Exception("Wrong rest_area arguments")
        if x_max > width or x_min < 0 or y_max > height or y_min < 0:
            raise Exception("Wrong rest_area arguments")

    Member.height = height
    Member.width = width
    Member.radius = radius
    Member.set_restricted_areas(rest_areas)
    Member.rest_areas = rest_areas

    precision = 1e-2  # to calculate intersect area
    start = time.perf_counter()
    (member, percent) = EvolutionaryAlgorithm.alg(width, height, radius,
                                                  min_coverage, precision,
                                                  rest_areas)
    stop = time.perf_counter()
    member.print_circles()

    formatted_percent = "{:.3f}".format(percent)
    print("Pokrycie tryskaczy wynosi " + formatted_percent + "%")
    print("Ilosc tryskaczy = " + str(member.circles.__len__()))
    print("Czas : " + str(stop - start))
Example #2
def run(test_problem,
        max_iterations: int,
        number_of_runs: int,
        file_prefix: str,
        tol=-1,
        visualisation=False,
        aPreCallback=None,
        aPostCallback=None):
    global g_test_problem
    global g_iterations

    g_test_problem = test_problem

    # Store the results for each optimisation method
    columns = ['Run', 'Methods']
    for i in range(test_problem.number_of_dimensions):
        columns.append("X_" + str(i))

    columns.append("Objective value")
    columns.append("Euclidean distance")
    columns.append("Evaluations")

    df = pd.DataFrame(columns=columns)

    for run_id in range(number_of_runs):

        print("Run #", run_id)

        # Create a random guess common to all the optimisation methods
        initial_guess = g_test_problem.initialRandomGuess()

        # Optimisation methods implemented in scipy.optimize
        methods = [
            'Nelder-Mead', 'Powell', 'CG', 'BFGS', 'L-BFGS-B', 'TNC', 'COBYLA',
            'SLSQP'
        ]

        for method in methods:
            g_test_problem.number_of_evaluation = 0

            optimiser = ScipyMinimize(g_test_problem,
                                      method,
                                      tol=tol,
                                      initial_guess=initial_guess)
            print("\tOptimiser:", optimiser.full_name)

            if not isinstance(aPreCallback, (str, type(None))):
                aPreCallback(optimiser, file_prefix, run_id)

            optimiser.setMaxIterations(max_iterations)

            if run_id == 0 and visualisation:
                optimiser.plotAnimation(
                    aNumberOfIterations=max_iterations,
                    aCallback=None,
                    aFileName=(file_prefix + "_" + optimiser.short_name +
                               "_%d.png"))
            else:
                optimiser.run()

            df = appendResultToDataFrame(run_id, optimiser, df, columns,
                                         file_prefix)

            if not isinstance(aPostCallback, (str, type(None))):
                aPostCallback(optimiser, file_prefix, run_id)

        # Parameters for EA
        g_iterations = int(max_iterations / g_number_of_individuals)
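        # Each EA generation evaluates g_number_of_individuals candidates, so the iteration
        # budget is divided by the population size to keep the total number of objective
        # evaluations roughly comparable to the single-solution optimisers above.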

        # Optimisation and visualisation
        g_test_problem.number_of_evaluation = 0
        optimiser = EvolutionaryAlgorithm(g_test_problem,
                                          g_number_of_individuals,
                                          initial_guess=initial_guess)
        print("\tOptimiser:", optimiser.full_name)
        if not isinstance(aPreCallback, (str, type(None))):
            aPreCallback(optimiser, file_prefix, run_id)

        # Set the selection operator
        #optimiser.setSelectionOperator(TournamentSelection(3));
        #optimiser.setSelectionOperator(RouletteWheel());
        optimiser.setSelectionOperator(RankSelection())

        # Create the genetic operators
        gaussian_mutation = GaussianMutationOperator(0.1, 0.3)
        elitism = ElitismOperator(0.1)
        new_blood = NewBloodOperator(0.0)
        blend_cross_over = BlendCrossoverOperator(0.6, gaussian_mutation)
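        # Note: the second argument of GaussianMutationOperator is presumably the mutation
        # variance, i.e. the quantity later updated via setMutationVariance() in
        # visualisationCallback (see Example #3).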

        # Add the genetic operators to the EA
        optimiser.addGeneticOperator(new_blood)
        optimiser.addGeneticOperator(gaussian_mutation)
        optimiser.addGeneticOperator(blend_cross_over)
        optimiser.addGeneticOperator(elitism)

        if run_id == 0 and visualisation:
            optimiser.plotAnimation(
                aNumberOfIterations=g_iterations,
                aCallback=visualisationCallback,
                aFileName=(file_prefix + "_" + optimiser.short_name +
                           "_%d.png"))

        else:
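            # visualisationCallback (defined in Example #3) shrinks the Gaussian mutation
            # variance after every generation.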
            for _ in range(1, g_iterations):
                optimiser.runIteration()
                visualisationCallback()

        df = appendResultToDataFrame(run_id, optimiser, df, columns,
                                     file_prefix)

        if not isinstance(aPostCallback, (str, type(None))):
            aPostCallback(optimiser, file_prefix, run_id)

        # Parameters for PSO

        # Optimisation and visualisation
        g_test_problem.number_of_evaluation = 0
        optimiser = PSO(g_test_problem,
                        g_number_of_individuals,
                        initial_guess=initial_guess)
        print("\tOptimiser:", optimiser.full_name)
        if not isinstance(aPreCallback, (str, type(None))):
            aPreCallback(optimiser, file_prefix, run_id)

        if run_id == 0 and visualisation:
            optimiser.plotAnimation(
                aNumberOfIterations=g_iterations,
                aCallback=visualisationCallback,
                aFileName=(file_prefix + "_" + optimiser.short_name +
                           "_%d.png"))

        else:
            for _ in range(1, g_iterations):
                optimiser.runIteration()
                visualisationCallback()

        df = appendResultToDataFrame(run_id, optimiser, df, columns,
                                     file_prefix)

        if not isinstance(aPostCallback, (str, type(None))):
            aPostCallback(optimiser, file_prefix, run_id)

        # Optimisation and visualisation
        optimiser = PureRandomSearch(g_test_problem,
                                     max_iterations,
                                     initial_guess=initial_guess)
        print("\tOptimiser:", optimiser.full_name)
        if not isinstance(aPreCallback, (str, type(None))):
            aPreCallback(optimiser, file_prefix, run_id)

        g_test_problem.number_of_evaluation = 0

        if run_id == 0 and visualisation:
            optimiser.plotAnimation(
                aNumberOfIterations=max_iterations,
                aCallback=None,
                aFileName=(file_prefix + "_" + optimiser.short_name +
                           "_%d.png"))
        else:
            for _ in range(max_iterations):
                optimiser.runIteration()

        df = appendResultToDataFrame(run_id, optimiser, df, columns,
                                     file_prefix)

        if not isinstance(aPostCallback, (str, type(None))):
            aPostCallback(optimiser, file_prefix, run_id)

        # Optimisation and visualisation
        g_test_problem.number_of_evaluation = 0

        optimiser = SimulatedAnnealing(g_test_problem,
                                       5000,
                                       0.04,
                                       initial_guess=initial_guess)
        print("\tOptimiser:", optimiser.full_name)
        optimiser.cooling_schedule = cooling
        if not isinstance(aPreCallback, (str, type(None))):
            aPreCallback(optimiser, file_prefix, run_id)

        if run_id == 0 and visualisation:
            optimiser.plotAnimation(
                aNumberOfIterations=max_iterations,
                aCallback=None,
                aFileName=(file_prefix + "_" + optimiser.short_name +
                           "_%d.png"))
        else:
            for _ in range(1, max_iterations):
                optimiser.runIteration()
            #print(optimiser.current_temperature)

        df = appendResultToDataFrame(run_id, optimiser, df, columns,
                                     file_prefix)

        if not isinstance(aPostCallback, (str, type(None))):
            aPostCallback(optimiser, file_prefix, run_id)

    title_prefix = ""

    if g_test_problem.name != "":
        if g_test_problem.flag == 1:
            title_prefix = "Minimisation of " + g_test_problem.name + "\n"
        else:
            title_prefix = "Maximisation of " + g_test_problem.name + "\n"

    boxplot(df, 'Evaluations', title_prefix + 'Number of evaluations',
            file_prefix + 'evaluations.pdf', False)

    boxplot(
        df, 'Euclidean distance',
        title_prefix + 'Euclidean distance between\nsolution and ground truth',
        file_prefix + 'distance.pdf', False)

    plt.show()
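A typical invocation of run(), assuming one of the benchmark problems used in the later examples and purely illustrative values for the remaining arguments:

run(AckleyFunction(),
    max_iterations=1000,
    number_of_runs=15,
    file_prefix="ackley_",
    visualisation=False)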
Example #3

def visualisationCallback():
    global g_current_sigma

    # Update the mutation variance so that it varies linearly from g_max_mutation_sigma to
    # g_min_mutation_sigma
    g_current_sigma -= (g_max_mutation_sigma -
                        g_min_mutation_sigma) / (g_iterations - 1)
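    # After g_iterations - 1 calls, g_current_sigma has decayed linearly from
    # g_max_mutation_sigma down to g_min_mutation_sigma.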

    # Make sure the mutation variance is up-to-date
    gaussian_mutation.setMutationVariance(g_current_sigma)


# Optimisation and visualisation
optimiser = EvolutionaryAlgorithm(test_problem, g_number_of_individuals)

# Set the selection operator
#optimiser.setSelectionOperator(TournamentSelection(2));
#optimiser.setSelectionOperator(RouletteWheel());
optimiser.setSelectionOperator(RankSelection())

# Create the genetic operators
elitism = ElitismOperator(0.1)
new_blood = NewBloodOperator(0.1)
gaussian_mutation = GaussianMutationOperator(0.1, 0.2)
blend_cross_over = BlendCrossoverOperator(0.6, gaussian_mutation)

# Add the genetic operators to the EA
optimiser.addGeneticOperator(new_blood)
optimiser.addGeneticOperator(gaussian_mutation)
Example #4
from BlendCrossoverOperator import *
from GaussianMutationOperator import *
from NewBloodOperator import *

# Import objective function
from TestProblem import *

# Create test problem
test_problem = TestProblem()

# Parameters for EA
number_of_individuals = 50
number_of_generation = 50

# Create the optimiser
optimiser = EvolutionaryAlgorithm(test_problem, number_of_individuals)

print("Initial best individual: ", optimiser.best_solution)

# Set the selection operator
#optimiser.setSelectionOperator(TournamentSelection(3));
#optimiser.setSelectionOperator(RouletteWheelSelection());
optimiser.setSelectionOperator(RankSelection())

# Create the genetic operators
elitism = ElitismOperator(0.1)
new_blood = NewBloodOperator(0.1)
gaussian_mutation = GaussianMutationOperator(0.1, 0.2)
blend_cross_over = BlendCrossoverOperator(0.6, gaussian_mutation)

# Add the genetic operators to the EA
Example #5
    if not isinstance(args.save_input_images, NoneType):
        global_fitness_function.saveInputImages(args.save_input_images[0])

    # Parameters for EA
    number_of_individuals = args.pop_size[0]
    number_of_generation = args.generations[0]

    # Log messages
    if not isinstance(args.logging, NoneType):
        logging.debug("Number of angles: %i", number_of_angles)
        logging.debug("Peak value for the Poisson noise: %f", peak_value)
        logging.debug("Number of individuals: %i", number_of_individuals)
        logging.debug("Number of generations: %i", number_of_generation)

    # Create the optimiser
    optimiser = EvolutionaryAlgorithm(global_fitness_function,
                                      number_of_individuals)

    # Default tournament size
    tournament_size = 2

    # The tournament size is always two for dual
    if args.selection[0] == "dual":
        tournament_size = 2
    # Update the tournament size if needed
    elif not isinstance(args.tournament_size, NoneType):
        if isinstance(args.tournament_size, int):
            tournament_size = args.tournament_size
        else:
            tournament_size = args.tournament_size[0]

    # Set the selection operator
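The args object used here and in several later examples is never defined in these excerpts. The sketch below shows one argparse setup that would produce the attributes accessed above; the option names come from the excerpts, while the types and defaults are assumptions (nargs=1 reproduces the args.xxx[0] indexing):

import argparse

NoneType = type(None)

parser = argparse.ArgumentParser()
parser.add_argument("--pop_size", type=int, nargs=1, default=[50])
parser.add_argument("--generations", type=int, nargs=1, default=[50])
parser.add_argument("--selection", nargs=1, default=["rank"])
parser.add_argument("--tournament_size", type=int, nargs=1)
parser.add_argument("--save_input_images", nargs=1)
parser.add_argument("--logging", nargs=1)
args = parser.parse_args()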
Example #6
        round(global_fitness_function.image.sum() / (256 * 2)))
    number_of_generation = args.generations[0]

    if not isinstance(args.initial_pop_size, NoneType):
        number_of_individuals = args.initial_pop_size[0]

    # Log messages
    if not isinstance(args.logging, NoneType):
        logging.debug("Number of angles: %i", number_of_angles)
        logging.debug("Peak value for the Poisson noise: %f", peak_value)
        logging.debug("Number of individuals: %i", number_of_individuals)
        logging.debug("Number of generations: %i", number_of_generation)

    # Create the optimiser
    optimiser = EvolutionaryAlgorithm(local_fitness_function,
                                      number_of_individuals,
                                      global_fitness_function)

    # Default tournament size
    tournament_size = 2

    # The tournament size is always two for dual
    if args.selection[0] == "dual":
        tournament_size = 2
    # Update the tournament size if needed
    elif not isinstance(args.tournament_size, NoneType):
        if isinstance(args.tournament_size, int):
            tournament_size = args.tournament_size
        else:
            tournament_size = args.tournament_size[0]
Example #7
        global_fitness.saveImage(
            parameter_set,
            file_prefix + optimiser.short_name + "_" + str(run_id) + ".txt")

    else:
        optimiser.objective_function.saveImage(
            optimiser.best_solution,
            file_prefix + optimiser.short_name + "_" + str(run_id) + ".txt")


# Optimisation and visualisation
global_fitness.number_of_evaluation = 0
local_fitness.number_of_evaluation = 0

g_number_of_individuals = global_fitness.number_of_lamps
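# One fly per lamp: the population size equals the number of lamps in the problem.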
optimiser = EvolutionaryAlgorithm(local_fitness, g_number_of_individuals,
                                  global_fitness)
optimiser.full_name = "Fly algorithm"
optimiser.short_name = "FA"

g_max_mutation_sigma = 0.1
g_min_mutation_sigma = 0.01

g_current_sigma = g_max_mutation_sigma

# Set the selection operator
tournament_selection = TournamentSelection(2)
threshold_selection = ThresholdSelection(0.0, tournament_selection,
                                         round(0.25 * g_number_of_individuals))

optimiser.setSelectionOperator(threshold_selection)
#optimiser.setSelectionOperator(tournament_selection);
Example #8
# Genetic operators
from ElitismOperator          import *
from BlendCrossoverOperator   import *
from GaussianMutationOperator import *
from NewBloodOperator         import *

g_number_of_individuals = 20;
g_iterations            = 40;

g_max_mutation_sigma = 0.1;
g_min_mutation_sigma = 0.01;


# Create an EA
optimiser = EvolutionaryAlgorithm(AckleyFunction(), g_number_of_individuals)

# Set the selection operator
#optimiser.setSelectionOperator(TournamentSelection(2));
#optimiser.setSelectionOperator(RouletteWheel());
optimiser.setSelectionOperator(RankSelection());

# Create the genetic operators
elitism = ElitismOperator(0.1);
new_blood = NewBloodOperator(0.3);
gaussian_mutation = GaussianMutationOperator(0.1, 0.4);
blend_cross_over = BlendCrossoverOperator(0.5, gaussian_mutation);

# Add the genetic operators to the EA
optimiser.addGeneticOperator(new_blood);
optimiser.addGeneticOperator(gaussian_mutation);

Example #9
    # Log messages
    if not isinstance(args.logging, NoneType):
        logging.debug("Weight: %f",                args.weight[0])
        logging.debug("Radius: %i",                args.radius[0])
        logging.debug("Room width: %i",            args.room_width[0])
        logging.debug("Room height: %i",           args.room_height[0])
        logging.debug("Number of lamps: %i",       args.number_of_lamps[0])
        logging.debug("Number of individuals: %i", number_of_individuals)
        logging.debug("Number of generations: %i", number_of_iterations)
        logging.debug("Problem size: %f", global_fitness_function.getProblemSize());

    # Create the optimiser
    optimiser = EvolutionaryAlgorithm(local_fitness_function,
        number_of_individuals, global_fitness_function);

    global_fitness_function.average_fitness_set.append(optimiser.average_objective_value);
    global_fitness_function.best_fitness_set.append(global_fitness_function.global_fitness_set[-1]);
    global_fitness_function.number_of_lamps_set.append(global_fitness_function.number_of_lamps_set[-1]);


    # Default tournament size
    tournament_size = 2;

    # The tournament size is always two for dual
    if args.selection[0] == "dual":
        tournament_size = 2;
    # Update the tournament size if needed
    elif not isinstance(args.tournament_size, NoneType):
        if isinstance(args.tournament_size, int):

Example #10
    # Log messages
    if not isinstance(args.logging, NoneType):
        logging.debug("Weight: %f",                args.weight[0])
        logging.debug("Radius: %i",                args.radius[0])
        logging.debug("Room width: %i",            args.room_width[0])
        logging.debug("Room height: %i",           args.room_height[0])
        logging.debug("Number of lamps: %i",       args.number_of_lamps[0])
        logging.debug("Number of individuals: %i", number_of_individuals)
        logging.debug("Number of generations: %i", number_of_iterations)
        logging.debug("Problem size: %f", global_fitness_function.getProblemSize());

    # Create the optimiser
    optimiser = EvolutionaryAlgorithm(global_fitness_function,
        number_of_individuals);

    global_fitness_function.average_fitness_set.append(optimiser.average_objective_value);
    global_fitness_function.best_fitness_set.append(global_fitness_function.global_fitness_set[-1]);
    global_fitness_function.number_of_lamps_set.append(global_fitness_function.getNumberOfLamps(optimiser.best_solution.parameter_set));


    # Default tournament size
    tournament_size = 2;

    # The tournament size is always two for dual
    if args.selection[0] == "dual":
        tournament_size = 2;
    # Update the tournament size if needed
    elif not isinstance(args.tournament_size, NoneType):
        if isinstance(args.tournament_size, int):
Example #11
    global_fitness_without_fly = LP.computeFitnessFunction()

    if True:
        #if aSetOfGenes[2] > 0.5:
        LP.addLampToImage(int(aSetOfGenes[0]), int(aSetOfGenes[1]), 1)

        local_fitness = LP.global_fitness - global_fitness_without_fly
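        # A fly's local fitness is its marginal contribution: the global fitness with this
        # fly's lamp added minus the global fitness without it.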

        #print(global_fitness_without_fly, LP.global_fitness, local_fitness)

    return local_fitness


optimiser = EA.EvolutionaryAlgorithm(len(boundaries), boundaries,
                                     localFitnessFunction,
                                     g_number_of_individuals,
                                     LP.fitnessFunction)
tournament = TournamentSelection(2)
optimiser.setSelectionOperator(tournament)
print(optimiser.selection_operator)

elitism = ElitismOperator(0.0)
new_blood = NewBloodOperator(0.3)
gaussian_mutation = GaussianMutationOperator(0.5, 0.4)
blend_cross_over = BlendCrossoverOperator(0.1, gaussian_mutation)

optimiser.addGeneticOperator(new_blood)
optimiser.addGeneticOperator(gaussian_mutation)
optimiser.addGeneticOperator(blend_cross_over)
optimiser.addGeneticOperator(elitism)
Example #12

def visualisationCallback():
    global g_current_sigma;

    # Update the mutation variance so that it varies linearly from g_max_mutation_sigma to
    # g_min_mutation_sigma
    g_current_sigma -= (g_max_mutation_sigma - g_min_mutation_sigma) / (g_iterations - 1);

    # Make sure the mutation variance is up-to-date
    gaussian_mutation.setMutationVariance(g_current_sigma);
    print(g_current_sigma)


# Create an EA
optimiser = EvolutionaryAlgorithm(AckleyFunction(), g_number_of_individuals)

# Set the selection operator
#optimiser.setSelectionOperator(TournamentSelection(2));
#optimiser.setSelectionOperator(RouletteWheel());
optimiser.setSelectionOperator(RankSelection());

# Create the genetic operators
elitism = ElitismOperator(0.1);
new_blood = NewBloodOperator(0.3);
gaussian_mutation = GaussianMutationOperator(0.1, 0.4);
blend_cross_over = BlendCrossoverOperator(0.5, gaussian_mutation);

# Add the genetic operators to the EA
optimiser.addGeneticOperator(new_blood);
optimiser.addGeneticOperator(gaussian_mutation);
Example #13
    initial_guess = pd.read_csv(args.results_csv, usecols=['Parameters'])
    initial_guess = dataFrameToFloat(initial_guess['Parameters'][0])

    # for ini in range(len(args.initial_guess)):
    #     initial_guess.append(float(args.initial_guess[ini]));
    param = 'All'

    g_number_of_individuals = args.individuals
    g_iterations = args.generations

    g_max_mutation_sigma = args.max_mutation_sigma
    g_min_mutation_sigma = args.min_mutation_sigma

    objective_function = HandFunction(target_image, number_of_params)
    optimiser = EvolutionaryAlgorithm(objective_function,
                                      g_number_of_individuals,
                                      initial_guess=initial_guess)
    optimiser.setSelectionOperator(RankSelection())

    # Create the genetic operators
    elitism = ElitismOperator(args.elitism)
    new_blood = NewBloodOperator(args.new_blood)
    gaussian_mutation = GaussianMutationOperator(args.gaussian_mutation[0],
                                                 args.gaussian_mutation[1])
    blend_cross_over = BlendCrossoverOperator(args.blend_cross_over,
                                              gaussian_mutation)

    # Add the genetic operators to the EA
    optimiser.addGeneticOperator(new_blood)
    optimiser.addGeneticOperator(gaussian_mutation)
    optimiser.addGeneticOperator(blend_cross_over)