Example #1
# Batch testing with population size (the loop head is restored here; the
# exact range and step are an assumption, mirroring the matching test in
# Example #7 below)
pop_size_results = []
for ps in range(30):
    pop_size = 50 * (ps + 1)
    # Five repeat runs per population size
    for i in range(5):
        result = genetic_algorithm(fitness_function,
                                   True,
                                   DEFAULTS,
                                   funcs,
                                   population_size=pop_size)
        # A result of False marks a run that failed to find a solution
        pop_size_results.append({
            "x": pop_size,
            "y": f(result[1]) if result[1] is not False else False
        })

# In[81]:

plot_2d_batch_accuracy("Profit", "Population size", False, pop_size_results)

# In[92]:

# Raise the population-size default to 1500, since the last test suggests
# larger populations yield better results.
DEFAULTS.population_size = 1500

# Batch testing with fitness upper bound
fub_results = []
for f_u_b in range(20):
    fitness_upper_bound = (f_u_b + 1) * 0.05
    for i in range(5):
        result = genetic_algorithm(fitness_function,
                                   True,
                                   DEFAULTS,
                                   funcs,
                                   fitness_upper_bound=fitness_upper_bound)
        # (call completed by analogy with the population-size test above)
        fub_results.append({"x": fitness_upper_bound,
                            "y": f(result[1]) if result[1] is not False else False})
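
# The fitness upper bound varied here (0.05 up to 1.0) reads like the GA's
# early-stopping threshold. A sketch of how such a bound is typically checked
# each epoch; this is an assumption, since the author's genetic_algorithm is
# not shown:
def reached_fitness_bound(population, fitness_function, bound):
    # Stop evolving once the fittest individual meets the bound
    return max(fitness_function(ind) for ind in population) >= bound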
Example #2
# Batch test simulated annealing accuracy against the starting point (s_0).
# The loop head is restored here; the first two arguments are an assumption.
accuracy_with_respect_to_starting_position = []
for start in range(80, 120):
    s_a_result = simulated_annealing(f, start,
                                     neighbourhood_func, temp_reduc_func,
                                     acc_prob_func, stop_cond, max_i,
                                     max_epoch)[1]
    accuracy_with_respect_to_starting_position.append({
        "x": start,
        "y": s_a_result,
        "diff_to_target": abs(101 - s_a_result)
    })

# In[11]:

batch_plt.plot_2d_batch_accuracy("final solution (s_n)",
                                 "starting point (s_0)", 101,
                                 accuracy_wth_respect_to_starting_position)

# In[9]:

# Run simulated annealing over a range of starting temperatures and
# temperature-reduction gradients


def accuracy_with_temp(s_0):
    results = []
    for i in range(50):
        start_temp = i * 20 + 1
        for j in range(50):
            temp_gradient = (1 + j) / 52

            def linear_temp_reduction_f(t):
                # Body restored: a linear schedule drops the temperature by
                # the chosen gradient each epoch (an assumption from the name)
                return t - temp_gradient
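
# For context on "temperature reduction gradients": the two schedules most
# often paired with simulated annealing are linear and geometric decay. The
# helpers below are illustrative assumptions, not the author's code.
def linear_reduction(t, gradient):
    # t, t - g, t - 2g, ...: drop by a fixed amount each epoch
    return t - gradient

def geometric_reduction(t, alpha=0.95):
    # t, a*t, a^2*t, ...: drop by a fixed factor each epoch
    return t * alpha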
Example #3

# In[8]:


# Batch test gradient descent with different starting points
starting_point_results = []
for start_x in range(80, 120):
    result = gradient_descent(derived_problem_function, start_x, max_i, step_m, e_g, e_x)
    starting_point_results.append({"x": start_x, "y": result[1]})
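
# gradient_descent itself is defined earlier in the author's notebook; only
# its call signature is visible here. A minimal sketch consistent with that
# signature, assuming step_m is a step multiplier, e_g a gradient tolerance,
# and e_x a minimum-step tolerance (all assumptions):
def gradient_descent_sketch(df, x_0, max_i, step_m, e_g, e_x):
    x = x_0
    iterations = 0
    while iterations < max_i:
        g = df(x)
        if abs(g) < e_g:       # gradient flat enough: converged
            break
        step = step_m * g
        x = x - step           # step against the gradient
        iterations += 1
        if abs(step) < e_x:    # step too small to matter: converged
            break
    return iterations, x       # result[1] is the finishing point x_n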


# In[9]:


batch_plt.plot_2d_batch_accuracy("finish point (x_n)", "starting point (x_0)", 101, starting_point_results)


# In[10]:


# Batch test gradient descent with different max_iterations
starting_points = [80, 101, 120]
for starting_point in starting_points:
    iteration_results = []
    for max_iter in range(40):
        result = gradient_descent(derived_problem_function, starting_point, max_iter, step_m, e_g, e_x)
        iteration_results.append({"x": max_iter, "y": result[1]})

    print("Starting at {}".format(starting_point))
    batch_plt.plot_2d_batch_accuracy("finish point (x_n)", "iterations (max_i)", 101, iteration_results)
Example #4
# Batch testing taboo search from spiralled starting positions
end_point = []
starting_point_results = []
spiral = n_dim_spiral({"x1": 0, "x2": 0, "x3": 0, "x4": 0}, 1000, 0.1)
for j in range(len(spiral)):
    ps = spiral[j]
    result = taboo_search(f, True, constraints, DEFAULTS, s_0=ps)
    end_point.append(result[1])
    starting_point_results.append({
        "x": j,
        "y": f(result[1]) if result[1] is not False else False
    })
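
# n_dim_spiral is another of the author's helpers. One plausible reading,
# sketched under heavy assumptions: starting from the given centre point,
# emit `count` points walking outward in an Archimedean spiral over the
# first two dimensions, leaving the remaining coordinates fixed. The real
# helper may distribute points quite differently.
import math

def n_dim_spiral_sketch(centre, count, step):
    keys = list(centre)
    points = []
    for n in range(count):
        angle = radius = n * step
        point = dict(centre)
        point[keys[0]] = centre[keys[0]] + radius * math.cos(angle)
        point[keys[1]] = centre[keys[1]] + radius * math.sin(angle)
        points.append(point)
    return points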

# In[8]:

batch_plt.plot_2d_batch_accuracy("profit: f(s_n)", "starting point: s_0",
                                 False, starting_point_results)

# In[13]:

end_point[0:10]

# In[10]:

print_all_constraints(
    {
        'x1': 1.0999999999999999,
        'x2': 0.5,
        'x3': 1.0999999999999999,
        'x4': 1.0999999999999999
    }, constraints)
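
# print_all_constraints is also the author's helper; only the call site is
# visible. A plausible sketch, assuming each constraint is a predicate over
# the candidate solution dict:
def print_all_constraints_sketch(candidate, constraints):
    for constraint in constraints:
        print(constraint.__name__, constraint(candidate))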
Example #5
# In[6]:


# Imports my plotting module
import batch_plotting as batch_plt


# In[7]:


# Batch test taboo search with different starting points
starting_point_results = []
for start_x in range(80, 120):
    result = taboo_search(f, start_x, stop_function, taboo_memory, get_neighbourhood, False, stop_args={"max_i": max_i}, neighbourhood_args={"step_size": step_size})
    starting_point_results.append({"x": start_x, "y": result[1]})
batch_plt.plot_2d_batch_accuracy("finish point (x_n)", "starting point (x_0)", 101, starting_point_results)


# In[8]:


# Batch test taboo search with different max iterations (over 3 starting points)
starting_points = [80, 101, 120]
for starting_point in starting_points:
    iteration_results = []
    # Use a fresh loop variable so the max_i referenced above isn't clobbered
    for i in range(40):
        max_iter = (i + 1) * 5
        result = taboo_search(f, starting_point, stop_function, taboo_memory, get_neighbourhood, False, stop_args={"max_i": max_iter}, neighbourhood_args={"step_size": step_size})
        iteration_results.append({"x": max_iter, "y": result[1]})

    print("Starting at {}".format(starting_point))
Example #6
# Batch testing start position
starting_point_results = []
spiral = n_dim_spiral({"x1": 0, "x2": 0, "x3": 0, "x4": 0}, 2000, 0.05)
for i in range(len(spiral)):
    ps = spiral[i]
    result = gradient_descent(pds, constraints, DEFAULTS, x_0s=ps)
    starting_point_results.append({
        "x": i,
        "y": f(result[1]) if result[1] is not False else False
    })

# In[9]:

batch_plt.plot_2d_batch_accuracy("profit f(cx)", "starting point (cx)", False,
                                 starting_point_results)

# In[10]:

spiral[0]

# In[11]:

starting_point_results

# In[12]:

best = gradient_descent(pds,
                        constraints,
                        DEFAULTS,
                        x_0s=spiral[0])
# (any further keyword arguments to this call are cut off in the source)
Example #7
# In[20]:

from batch_plotting import plot_2d_batch_accuracy, plot_3d_batch_accuracy

# In[21]:

# Batch testing with population size
pop_size_results = []
for ps in range(30):
    pop_size = 50 * (ps + 1)
    result = genetic_algorithm(pop_size, EPOCHS, FITNESS_UPPER_BOUND,
                               selection_func, CROSS_OVER_AMOUNT,
                               MUTATION_CHANCE, SIGN_CHANGE_CHANCE)
    pop_size_results.append({"x": pop_size, "y": result[1]})

plot_2d_batch_accuracy("Final solution", "Population size", 101,
                       pop_size_results)
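
# This genetic_algorithm takes its whole configuration positionally. The
# skeleton below shows the loop such parameters usually drive; the
# problem-specific pieces are passed in explicitly, and the whole sketch is
# an assumption since the author's implementation is not shown.
import random

def genetic_algorithm_sketch(pop_size, epochs, fitness_upper_bound,
                             selection_func, cross_over_amount,
                             mutation_chance, sign_change_chance,
                             new_individual, fitness, crossover, mutate):
    population = [new_individual() for _ in range(pop_size)]
    best = max(population, key=fitness)
    for epoch in range(epochs):
        if fitness(best) >= fitness_upper_bound:
            break                       # early stop once the bound is met
        parents = selection_func(population, fitness)
        # Breed cross_over_amount children and mutate each one
        population = [mutate(crossover(random.choice(parents),
                                       random.choice(parents)),
                             mutation_chance, sign_change_chance)
                      for _ in range(cross_over_amount)]
        best = max(population + [best], key=fitness)
    return epochs, best                 # result[1] is the best individual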

# In[22]:

# Change the population-size default to 1000, since the last test suggests
# more individuals perform better.
POP_SIZE = 1000

# Batch testing with fitness upper bound
fub_results = []
for fub in range(20):
    fitness_upper_bound = (fub + 1) * 0.05
    result = genetic_algorithm(POP_SIZE, EPOCHS, fitness_upper_bound,
                               selection_func, CROSS_OVER_AMOUNT,
                               MUTATION_CHANCE, SIGN_CHANGE_CHANCE)
    fub_results.append({"x": fitness_upper_bound, "y": result[1]})

plot_2d_batch_accuracy("Final solution", "Fitness upper bound", 101,
                       fub_results)