Example #1
    def init_pheromone_matrix(self):
        """Init the pheromone matrix with nearest neighbor algorithm."""
        self.nn_solution = dynamic_nearest_neighbor(self.weights,
                                                    self.N_CITIES)
        self.nn_cost = dynamic_objfun(self.nn_solution, self.weights,
                                      self.N_CITIES)

        self.TAO_0 = 1 / (self.nn_cost * self.N_CITIES)
        self.pheromone_matrix = np.full((self.N_CITIES, self.N_CITIES),
                                        self.TAO_0)
Example #2
    def _compute_nn(self):
        """Inner function: update the nn_solution."""
        if self.is_static:
            self.nn_solution = nearest_neighbor(self.weights)
            self.nn_cost = static_objfun(self.nn_solution, self.weights,
                                         self.N_CITIES)
        else:
            self.nn_solution = dynamic_nearest_neighbor(self.weights)
            self.nn_cost = dynamic_objfun(self.nn_solution, self.weights,
                                          self.N_CITIES)
Example #3
    def helper_aio(self, k):
        """Adaptive inver-over step for ant k: refine its solution and adapt p_bi/p_gi."""
        original_cost = self.new_costs[k]
        epsilon_bi = epsilon_gi = 0

        for io_iteration in range(self.IO_MAX_ITER):
            c_old_best = self.new_costs[k]
            if random() < self.p_bi:
                new_best = inver_over(self.new_solutions[k], 1.0,
                                      self.new_solutions, self.weights,
                                      self.N_CITIES)
                c_new_best = dynamic_objfun(new_best, self.weights,
                                            self.N_CITIES)
                epsilon_bi = abs(c_new_best - c_old_best) / c_old_best
                self.global_n_bi_inversions[
                    self.iteration] += 1  # Increment the statistics.
            else:
                new_best = inver_over(self.new_solutions[k], 0.0,
                                      self.new_solutions, self.weights,
                                      self.N_CITIES)
                c_new_best = dynamic_objfun(new_best, self.weights,
                                            self.N_CITIES)
                epsilon_gi = abs(c_new_best - c_old_best) / c_old_best
                self.global_n_gi_inversions[
                    self.iteration] += 1  # Increment the statistics.

            # Accept the new solution if it improves on the old one.
            if c_new_best < c_old_best:
                self.new_solutions[k] = new_best
                self.new_costs[k] = c_new_best

        # Update the improvement statistics.
        if self.new_costs[k] < original_cost:
            self.ls_number_of_improvements[
                k,
                self.iteration] += 1  # increment the number of improvements.
            self.ls_rate_of_improvements[k, self.iteration] = abs(
                original_cost - self.new_costs[k])

        # Recompute p_bi and p_gi from the relative improvements.
        total = self.p_bi + epsilon_bi + self.p_gi + epsilon_gi
        self.p_bi = self.global_p_bi[self.iteration] = (self.p_bi + epsilon_bi) / total
        self.p_gi = self.global_p_gi[self.iteration] = 1 - self.p_bi
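The last lines are an adaptive operator-selection rule: p_bi and p_gi, the probabilities attached to the two inversion modes used above (the inver_over calls with second argument 1.0 and 0.0), are renormalised by the relative improvements each mode just produced, so the more successful mode is favoured in the next call. A standalone sketch of the same update with made-up numbers:

# Illustrative values only; 0.08 and 0.02 are hypothetical relative improvements.
p_bi, p_gi = 0.5, 0.5
epsilon_bi, epsilon_gi = 0.08, 0.02

p_bi = (p_bi + epsilon_bi) / (p_bi + epsilon_bi + p_gi + epsilon_gi)
p_gi = 1 - p_bi
print(round(p_bi, 3), round(p_gi, 3))  # 0.527 0.473: the better mode gains probability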
Example #4
    def init_population(self):
        """Randomly initialise the population: POP_SIZE individuals, each a random
        permutation of the N_CITIES cities."""
        self.population = []
        for i in range(self.POP_SIZE):
            self.population.append(
                random.sample(range(self.N_CITIES), self.N_CITIES))
            cost = dynamic_objfun(self.population[-1], self.weights,
                                  self.N_CITIES)
            if cost < self.best_cost:
                self.best_cost = cost
                self.best_solution = self.population[-1]
Example #5
    def compute_population_cost(self):
        """Evaluate every individual with the appropriate objective function."""
        objfun = static_objfun if self.is_static else dynamic_objfun
        self.population_cost = np.array(
            [objfun(individual, self.weights, self.N_CITIES)
             for individual in self.population])
Example #6
    def run_nn(self, iterations=5):
        """Run the nearest-neighbour construction several times and keep the best tour."""
        for i in range(iterations):
            if self.is_static:
                sol = nearest_neighbor(self.weights, self.N_CITIES)
                cost = static_objfun(sol, self.weights, self.N_CITIES)
            else:
                sol = dynamic_nearest_neighbor(self.weights, self.N_CITIES)
                cost = dynamic_objfun(sol, self.weights, self.N_CITIES)

            if cost < self.best_cost:
                self.best_sol = sol
                self.best_cost = cost
        return self.best_cost, self.best_sol
Example #7
    def inver_over_algorithm(self):
        """Main of the inver over algorithm."""
        # local best.
        best_loc_sol = None
        best_loc_cost = float('Inf')

        seed()
        for idx in range(self.POP_SIZE):
            individual_i = self.population[idx]
            cost_i = self.population_cost[idx]  # current cost of individual idx
            individual_tmp = individual_i.copy()
            g = choice(individual_tmp)  # select randomly a gene from individual_tmp

            while True:
                # select the gene g_prime.
                g_prime = self.city_selection(g, individual_tmp)

                # Stop when g is already adjacent to g_prime.
                if individual_tmp[(individual_tmp.index(g) + 1) %
                                  self.N_CITIES] == g_prime or individual_tmp[
                                      (individual_tmp.index(g) - 1) %
                                      self.N_CITIES] == g_prime:
                    break
                else:
                    self.inversion(individual_tmp, g, g_prime)

                # Check whether the inversion produced a better solution.
                if self.is_static:
                    cost_tmp = static_objfun(individual_tmp, self.weights,
                                             self.N_CITIES)
                else:
                    cost_tmp = dynamic_objfun(individual_tmp, self.weights,
                                              self.N_CITIES)

                if cost_tmp < cost_i:
                    self.population[idx] = individual_tmp
                    individual_i = individual_tmp
                    cost_i = cost_tmp
                    self.population_cost[idx] = cost_tmp

                # update local best
                if cost_i < best_loc_cost:
                    best_loc_sol = individual_i
                    best_loc_cost = cost_i

        return best_loc_cost, best_loc_sol
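Both inver-over examples call self.inversion(individual_tmp, g, g_prime), which is not included in these excerpts. In the classic inver-over operator the inversion reverses the tour segment that runs from the successor of g up to g_prime; a minimal in-place sketch under that assumption (the name and the list-based tour representation are illustrative):

def inversion_sketch(tour, g, g_prime):
    """Reverse, in place, the segment of `tour` that runs from the successor of g
    up to and including g_prime (plain-list sketch of the classic operator)."""
    n = len(tour)
    start = (tour.index(g) + 1) % n   # first city of the segment
    end = tour.index(g_prime)         # last city of the segment
    # Collect the segment's indices, wrapping around the end of the list.
    idx = []
    i = start
    while True:
        idx.append(i)
        if i == end:
            break
        i = (i + 1) % n
    # Write the segment back in reverse order, so g_prime ends up next to g.
    segment = [tour[j] for j in idx]
    for j, city in zip(idx, reversed(segment)):
        tour[j] = city

After such an inversion g_prime is the immediate successor of g, which is why both loops stop as soon as the two genes are already adjacent.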
Example #8
    def run_ants(self):
        """Main function. Run the ants."""
        while self.iteration < self.MAX_GENERATIONS:
            print(self.iteration)
            seed()  # new seed for each new iteration.
            # (re)init new solutions.
            self.new_solutions, self.available = init_ant_position(
                self.N_ANTS, self.N_CITIES)
            self.times = np.zeros(self.N_ANTS)

            self.new_costs = np.zeros(self.N_ANTS)
            for k in range(self.N_ANTS):
                t1 = time()
                self.state_transition_rule(k)
                self.times[k] = time() - t1

                self.new_costs[k] = dynamic_objfun(self.new_solutions[k],
                                                   self.weights, self.N_CITIES)

            idx_loc_min = np.argmin(self.new_costs)

            # Perform the local search on the best new solution.
            self.adaptive_inver_over(idx_loc_min)
            #self.local_search(idx_loc_min)

            # Pheromone update
            self.pheromone_update()

            # Archive this generation's solutions (used for the diversity statistics).
            for new_sol in self.new_solutions:
                self.solutions.append(new_sol)

            # Update the global best.
            if self.new_costs[idx_loc_min] < self.global_best_cost:
                self.global_best_cost = self.new_costs[idx_loc_min]
                self.global_best_solution = self.new_solutions[idx_loc_min]

            # update the statistics
            self.update_statistics()

            self.iteration = self.iteration + 1

        # print("Diversity_rate={}".format(diversity(self.solutions[0:len(self.solutions):10], len(self.solutions[0:len(self.solutions):10]), self.N_CITIES)))
        return self.global_best_cost, self.global_best_solution
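pheromone_update is not among these excerpts. The TAO_0 = 1 / (nn_cost * N_CITIES) initialisation in Example #1 is the usual Ant Colony System choice, and under the standard ACS global rule pheromone evaporates and is deposited only along the best tour found so far; a minimal sketch under that assumption (the name, signature, rho and the deposit amount are illustrative, not the repository's code):

def pheromone_update_sketch(pheromone, best_tour, best_cost, rho=0.1):
    """ACS-style global update (an assumed scheme, not necessarily the one used
    here): evaporate and deposit pheromone only on the best tour's edges."""
    n = len(best_tour)
    deposit = 1.0 / best_cost
    for i in range(n):
        a, b = best_tour[i], best_tour[(i + 1) % n]
        pheromone[a, b] = (1 - rho) * pheromone[a, b] + rho * deposit
        pheromone[b, a] = pheromone[a, b]  # keep the matrix symmetric
    return pheromone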
Example #9
    def inver_over_algorithm(self):
        """Main loop of the inver-over algorithm (variant with a stall counter)."""
        seed()
        for idx in range(self.POP_SIZE):
            # Stall counter: consecutive inversions without an improvement.
            timer = 0
            individual_tmp = self.population[idx].copy()
            g = choice(individual_tmp)  # select randomly a gene from individual_tmp

            length = len(individual_tmp)

            while timer < 100:
                # select the gene g_prime.
                g_prime = self.city_selection(g, individual_tmp)

                # Stop when g is already adjacent to g_prime.
                if individual_tmp[(individual_tmp.index(g) + 1) %
                                  length] == g_prime or individual_tmp[
                                      (individual_tmp.index(g) - 1) %
                                      length] == g_prime:
                    break
                else:
                    self.inversion(individual_tmp, g, g_prime)

                # Check whether the inversion produced a better solution.
                cost = dynamic_objfun(individual_tmp, self.weights,
                                      self.N_CITIES)
                if cost < self.population_cost[idx]:
                    self.population[idx] = individual_tmp
                    self.population_cost[idx] = cost
                    timer = 0
                else:
                    timer += 1

        best_loc_idx = np.argmin(self.population_cost)
        return self.population_cost[best_loc_idx], self.population[
            best_loc_idx]
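The other helper both variants rely on is self.city_selection(g, individual_tmp). In the standard inver-over operator g_prime is either a random city (with a small probability p) or the city that follows g in another, randomly chosen individual; a standalone sketch under that assumption (name, signature and the default p are illustrative):

from random import choice, random

def city_selection_sketch(g, individual, population, p=0.05):
    """Standard inver-over selection (an assumption about what city_selection does):
    with probability p pick a random other city from the current individual,
    otherwise take the city that follows g in a randomly chosen individual."""
    if random() < p:
        return choice([city for city in individual if city != g])
    other = choice(population)
    return other[(other.index(g) + 1) % len(other)]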