Example #1
    def test_calculate_updates():
        """Test calculate_updates method"""

        X = np.array([[0, 1, 0, 1], [0, 0, 0, 0], [1, 1, 1, 1], [1, 1, 1, 1],
                      [0, 0, 1, 1], [1, 0, 0, 0]])

        y = np.reshape(np.array([1, 1, 0, 0, 1, 1]), [6, 1])

        nodes = [4, 2, 1]

        fitness = NetworkWeights(X,
                                 y,
                                 nodes,
                                 activation=identity,
                                 bias=False,
                                 is_classifier=False,
                                 learning_rate=1)

        a = list(np.arange(8) + 1)
        b = list(0.01 * (np.arange(2) + 1))

        weights = a + b
        fitness.evaluate(weights)

        problem = ContinuousOpt(10, fitness, maximize=False)

        updates = problem.calculate_updates()

        update1 = np.array([[-0.0017, -0.0034], [-0.0046, -0.0092],
                            [-0.0052, -0.0104], [0.0014, 0.0028]])

        update2 = np.array([[-3.17], [-4.18]])

        assert (np.allclose(updates[0], update1, atol=0.001)
                and np.allclose(updates[1], update2, atol=0.001))
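The test snippets in this listing omit their import lines. A plausible preamble for running them, assuming the names are exposed as in the mlrose package these tests appear to come from (exact module paths can differ between mlrose and mlrose-hiive releases), would be:

    import numpy as np

    # Assumed (hypothetical) imports; adjust module paths to your mlrose version.
    from mlrose import (ContinuousOpt, OneMax, CustomFitness, hill_climb,
                        random_hill_climb, simulated_annealing, genetic_alg)
    from mlrose.neural import NetworkWeights
    from mlrose.activation import identity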
Example #2
    def test_random():
        """Test random method"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=4)

        rand = problem.random()

        assert (len(rand) == 5 and min(rand) >= 0 and max(rand) <= 4)
Example #3
    def test_reproduce_mut0():
        """Test reproduce method when mutation_prob is 0"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=1,
                                step=1)
        father = np.array([0, 0, 0, 0, 0])
        mother = np.array([1, 1, 1, 1, 1])

        child = problem.reproduce(father, mother, mutation_prob=0)

        assert (len(child) == 5 and sum(child) > 0 and sum(child) < 5)
Example #4
    def test_reproduce_mut1_range_gt_step():
        """Test reproduce method when mutation_prob is 1 and range is
        greater than step size"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=2,
                                step=1)
        father = np.array([0, 0, 0, 0, 0])
        mother = np.array([2, 2, 2, 2, 2])

        child = problem.reproduce(father, mother, mutation_prob=1)

        assert (len(child) == 5 and sum(child) > 0 and sum(child) < 10)
Example #5
    def test_random_neighbor_range_eq_step():
        """Test random_neighbor method when range equals step size"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=1,
                                step=1)

        x = np.array([0, 0, 1, 1, 1])
        problem.set_state(x)

        neigh = problem.random_neighbor()
        sum_diff = np.sum(np.abs(x - neigh))

        assert (len(neigh) == 5 and sum_diff == 1)
Example #6
    def test_hill_climb_continuous_min():
        """Test hill_climb function for a continuous minimization problem"""

        problem = ContinuousOpt(5, OneMax(), maximize=False)
        best_state, best_fitness, _ = hill_climb(problem, restarts=20)

        x = np.array([0, 0, 0, 0, 0])

        assert (np.array_equal(best_state, x) and best_fitness == 0)
Example #7
    def test_genetic_alg_continuous_min():
        """Test genetic_alg function for a continuous minimization problem"""

        problem = ContinuousOpt(5, OneMax(), maximize=False)
        best_state, best_fitness, _ = genetic_alg(problem, max_attempts=200)

        x = np.array([0, 0, 0, 0, 0])

        assert (np.allclose(best_state, x, atol=0.5) and best_fitness < 1)
Example #8
    def test_update_state_in_range():
        """Test update_state method where all updated values are within the
        tolerated range"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=20,
                                step=1)

        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)

        y = np.array([2, 4, 6, 8, 10])
        updated = problem.update_state(y)

        assert np.array_equal(updated, (x + y))
Example #9
    def test_simulated_annealing_continuous_min():
        """Test simulated_annealing function for a continuous minimization
        problem"""

        problem = ContinuousOpt(5, OneMax(), maximize=False)
        best_state, best_fitness, _ = simulated_annealing(problem,
                                                          max_attempts=50)

        x = np.array([0, 0, 0, 0, 0])

        assert (np.array_equal(best_state, x) and best_fitness == 0)
Example #10
    def test_update_state_outside_range():
        """Test update_state method where some updated values are outside the
        tolerated range"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=5,
                                step=1)

        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)

        y = np.array([2, -4, 6, -8, 10])
        updated = problem.update_state(y)

        z = np.array([2, 0, 5, 0, 5])

        assert np.array_equal(updated, z)
Example #11
    def test_random_neighbor_range_gt_step():
        """Test random_neighbor method when range greater than step size"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=2,
                                step=1)

        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)

        neigh = problem.random_neighbor()
        abs_diff = np.abs(x - neigh)
        abs_diff[abs_diff > 0] = 1

        sum_diff = np.sum(abs_diff)

        assert (len(neigh) == 5 and sum_diff == 1)
Example #12
    def test_random_hill_climb_continuous_max():
        """Test random_hill_climb function for a continuous maximization
        problem"""

        problem = ContinuousOpt(5, OneMax(), maximize=True)
        best_state, best_fitness, _ = random_hill_climb(problem,
                                                        max_attempts=10,
                                                        restarts=20)

        x = np.array([1, 1, 1, 1, 1])

        assert (np.array_equal(best_state, x) and best_fitness == 5)
Example #13
    def test_random_pop():
        """Test random_pop method"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=1,
                                step=1)
        problem.random_pop(100)

        pop = problem.get_population()
        pop_fitness = problem.get_pop_fitness()

        assert (np.shape(pop)[0] == 100 and np.shape(pop)[1] == 5
                and np.sum(pop) > 0 and np.sum(pop) < 500
                and len(pop_fitness) == 100)
Example #14
    def test_find_neighbors_range_gt_step():
        """Test find_neighbors method when range greater than step size"""

        problem = ContinuousOpt(5,
                                OneMax(),
                                maximize=True,
                                min_val=0,
                                max_val=2,
                                step=1)

        x = np.array([0, 1, 2, 1, 0])
        problem.set_state(x)
        problem.find_neighbors()

        neigh = np.array([[1, 1, 2, 1, 0], [0, 0, 2, 1, 0], [0, 2, 2, 1, 0],
                          [0, 1, 1, 1, 0], [0, 1, 2, 0, 0], [0, 1, 2, 2, 0],
                          [0, 1, 2, 1, 1]])

        assert np.array_equal(np.array(problem.neighbors), neigh)
Example #15
    def rate(self, co_type: str, co_value: float):
        """Calculates the preference of characteristic objects using the defined stochastic method.

        Parameters
        ----------
        co_type: str
            Type of preference values of objects characteristic of the COMET method
        co_value: float
            The value of preferences of objects characteristic of the COMET method
        Returns
        -------

        """

        model = Comet(self._criteria)
        model.generate_co()
        model.rate_co(co_type, co_value)

        dict_arg = {
            'model': model,
            'alternatives': self._alternatives,
            'preference': self._alternativesPreference
        }

        if self._stochasticMethod == "hill-climbing":
            problem = ContinuousOpt(model.get_co_len(),
                                    CustomFitness(self._mlrose_fitness,
                                                  **dict_arg),
                                    maximize=False,
                                    step=0.01)
            pos, _, cost_history = hill_climb(
                problem,
                max_iters=self._iterations,
                curve=True,
                init_state=model.get_co_preference())
            cost_history = np.abs(cost_history)

        elif self._stochasticMethod == "simulated-annealing":
            problem = ContinuousOpt(model.get_co_len(),
                                    CustomFitness(self._mlrose_fitness,
                                                  **dict_arg),
                                    maximize=False,
                                    step=0.01)
            pos, _, cost_history = simulated_annealing(
                problem,
                max_iters=self._iterations,
                curve=True,
                init_state=model.get_co_preference())
            cost_history = np.abs(cost_history)

        elif self._stochasticMethod == "pso":
            bound_max = np.ones(model.get_co_len())
            bound_min = np.zeros(model.get_co_len())
            bounds = (bound_min, bound_max)
            options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9}

            optimizer = single.GlobalBestPSO(n_particles=20,
                                             dimensions=model.get_co_len(),
                                             options=options,
                                             bounds=bounds)
            cost, pos = optimizer.optimize(self._pso_fitness, self._iterations,
                                           **dict_arg)
            cost_history = optimizer.cost_history

        else:
            raise ValueError(
                "Unknown stochastic optimization method: %s" %
                repr(self._stochasticMethod))

        return pos, cost_history
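The rate() method above relies on mlrose's CustomFitness wrapper, which forwards extra keyword arguments to a user-supplied fitness function, and on curve=True to obtain a cost history. A minimal, self-contained sketch of that pattern, using a made-up quadratic fitness function in place of self._mlrose_fitness, might look like:

    import numpy as np
    from mlrose import ContinuousOpt, CustomFitness, hill_climb

    # Hypothetical stand-in for self._mlrose_fitness: squared distance from
    # the state to a target vector.  The extra keyword argument is forwarded
    # by CustomFitness, mirroring the **dict_arg mechanism in rate() above.
    def distance_fitness(state, target):
        return float(np.sum((state - target) ** 2))

    target = np.array([0.2, 0.8, 0.5])
    fitness = CustomFitness(distance_fitness, target=target)

    # Same setup as the "hill-climbing" branch, on a toy 3-dimensional
    # space of preferences bounded to [0, 1].
    problem = ContinuousOpt(3, fitness, maximize=False,
                            min_val=0, max_val=1, step=0.01)

    pos, _, cost_history = hill_climb(problem,
                                      max_iters=200,
                                      curve=True,
                                      init_state=np.array([0.5, 0.5, 0.5]))
    cost_history = np.abs(cost_history)

The "pso" branch follows the same idea with pyswarms (from pyswarms import single); there the objective callback receives the whole swarm at once, one row per particle, and must return one cost per particle, with the cost history read from optimizer.cost_history afterwards.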