Example #1
0
def gbest_reset():
    """Provide a GlobalBestPSO instance that was optimized briefly and
    then reset, so its default attribute values can be inspected."""
    hyperparams = {'c1': 0.5, 'c2': 0.7, 'w': 0.5}
    optimizer = GlobalBestPSO(10, 2, hyperparams)
    # A short run populates state; reset() should wipe it back to defaults
    optimizer.optimize(sphere_func, 10, verbose=0)
    optimizer.reset()
    return optimizer
Example #2
0
    def test_reset_best_pos_none(self):
        """Check that reset() clears best_pos back to None."""
        # Run a short optimization so best_pos gets populated first
        opt = GlobalBestPSO(5, 2, options=self.options)
        opt.optimize(sphere_func, 100, verbose=0)
        # Resetting must discard the recorded best position
        opt.reset()
        self.assertIsNone(opt.best_pos)
Example #3
0
 def test_ftol_effect(self):
     """Verify that setting ftol terminates the optimization early,
     leaving a cost history shorter than the requested iterations."""
     opt = GlobalBestPSO(10, 2, options=self.options, ftol=1e-1)
     opt.optimize(sphere_func, 2000, verbose=0)
     history = opt.get_cost_history
     # Early termination means fewer than 2000 recorded costs
     self.assertNotEqual(history.shape, (2000, ))
Example #4
0
    def test_reset_best_cost_inf(self):
        """Check that reset() restores best_cost to positive infinity."""
        # Run a short optimization so best_cost holds a finite value
        opt = GlobalBestPSO(5, 2, options=self.options)
        opt.optimize(sphere_func, 100, verbose=0)
        # After a reset the best cost must be back at its infinity default
        opt.reset()
        self.assertEqual(opt.best_cost, np.inf)
 def test_reset(self):
     """Verify that reset() restores both best_cost and best_pos."""
     opt = GlobalBestPSO(5, 2, options=self.options)
     opt.optimize(sphere_func, 100, verbose=0)
     # Clear all run state
     opt.reset()
     # best_cost returns to infinity and best_pos to None
     self.assertEqual(opt.best_cost, np.inf)
     self.assertIsNone(opt.best_pos)
Example #6
0
def test_global_kwargs(func):
    """Check that extra kwargs are forwarded to the objective function
    when they are supplied alongside the named optimize() arguments."""
    # Build a bounded optimizer over a 2-D search space in [-10, 10]
    options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2}
    upper = 10 * np.ones(2)
    lower = -1 * upper
    opt_ps = GlobalBestPSO(n_particles=100,
                           dimensions=2,
                           options=options,
                           bounds=(lower, upper))

    # Optimize, forwarding a=1 and b=100 through to the objective
    cost, pos = opt_ps.optimize(func,
                                1000,
                                print_step=10,
                                verbose=3,
                                a=1,
                                b=100)

    # Expected optimum: cost 0 at position (1, 1)
    assert np.isclose(cost, 0, rtol=1e-03)
    assert np.isclose(pos[0], 1.0, rtol=1e-03)
    assert np.isclose(pos[1], 1.0, rtol=1e-03)
 def test_global_correct_pos(self, options):
     """Check that the returned position matches the best cost recorded
     in the optimizer's history."""
     opt = GlobalBestPSO(n_particles=10, dimensions=2, options=options)
     cost, pos = opt.optimize(sphere, iters=5)
     # Locate the iteration with the lowest cost, then the best particle
     # within that iteration's swarm positions
     best_iter = np.argmin(opt.cost_history)
     best_particle = np.argmin(sphere(opt.pos_history[best_iter]))
     assert np.array_equal(opt.pos_history[best_iter][best_particle], pos)
Example #8
0
    def test_orct12_optimization(self):
        """Smoke-test the optimization pipeline: read CFA images, build
        the cost function, and run PSO end to end.

        The original version ended in a dead ``pass`` statement and made
        no assertion at all; the trailing ``pass`` is removed and a
        minimal sanity check on the result shape is added.
        """
        bayer = self.datasetUtils.readCFAImages()

        # Convert the Bayer data to a two's-complement float matrix
        twoComplement = self.datasetUtils.twoComplementMatrix(bayer)
        twoComplement = twoComplement.astype("float32")

        options = {'c1': 0.5, 'c2': 0.1, 'w': 0.9}
        optimizer = GlobalBestPSO(n_particles=10,
                                  dimensions=4,
                                  options=options)

        # Bind the image data into the objective ahead of the run
        costFunction = partial(self.opt_func, twoComplement=twoComplement)
        cost, pos = optimizer.optimize(costFunction, iters=30)

        # The optimizer searches a 4-D space, so the best position must
        # carry one entry per dimension.
        self.assertEqual(len(pos), 4)
Example #9
0
def test_global_missed_kwargs(func):
    """Check that omitting a required objective kwarg raises TypeError.

    The objective needs both ``a`` and ``b``; supplying only ``a`` must
    surface the missing-positional-argument error.
    """
    # setup optimizer
    options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2}

    x_max = 10 * np.ones(2)
    x_min = -1 * x_max
    bounds = (x_min, x_max)
    opt_ps = GlobalBestPSO(n_particles=100,
                           dimensions=2,
                           options=options,
                           bounds=bounds)

    # run it
    with pytest.raises(TypeError) as excinfo:
        opt_ps.optimize(func, 1000, print_step=10, verbose=3, a=1)
    # BUG FIX: the assertion used to sit inside the `with` block, after
    # the raising call, so it never executed. It must run after the
    # block, once excinfo has captured the exception.
    assert 'missing 1 required positional argument' in str(excinfo.value)
def test_global_wrong_kwargs(func):
    """Check that passing unknown kwargs to the objective raises TypeError."""
    # setup optimizer
    options = {"c1": 0.5, "c2": 0.3, "w": 0.9, "k": 2, "p": 2}

    x_max = 10 * np.ones(2)
    x_min = -1 * x_max
    bounds = (x_min, x_max)
    opt_ps = GlobalBestPSO(n_particles=100,
                           dimensions=2,
                           options=options,
                           bounds=bounds)

    # run it
    with pytest.raises(TypeError) as excinfo:
        opt_ps.optimize(func, 1000, c=1, d=100)
    # BUG FIX: the assertion was inside the `with` block after the
    # raising statement, so it never ran; it belongs outside the block.
    assert "unexpected keyword" in str(excinfo.value)
def test_global_no_kwargs(func):
    """Check the optimizer runs correctly when no extra args are
    forwarded to the objective function."""
    # Bounded optimizer over a 2-D search space in [-10, 10]
    options = {"c1": 0.5, "c2": 0.3, "w": 0.9, "k": 2, "p": 2}
    upper = 10 * np.ones(2)
    lower = -1 * upper
    opt_ps = GlobalBestPSO(n_particles=100,
                           dimensions=2,
                           options=options,
                           bounds=(lower, upper))

    # Optimize without any objective kwargs
    cost, pos = opt_ps.optimize(func, 1000)

    # Expected optimum: cost 0 at position (1, 1)
    assert np.isclose(cost, 0, rtol=1e-03)
    assert np.isclose(pos[0], 1.0, rtol=1e-03)
    assert np.isclose(pos[1], 1.0, rtol=1e-03)
Example #12
0
    def run(self, particles, print_step=100, iters=1000, verbose=3):
        """Run a PSO search over the hyperparameter space and persist the
        best position found when it beats the configured cutoff."""
        bounds = (np.array(self.param_minimums),
                  np.array(self.param_maximums))
        optimizer = GlobalBestPSO(n_particles=particles,
                                  dimensions=len(self.hyperparameters),
                                  options=self.options,
                                  bounds=bounds,
                                  init_pos=self.position)

        # Run the optimization with the configured cost function
        best_cost, best_position = optimizer.optimize(self.cost_func,
                                                      print_step=print_step,
                                                      iters=iters,
                                                      verbose=verbose)

        # Only positions scoring below the cutoff are worth persisting
        if best_cost < self.cutoff:
            self.save_position(best_cost, best_position)
        else:
            print('not high')
def test_global_kwargs_without_named_arguments(func):
    """Check that kwargs still reach the objective when optional named
    optimize() arguments (such as print_step) are left at defaults."""
    # Bounded optimizer over a 2-D search space in [-10, 10]
    options = {"c1": 0.5, "c2": 0.3, "w": 0.9, "k": 2, "p": 2}
    upper = 10 * np.ones(2)
    lower = -1 * upper
    opt_ps = GlobalBestPSO(n_particles=100,
                           dimensions=2,
                           options=options,
                           bounds=(lower, upper))

    # Only the objective kwargs a and b are supplied here
    cost, pos = opt_ps.optimize(func, 1000, a=1, b=100)

    # Expected optimum: cost 0 at position (1, 1)
    assert np.isclose(cost, 0, rtol=1e-03)
    assert np.isclose(pos[0], 1.0, rtol=1e-03)
    assert np.isclose(pos[1], 1.0, rtol=1e-03)
 def optimizer_history(self, options):
     """Return a GlobalBestPSO instance run for 1000 iterations so its
     history attributes can be inspected."""
     optimizer = GlobalBestPSO(10, 2, options=options)
     optimizer.optimize(sphere, 1000)
     return optimizer
Example #15
0
def test_ftol_effect(options):
    """Check that ftol stops the run before all 2000 iterations."""
    optimizer = GlobalBestPSO(10, 2, options=options, ftol=1e-1)
    optimizer.optimize(sphere_func, 2000, verbose=0)
    # An early stop leaves fewer than 2000 entries in the cost history
    assert np.array(optimizer.cost_history).shape != (2000, )
class ParticleSwarms(AbstractPlanner):
    """Planner that drives a pyswarms GlobalBestPSO optimizer, bridging
    its synchronous objective calls to the planner's ask/tell loop via
    the SUBMITTED_PARAMS / RECEIVED_VALUES queues."""

    def __init__(self,
                 goal='minimize',
                 max_iters=10**8,
                 options=None,
                 particles=10):
        """
        Particle swarm optimizer.

        Args:
            goal (str): The optimization goal, either 'minimize' or 'maximize'. Default is 'minimize'.
            max_iters (int): The maximum number of iterations for the swarm to search.
            options (dict): pyswarms hyperparameters with keys 'c1', 'c2'
                and 'w'. Defaults to {'c1': 0.5, 'c2': 0.3, 'w': 0.9}.
            particles (int): The number of particles in the swarm.
        """
        # BUG FIX: `options` used to be a mutable dict default argument,
        # shared across every instance of the class; build a fresh dict
        # per call instead (None sentinel pattern).
        if options is None:
            options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9}
        AbstractPlanner.__init__(**locals())
        self.has_optimizer = False
        self.is_converged = False

    def _set_param_space(self, param_space):
        # Record the search space; its length sizes the PSO dimensions.
        self.param_space = param_space

    def _tell(self, observations):
        """Feed the latest observed value back to the swarm's queue."""
        self._params = observations.get_params(as_array=False)
        self._values = observations.get_values(as_array=True,
                                               opposite=self.flip_measurements)
        if len(self._values) > 0:
            self.RECEIVED_VALUES.append(self._values[-1])

    def _priv_evaluator(self, params_array):
        """Objective handed to pyswarms: submit every particle's params,
        then block until one measurement per particle has arrived."""
        for params in params_array:
            params = self._project_into_domain(params)
            self.SUBMITTED_PARAMS.append(params)
        # Poll (with a short sleep) until the external loop has reported
        # a value for every particle in the swarm.
        while len(self.RECEIVED_VALUES) < self.particles:
            time.sleep(0.1)
        measurements = np.array(self.RECEIVED_VALUES)
        self.RECEIVED_VALUES = []
        return measurements

    @daemon
    def create_optimizer(self):
        """Build and run the GlobalBestPSO optimizer (runs as a daemon
        per the decorator; `is_converged` flags completion)."""
        from pyswarms.single import GlobalBestPSO
        self.optimizer = GlobalBestPSO(n_particles=self.particles,
                                       options=self.options,
                                       dimensions=len(self.param_space))
        cost, pos = self.optimizer.optimize(self._priv_evaluator,
                                            iters=self.max_iters)
        self.is_converged = True

    def _ask(self):
        """Return the next parameter vector suggested by the swarm."""
        if self.has_optimizer is False:
            self.create_optimizer()
            self.has_optimizer = True

        # Wait for the optimizer to submit candidates; if the run
        # converges meanwhile, fall back to the last observed params.
        while len(self.SUBMITTED_PARAMS) == 0:
            time.sleep(0.1)
            if self.is_converged:
                return ParameterVector().from_dict(self._params[-1])
        params = self.SUBMITTED_PARAMS.pop(0)
        return ParameterVector().from_array(params, self.param_space)
Example #17
0
def trained_optimizer():
    """Provide a GlobalBestPSO instance already optimized for 100
    iterations on the sphere function."""
    hyperparams = {"c1": 0.5, "c2": 0.3, "w": 0.9}
    opt = GlobalBestPSO(n_particles=10, dimensions=2, options=hyperparams)
    opt.optimize(sphere, iters=100)
    return opt
Example #18
0
def gbest_history():
    """Provide a GlobalBestPSO instance run for 1000 iterations so its
    history attributes can be checked."""
    hyperparams = {'c1': 0.5, 'c2': 0.7, 'w': 0.5}
    optimizer = GlobalBestPSO(10, 2, hyperparams)
    optimizer.optimize(sphere_func, 1000, verbose=0)
    return optimizer
 def optimizer_reset(self, options):
     """Return a GlobalBestPSO instance that has been run briefly and
     then reset."""
     optimizer = GlobalBestPSO(10, 2, options=options)
     optimizer.optimize(sphere, 10)
     # Wipe the run state before handing the instance back
     optimizer.reset()
     return optimizer