import cocoex
import numpy as np
from fstpso import FuzzyPSO


def fstpso(function: cocoex.Problem,
           population_size: int = 100,
           generations: int = 100,
           show_progress=False):
    """
    Run FST-PSO algorithm.
    :param function: Function to optimize.
    :param population_size: Number of particles in the swarm.
    :param generations: How many iterations to perform.
    :param show_progress: Whether to show progress (verbose logging).
    :return: Tuple of population in shape (generations, population_size, function.dimension) and evaluations in shape (generations, population_size).
    """
    populations = []
    values = []

    def _callback(fpso):
        populations.append(
            list(map(lambda particle: particle.B, fpso.Solutions)))
        values.append(
            list(
                map(lambda particle: particle.CalculatedFitness,
                    fpso.Solutions)))

    FPSO = FuzzyPSO()
    FPSO.set_search_space(
        np.stack([function.lower_bounds, function.upper_bounds], axis=1))
    FPSO.set_swarm_size(population_size)
    FPSO.set_fitness(function)
    FPSO.solve_with_fstpso(callback={
        'function': _callback,
        'interval': 1
    },
                           max_iter=generations - 2,
                           verbose=show_progress)
    return np.array(populations), np.array(values)
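
# Hedged usage sketch (not part of the original snippet): running the helper
# above on a single BBOB problem obtained through cocoex. The suite filter is
# an assumption; any cocoex.Problem works here.
if __name__ == '__main__':
    suite = cocoex.Suite("bbob", "",
                         "dimensions:10 function_indices:1 instance_indices:1")
    problem = next(iter(suite))  # first problem matching the filter
    populations, values = fstpso(problem, population_size=20, generations=50)
    # Per the docstring above: (generations, population_size, dimension)
    # and (generations, population_size).
    print(populations.shape, values.shape)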
Example #2

from sklearn.datasets import load_iris
from fstpso import FuzzyPSO
import numpy as np


def predict(X, params):
    # Reconstructed (assumption): the original signature and weight unpacking were
    # missing from this snippet; layout inferred from `dimensions` (4-20-3 net).
    W1, b1 = params[:80].reshape(4, 20), params[80:100]
    W2, b2 = params[100:160].reshape(20, 3), params[160:]

    z1 = X.dot(W1) + b1  # Pre-activation in Layer 1
    a1 = np.tanh(z1)  # Activation in Layer 1
    z2 = a1.dot(W2) + b2  # Pre-activation in Layer 2
    logits = z2  # Logits for Layer 2

    y_pred = np.argmax(logits, axis=1)
    return y_pred
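
# Hedged sketch (assumption): the parallel fitness `f` referenced below was not
# part of this snippet. It assumes fst-pso hands the whole list of candidate
# positions plus the `arguments` dict to the function and expects one fitness
# value per candidate (lower is better); the original may have used another loss.
def f(particles, arguments):
    x, y = arguments['x'], arguments['y']
    # misclassification rate of each candidate parameter vector
    return [np.mean(predict(x, np.array(p)) != y) for p in particles]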


if __name__ == '__main__':

    # Load the iris dataset
    data = load_iris()

    # Store the features as X and the labels as y
    X = data.data
    y = data.target

    dimensions = (4 * 20) + (20 * 3) + 20 + 3  # W1 + W2 + b1 + b2 of a 4-20-3 network

    FSTPSO = FuzzyPSO()
    FSTPSO.set_search_space([[-5, 5]] * dimensions)
    FSTPSO.set_parallel_fitness(f,
                                arguments={
                                    'x': data.data,
                                    'y': data.target
                                })
    FSTPSO.set_swarm_size(100)
    bestpos, bestf = FSTPSO.solve_with_fstpso(max_iter=1000)
    accuracy = (predict(data.data, np.array(bestpos.X)) == data.target).mean()
    print("Accuracy: %.3f" % (100 * accuracy))
def example_fitness(individual):
    # Reconstructed (assumption): the opening of this function was truncated in
    # the original example; the surviving tail printed the 22-dimensional
    # candidate as a bracketed, comma-separated list (rewritten here with join)
    # and returned its fitness value y, which was computed in the lost portion.
    print("[ " + ",".join(str(individual[i]) for i in range(22)) + " ]")
    print("------------------------")

    return y


#----------------------------------------------------------------------
if __name__ == "__main__":
    #dims = 22
    FP = FuzzyPSO()
    #FP.set_search_space( [[-3.0, 25.0]]*dims )
    FP.set_search_space(min_max_ind)
    FP.set_fitness(example_fitness)
    #FSTPSO.set_swarm_size(100)
    #bestpos, bestf = FSTPSO.solve_with_fstpso(max_iter=1000)
    result = FP.solve_with_fstpso(max_iter=10000)
    print("------------------------")
    print("Best solution:", result[0])
    print("Whose fitness is:", result[1])
#----------------------------------------------------------------------
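Example #4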
from pso_batch_runner import fitness
from fstpso import FuzzyPSO

if __name__ == "__main__":
	dump_best_fitness = "./optimization/best_fitness.txt"
	dump_best_solution = "./optimization/best_solution.txt"

	FP = FuzzyPSO()
	'''
	list_params[0] is the number of robots, which must be an integer
	list_params[1] is the radar radius, which must be an integer
	list_params[2] is alpha: how much the cost of the path influences
				   which cell the robot chooses
	list_params[3] is gamma: how much the utility of the neighborhood
				   is reduced when a robot reaches a cell

	Please note that the cast to integer is done in the fitness function
	'''
	FP.set_search_space([[1, 6], [1, 10], [0.001, 10], [0, 1]])
	FP.set_fitness(fitness, skip_test=False)
	result = FP.solve_with_fstpso(dump_best_fitness=dump_best_fitness,
	                              dump_best_solution=dump_best_solution)
	print("Best solution: " + str(result[0]))
	print("Whose fitness is: " + str(result[1]))
Example #5
    def _fstpso(self,
                data,
                n_clusters,
                max_iter=100,
                n_particles=None,
                m=2,
                path_fit_dump=None,
                path_sol_dump=None):
        #data: 2d array, size (N, S). N is the number of instances; S is the number of variables
        #n_clusters: number of clusters
        #max_iter: number of maximum iterations of FST-PSO, default is 100
        #n_particles: number of particles in the swarm, if None it is automatically set by FST-PSO
        #m: fuzzy clustering coefficient
        #path_fit_dump: path to the file where the best fitness score at each iteration will be dumped
        #path_sol_dump: path to the file where the best solution at each iteration will be dumped

        try:
            from fstpso import FuzzyPSO
        except ImportError:
            print(
                "ERROR: please, pip install fst-pso to use this functionality."
            )

        n_instances = data.shape[0]
        n_variables = data.shape[1]

        #set search space boundaries
        bounds = [0] * n_variables
        for i in range(n_variables):
            x = min([row[i] for row in data])
            y = max([row[i] for row in data])
            bounds[i] = [x, y]

        search_space = []
        for i in bounds:
            search_space.extend([i] * n_clusters)

        #initializing FST-PSO
        FP = FuzzyPSO()
        FP.set_search_space(search_space)
        if n_particles is not None: FP.set_swarm_size(n_particles)

        #generally better results are obtained with this rule disabled
        FP.disable_fuzzyrule_minvelocity()

        #fitness function definition
        def fitness(particle):
            particle = list(map(float, particle))
            centers = np.reshape(particle, (n_variables, n_clusters)).T

            #calculating fitness value of found solution
            dist = cdist(data, centers, metric='sqeuclidean')

            um = np.zeros(np.shape(dist))
            for i in range(np.shape(um)[0]):
                for j in range(np.shape(um)[1]):
                    um[i][j] = np.sum(
                        np.power(np.divide(dist[i][j], dist[i]),
                                 float(1 / (m - 1))))
            um = np.reciprocal(um)

            um_power = np.power(um, m)

            fitness_value = np.sum(np.multiply(um_power, dist))
            return fitness_value

        #fitness function setting
        FP.set_fitness(fitness, skip_test=True)

        #execute optimization
        result = FP.solve_with_fstpso(max_iter=max_iter,
                                      dump_best_fitness=path_fit_dump,
                                      dump_best_solution=path_sol_dump)

        #reshaping centers
        solution = list(map(float, result[0].X))
        centers = np.reshape(solution, (n_variables, n_clusters)).T

        #calculating membership matrix
        dist = cdist(data, centers, metric='sqeuclidean')
        um = np.zeros(np.shape(dist))
        for i in range(np.shape(um)[0]):
            for j in range(np.shape(um)[1]):
                um[i][j] = np.sum(
                    np.power(np.divide(dist[i][j], dist[i]),
                             float(1 / (m - 1))))
        partition_matrix = np.reciprocal(um)

        #final fitness value
        jm = result[1]

        return centers, partition_matrix, jm
Example #6
from fstpso import FuzzyPSO


def example_fitness(particle):
    return sum(map(lambda x: x**2, particle))


if __name__ == '__main__':

    dims = 10
    FP = FuzzyPSO()
    FP.set_search_space([[-10, 10]] * dims)
    FP.set_fitness(example_fitness)
    result = FP.solve_with_fstpso(max_iter=100)
    print "Best solution:", result[0]
    print "Whose fitness is:", result[1]
Example #7
    def fst_pso_feature_selection(self,
                                  max_iter=100,
                                  min_clusters=2,
                                  max_clusters=10,
                                  performance_metric='MAE',
                                  **kwargs):
        """
            Perform feature selection using the FST-PSO [1] variant of the Integer and Categorical 
            PSO (ICPSO) proposed by Strasser and colleagues [2]. ICPSO hybridizes PSO and Estimation of Distribution 
            Algorithm (EDA), which makes it possible to convert a discrete problem to the (real-valued) 
            problem of estimating the distribution vector of a probabilistic model. Each fitness 
            evaluation a random solution is generated according to the probability distribution 
            encoded by the particle. Because the implementation is a variant on FST-PSO, the optimal 
            settings for the PSO are set automatically.

            If the number of clusters is set to None, this method simultaneously choses the optimal 
            number of clusters.

            [1] Nobile, M. S., Cazzaniga, P., Besozzi, D., Colombo, R., Mauri, G., & Pasi, G. (2018). 
            Fuzzy Self-Tuning PSO: A settings-free algorithm for global optimization. Swarm and 
            evolutionary computation, 39, 70-85.
            
            [2] Strasser, S., Goodman, R., Sheppard, J., & Butcher, S. (2016). A new discrete 
            particle swarm optimization algorithm. In Proceedings of the Genetic and Evolutionary 
            Computation Conference 2016 (pp. 53-60). 
            
            Args:
                max_iter: The maximum number of iterations used in the PSO (default = 10).
                min_clusters: The minimum number of clusters to be identified in the data set (only 
                when nr_clusters = None)
                max_clusters: The maximum number of clusters to be identified in the data set (only 
                when nr_clusters = None)
                performance_metric: The performance metric on which each solution is evaluated (default
                Mean Absolute Error (MAE))
                **kwargs: Additional arguments to change settings of the fuzzy model.
                
            Returns:
                Tuple containing (selected_features, selected_feature_names, optimal_number_clusters)
                    - selected_features: The indices of the selected features.
                    - selected_feature_names: The names of the selected features.
                    - optimal_number_clusters: If initially nr_clusters = None, this argument encodes the optimal number of clusters in the data set. If nr_clusters is not None, the optimal_number_clusters is set to nr_clusters.

        """

        from fstpso import FuzzyPSO

        FP = FuzzyPSO()

        # Create the search space for feature selection with number of dimensions D
        D = np.size(self.dataX, 1)

        s = list([[True, False]] * D)

        # Add dimension for cluster number selection
        if self.nr_clus is None:
            s.append(list(range(min_clusters, max_clusters + 1)))

        # Set search space
        FP.set_search_space_discrete(s)

        # Set the fitness function
        args = {
            'x_train': self.dataX,
            'y_train': self.dataY,
            'verbose': self.verbose
        }
        FP.set_fitness(self._function, arguments=args)

        # solve problem with FST-PSO
        _, best_performance, best_solution = FP.solve_with_fstpso(
            max_iter=max_iter)

        if self.nr_clus is None:
            selected_features = best_solution[:-1]
        else:
            selected_features = best_solution

        # Show best solution with fitness value
        varnams = [
            i for indx, i in enumerate(self.variable_names)
            if selected_features[indx]
        ]
        print('The following features have been selected:', varnams, 'with a',
              self.performance_metric, 'of', round(best_performance, 2))

        if self.nr_clus is None:
            optimal_number_clusters = best_solution[-1]
        else:
            optimal_number_clusters = self.nr_clus

        return selected_features, varnams, optimal_number_clusters