def runGA(vector):
    algorithm_param = {'max_num_iteration': 100,\
                    'population_size': 500,\
                    'mutation_probability': .6,\
                    'elit_ratio': .02,\
                    'crossover_probability': .1,\
                    'parents_portion': .4,\
                    'crossover_type':'uniform',\
                    'max_iteration_without_improv':50}
    varbound =np.array([[0,1],[1,4],[0,0.8],[0,1],
                        [0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1]])
    #[V_gBurn,ng,debt_level,V_cng_p,farm1,farm2,farm3,farm4,farm5,farm6,farm7]
    start = timeit.default_timer()  
    # one entry per decision variable in varbound (11 in total)
    var_type = np.array([['real'], ['int'], ['real'], ['real'],
                         ['int'], ['int'], ['int'], ['int'], ['int'], ['int'], ['int']])
    model2=ga(function=biodigestor,\
            dimension=len(vector),\
            variable_type_mixed=var_type,\
            variable_boundaries=varbound,\
            function_timeout =600,\
            algorithm_parameters=algorithm_param)
    model2.run()
    stop = timeit.default_timer()
    print('Run time: ' + str(stop - start) + ' seconds')
    return model2
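The geneticalgorithm library calls the objective with a single 1-D NumPy array and minimizes the scalar it returns, so `biodigestor` must follow that signature. A minimal sketch with a placeholder body (the real example computes the plant economics elsewhere):

import numpy as np

def biodigestor(X):
    # X is laid out as [V_gBurn, ng, debt_level, V_cng_p, farm1, ..., farm7]
    # placeholder cost; the real objective is defined elsewhere in the project
    return float(np.sum(X ** 2))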
Example #2
def genetic(encodings, category):
    cnn = DenseNet169(include_top=False, input_shape=(224, 224, 3))
    image_classifier = load_model('image_model.h5')

    def f(X):
        total = X[0] * encodings[0]
        for i in range(1, len(X)):
            total += X[i] * encodings[i]

        decoded_image = (decode_image(total) * 255).astype(np.uint8)
        decoded_image = Image.fromarray(decoded_image)
        decoded_image = decoded_image.resize((224, 224))
        decoded_image = np.expand_dims(decoded_image, axis=0)
        decoded_image = preprocess_input(decoded_image)
        features = cnn.predict(decoded_image)
        answer = image_classifier.predict(features)[0][category]
        return -answer

    varbound = np.array([[0, 1]] * 10)

    model = ga(function=f,
               dimension=10,
               variable_type='real',
               variable_boundaries=varbound)
    model.run()
    return model.output_dict['variable']
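A hedged usage sketch of the returned blend weights, reusing `encodings` and `decode_image` from the snippet above (`category=3` is an arbitrary example value):

weights = genetic(encodings, category=3)
blended = sum(w * e for w, e in zip(weights, encodings))
optimized = (decode_image(blended) * 255).astype(np.uint8)
Image.fromarray(optimized).save('optimized.png')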
Example #3
def genetic_tcn():
    """TCN input/architecture genetic optimization"""
    varbound = np.array([
        [1, 6],  #histo lag 3, 4, 8, 16, 32, 64
        [4, 6],  #nb_filters 16, 32, 64
        [2, 8],  #kernel size from 2 to 8
        [1, 3]  #nb_stacks
    ])
    algorithm_parameters = {
        'max_num_iteration': None,
        'population_size': 100,  #default is 100
        'mutation_probability': 0.1,
        'elit_ratio': 0.01,
        'crossover_probability': 0.5,
        'parents_portion': 0.3,
        'crossover_type': 'uniform',
        'max_iteration_without_improv': None
    }
    optim = ga(function=rmse_tcn,
               dimension=4,
               variable_type='int',
               variable_boundaries=varbound,
               function_timeout=60 * 60,
               algorithm_parameters=algorithm_parameters)
    optim.run()
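The integer genes are indices rather than the hyper-parameter values themselves; judging by the comments on `varbound`, `rmse_tcn` presumably decodes them before building the TCN. One plausible decoding consistent with those comments (the model training itself is omitted):

HISTO_LAGS = [3, 4, 8, 16, 32, 64]

def decode_tcn_genes(X):
    histo_lag = HISTO_LAGS[int(X[0]) - 1]   # gene 1..6 -> lag 3..64
    nb_filters = 2 ** int(X[1])             # gene 4..6 -> 16, 32, 64
    kernel_size = int(X[2])                 # 2..8
    nb_stacks = int(X[3])                   # 1..3
    return histo_lag, nb_filters, kernel_size, nb_stacks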
Example #4
def calc_genetic(num, iterations, d):
    filename = "lab2_files/" + str(num) + ".txt"
    print(filename)
    n, m, data, abs_result = read_data_from_file_2(filename)

    def f(x):
        predict_y = list(map(lambda row: predict_with_one2(row, x), data))
        return nrmse(abs_result, predict_y)

    ln = m + 1
    varbound = np.array([[-d, d]] * ln)

    # algorithm_param = {'max_num_iteration': None,
    algorithm_param = {'max_num_iteration': iterations,
                       'population_size': 100,
                       'mutation_probability': 0.1,
                       'elit_ratio': 0.01,
                       'crossover_probability': 0.5,
                       'parents_portion': 0.3,
                       'crossover_type': 'uniform',
                       'max_iteration_without_improv': None}

    # model = ga(function=f, dimension=ln, variable_type='real', variable_boundaries=varbound)
    model = ga(function=f, dimension=ln, variable_type='real', variable_boundaries=varbound,
               algorithm_parameters=algorithm_param)

    model.run()
Example #5
def optimize(
    graph: RoutingGraphDirected,
    num_trucks: int,
    num_routes: int,
    time_weight: float,
    environmental_weight: float,
    distance_weight: float,
    cargo_weight: float,
):
    varbound = np.array([[0, len(graph.vertex_list) - 1]] * num_trucks *
                        num_routes)
    model = ga(
        function=lambda X: loss(
            X,
            num_trucks,
            num_routes,
            graph,
            time_weight,
            environmental_weight,
            distance_weight,
            cargo_weight,
        ),
        dimension=num_trucks * num_routes,
        variable_type="int",
        variable_boundaries=varbound,
    )
    model.run()
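The chromosome is a flat vector of `num_trucks * num_routes` vertex indices; `loss` would typically reshape it into one row per truck before scoring. A minimal sketch under the same argument order used above, with placeholder cost terms:

import numpy as np

def loss(X, num_trucks, num_routes, graph,
         time_weight, environmental_weight, distance_weight, cargo_weight):
    # one row of vertex indices per truck
    routes = np.asarray(X, dtype=int).reshape(num_trucks, num_routes)
    total = 0.0
    for truck_route in routes:
        # placeholder cost: the real loss would walk graph edges and weight
        # time, emissions, distance and cargo
        total += distance_weight * float(truck_route.sum())
    return total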
Example #6
def roda_genetic(a,b,k=0):
    """
    Função proxy para a utilização de algoritmos genéticos para o cálculo do custo
    para a transformação de uma string em outra
    :param a: palavra 1
    :param b: palavra 2
    :return: custo de transformação da string 1 em string 2 segundo o algoritmo genético
    """
    avalia = AvaliaMovimento(a,b)
    interacoes = len(a)*len(b)
    populacao_avaliada = max(math.factorial(len(a))/interacoes,2)
    if k==0:
        num_parametros = math.factorial(len(a))
    else:
        num_parametros=k
    varbound =  np.array([[0,3]]*num_parametros)
    algorithm_param = {'max_num_iteration': interacoes,\
                       'population_size':populacao_avaliada,\
                       'mutation_probability':0.1,\
                       'elit_ratio': 0.01,\
                       'crossover_probability': 0.7,\
                       'parents_portion': 0.3,\
                       'crossover_type':'uniform',\
                       'max_iteration_without_improv':max(interacoes,10)}
    model = ga(function=avalia.custo,dimension=num_parametros,variable_type='int',variable_boundaries=varbound\
               ,convergence_curve=False,progress_bar=False,\
               algorithm_parameters=algorithm_param)
    model.run()
    return model
Example #7
    def solve(self,
              population=100,
              mutation_probability=0.1,
              crossover_probability=0.5,
              crossover_type='uniform',
              parents_portion=0.3,
              max_iteration=None):
        params = {
            'max_num_iteration': max_iteration,
            'population_size': population,
            'mutation_probability': mutation_probability,
            'elit_ratio': (1 / population),
            'crossover_probability': crossover_probability,
            'parents_portion': parents_portion,
            'crossover_type': crossover_type,
            'max_iteration_without_improv': None
        }

        def target(x):
            penalty = 0
            for edge in self.edges:
                if x[edge[0] - 1] == x[edge[1] - 1]:
                    penalty += self.no_vertex
            return np.unique(x).size + penalty

        var_bounds = np.array([[1, self.no_vertex]] * self.no_vertex)
        model = ga(function=target,
                   dimension=self.no_vertex,
                   variable_type='int',
                   variable_boundaries=var_bounds,
                   algorithm_parameters=params)
        model.run()
        return model.best_function, model.best_variable, model.report
def minimize_genetic_abs_diff(space_vals, model_vals, tolerance_mm,
                              normal_vectors):
    (n, space_vals, space_vals_flat,
     model_vals) = extract_optimization_vars(space_vals, model_vals)
    a0 = extract_abs_diff_ratios(space_vals, model_vals)

    # get bounds
    varbound = np.around(
        extract_bounds(space_vals_flat, tolerance_mm, for_ga=True)).astype(int)

    # set up global vars used in ga function
    global model_vals_ga, a0_ga, n_ga, space_vals_start_flat_ga, normal_vectors_ga
    model_vals_ga = model_vals
    a0_ga = a0
    n_ga = n
    space_vals_start_flat_ga = space_vals_flat
    normal_vectors_ga = normal_vectors

    # initialize and run model
    model = ga(function=abs_diff_ratio_function_ga,
               dimension=9,
               variable_type='int',
               variable_boundaries=varbound,
               convergence_curve=False)
    model.run()

    return model.output_dict['variable'].reshape(n, 3).tolist()
Example #9
    def runGeneticAlgorithm(self):
        # ga
        varbound = np.array([[0, 100]] * len(self.palette))
        algorithm_parameters={'max_num_iteration': 4000,\
                                'population_size':100,\
                                'mutation_probability':0.1,\
                                'elit_ratio': 0.01,\
                                'crossover_probability': 0.5,\
                                'parents_portion': 0.3,\
                                'crossover_type':'uniform',\
                                'max_iteration_without_improv':None}
        model = ga(function=self.f,
                   dimension=len(self.palette),
                   variable_type='real',
                   variable_boundaries=varbound,
                   convergence_curve=True,
                   algorithm_parameters=algorithm_parameters)

        #print(model.param)
        model.run()

        print("output variable is: ")
        output = np.matrix(model.output_dict['variable'])
        output /= output.max()
        print(output)

        return output
def test_rastrigin_initialized():
    parameters = {
        'max_num_iteration': 100,
        'population_size': 2000,
        'mutation_probability': 0.1,
        'elit_ratio': 0.02,
        'crossover_probability': 0.5,
        'parents_portion': 0.3,
        'crossover_type': 'two_point',
        'max_iteration_without_improv': None,
        'multiprocessing_ncpus': 4,
        'multiprocessing_engine': None,
    }

    dimension = 200
    varbound = np.array([[-5.12, 5.12]] * dimension)

    model = ga(function=f,
               dimension=dimension,
               variable_type='real',
               variable_boundaries=varbound,
               algorithm_parameters=parameters)
    model.run(plot=True,
              initial_idv=np.random.random(size=dimension) * 5.12 - 10.24)
    assert model.best_function < 1000
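The objective `f` shared by both Rastrigin tests is not shown; the standard Rastrigin function they presumably minimize is:

import numpy as np

def f(X):
    # Rastrigin function: global minimum 0 at X = 0, many local minima
    return 10 * len(X) + np.sum(X ** 2 - 10 * np.cos(2 * np.pi * X))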
Example #11
    def genetic_alg(self, bounds, params, timeOut=10.0):
        model = ga(
            function=self.distance,
            dimension=bounds.shape[0],
            variable_type='real',
            variable_boundaries=bounds,
            algorithm_parameters=params,
            function_timeout=timeOut
        )

        return model
Example #12
def start(tickers_size,
          tickers_pool,
          prices_pool,
          algorithm_param,
          target='calmar'):
    # varbound = np.array([[0.5, 1.5], [1, 100], [0, 1]])
    varbound = np.array([[0, len(tickers_pool) - 1]] * tickers_size)
    # remember to count down 1
    # vartype = np.array([['real'], ['int'], ['int']])
    vartype = np.array([['int']] * tickers_size)
    # f and f_sharpe are module-level objectives; a bound method (self.f) cannot be passed to the ga library
    objective = f if target == 'calmar' else f_sharpe
    model = ga(
        function=objective,
        dimension=tickers_size,
        variable_type='int',
        # variable_type_mixed = vartype
        variable_boundaries=varbound,
        algorithm_parameters=algorithm_param,
        tickers_pool=tickers_pool,
        prices_pool=prices_pool,
        function_timeout=20)

    model.run()
    solution = model.output_dict
    df = pd.DataFrame.from_dict(solution)
    root_list = df['variable'].values.tolist()
    return root_list
Example #13
def genetic_vehicle(periods,current_charge,charge_speed,lambda_mix,lambda_cost, mix_list, cost_list, tweak):

    def cost_function(X):
        penalty = 0
        charge = 0
        count = 0

        objective = current_charge
        difference = 0
        while(objective < 100):
            objective += charge_speed * 30
            difference += 1

        for i in range(periods):
            if(X[i] == 1):
                count += 1
            charge += 30 * X[i] * charge_speed

        if(difference != count):
            penalty += abs(difference - count) * 100

        if(X[0] == 1 and tweak == 0):
            penalty += 10

        #if(current_charge + charge < 100):
        #    penalty += abs(count - objective)*0.1

        #if(current_charge + charge > 120):
        #    penalty += abs(count - objective)*0.1

        score = []
        for i in range(periods):
            score.append((lambda_mix * mix_list[i] - lambda_cost * cost_list[i]) * X[i])
    
        return - (sum(score) / len(score)) + penalty

    algorithm_param = {'max_num_iteration': 300,\
                   'population_size':150,\
                   'mutation_probability':0.1,\
                   'elit_ratio': 0.01,\
                   'crossover_probability': 0.5,\
                   'parents_portion': 0.3,\
                   'crossover_type':'uniform',\
                   'max_iteration_without_improv':100,
                    }

    model=ga(function=cost_function,dimension=periods,variable_type='bool',algorithm_parameters=algorithm_param,convergence_curve=False)
    model.run()
    solution=model.output_dict
    return solution.get('variable')
Example #14
def genetic_algo(nr_dict, pizzas):
    """
    The matrix representing the solution of this problem is 
    on the column the pizza_id and on the row the group_id.
    If an entry = true then that pizza_id will get delivered 
    to that group_id. 
    """

    val = Validation(nr_dict, pizzas)
    dimensions = (nr_dict['t2'] + nr_dict['t3'] + nr_dict['t4']) * len(pizzas)
    print(dimensions)
    model = ga(function=val.validate,
               dimension=dimensions,
               variable_type='bool')

    model.run()
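As the docstring notes, the flat boolean chromosome encodes a groups-by-pizzas assignment matrix. A sketch of the reshaping a validation function could perform (a simplified stand-in, not the actual `Validation.validate`):

import numpy as np

def reshape_assignment(x, nr_dict, pizzas):
    # rows: one per group of size 2, 3 or 4; columns: one per pizza
    n_groups = nr_dict['t2'] + nr_dict['t3'] + nr_dict['t4']
    return np.asarray(x, dtype=bool).reshape(n_groups, len(pizzas))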
def egg_ga(maxRun):
    for i in range(maxRun):
        global egg_fVals
        global fVals
        egg_fVals = []
        egg_sol = ga(function=eggcrate,
                     dimension=2,
                     variable_type='real',
                     variable_boundaries=egg_bound,
                     algorithm_parameters=algorithm_param)
        start_time = time.process_time()
        egg_sol.run()
        end_time = time.process_time()
        print("Execution Time: ", end_time - start_time)
        fVals.append(egg_fVals)
    plot_gaRosen(maxRun)
    return egg_sol
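`eggcrate`, `egg_bound`, `egg_fVals`, and `algorithm_param` come from the surrounding module; a commonly used form of the egg crate test function and bounds, assumed here for completeness, is:

import numpy as np

egg_bound = np.array([[-2 * np.pi, 2 * np.pi]] * 2)

def eggcrate(X):
    # egg crate test function: global minimum 0 at (0, 0)
    return X[0] ** 2 + X[1] ** 2 + 25 * (np.sin(X[0]) ** 2 + np.sin(X[1]) ** 2)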
Example #16
def main():
    varBound = np.array([[0,500],[1,5],[0,1],[2,9],[0,100],[1,7],[0,100],[1,2],[0,6]])

    algorithm_param = {'max_num_iteration': None,\
                       'population_size':250,\
                       'mutation_probability':0.1,\
                       'elit_ratio': 0.01,\
                       'crossover_probability': 0.5,\
                       'parents_portion': 0.3,\
                       'crossover_type':'two_point',\
                       'max_iteration_without_improv':None}

    # the timeout is a constructor argument (function_timeout), not an algorithm parameter
    model = ga(function=minRuntime, dimension=9, variable_type='int',
               variable_boundaries=varBound, function_timeout=10000000,
               algorithm_parameters=algorithm_param)

    model.run()
Example #17
def obtenerSolucion():
    global listaTotal,noLeGustaOrdenado,maxNoGustaIngredientesIni,maxNoGustaIngredientesFin,profundidadIngredientesIni,profundidadIngredientesFin
    algorithm_param = {'max_num_iteration': 15,\
                   'population_size':20,\
                   'mutation_probability':0.1,\
                   'elit_ratio': 0.01,\
                   'crossover_probability': 0.5,\
                   'parents_portion': 0.3,\
                   'crossover_type':'uniform',\
                   'max_iteration_without_improv':3}

    model=ga(function=scoreSolucion,dimension=len(noLeGustaOrdenado),variable_type='bool',algorithm_parameters=algorithm_param)

    model.run()
    mejorSol=model.output_dict["variable"]
    print(mejorSol)
    return mejorSol
def rosen_ga(maxRun):
    for i in range(maxRun):
        global rosen_fVals
        global rfVals
        rosen_fVals = []
        rosen_sol = ga(function=rosenbrock,
                       dimension=2,
                       variable_type='real',
                       variable_boundaries=rosen_bound,
                       algorithm_parameters=algorithm_param)
        start_time = time.process_time()
        rosen_sol.run()
        end_time = time.process_time()
        print("Execution Time: ", end_time - start_time)
        rfVals.append(rosen_fVals)
    plot_gaRosen(maxRun)
    return rosen_sol
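Likewise, `rosenbrock` and `rosen_bound` are defined elsewhere; the classic two-dimensional Rosenbrock function assumed by this example is (the bounds below are an illustrative choice):

import numpy as np

rosen_bound = np.array([[-2, 2]] * 2)

def rosenbrock(X):
    # Rosenbrock banana function: global minimum 0 at (1, 1)
    return (1 - X[0]) ** 2 + 100 * (X[1] - X[0] ** 2) ** 2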
Example #19
def _optimize_segment(region, dates, initial, attributes, weekly):
    """Optimize parameters on segment using simulation.
    
    Args:
        region (str): Region to run simulation for.
        dates (tuple (2) of datetime.datetime): Limits of dates.
        initial (tuple (5)): Initial values for (S,E,I,R,D).
        attributes (str): Attributes used for optimization: 'I', 'R' or 'D'.
        weekly (bool): Use weekly time slots if True, otherwise daily.
    """
    fixparams = [None, .2, None, None]  #.0064]

    def _obj(pars):
        return posterior.posterior_objective(pars,
                                             region=region,
                                             fixparams=fixparams,
                                             weekly=weekly,
                                             dates=dates,
                                             initial=initial,
                                             attributes=attributes,
                                             parI=(1, 1),
                                             parR=(1, 1),
                                             parD=(1, 1))

    algorithm_param = {
        'max_num_iteration': 500,
        'population_size': 70,
        'mutation_probability': .65,
        'elit_ratio': .05,
        'crossover_probability': .8,
        'parents_portion': .5,
        'crossover_type': 'uniform',
        'max_iteration_without_improv': 40
    }
    varbound = np.array([[0, 1], [0.033, .5], [0, .1]])
    model = ga(function=_obj,
               dimension=sum([i is None for i in fixparams]),
               variable_type='real',
               variable_boundaries=varbound,
               convergence_curve=False,
               progress_bar=True,
               algorithm_parameters=algorithm_param)
    model.run()
    # best params
    params = posterior._parse_params(model.output_dict['variable'], fixparams)
    return params
Example #20
    def fit_parameters(self, error_func):
        gamma1 = (1 / 10 + 1 / 5) / 2
        gamma2 = 0.02
        gamma3 = 0.02
        beta = 0.14

        num_cumulative_positiv = np.sum(self._observations[:, 0])

        print(f"num_cumulative_positiv {num_cumulative_positiv}")
        print(self._observations[:, 0])

        tau = 28 / num_cumulative_positiv

        delta = 5 / 18

        params = Parameters()
        params.add('gamma1', value=gamma1, min=1 / 10, max=1 / 5)
        params.add('gamma2', value=gamma2, min=0, max=5)
        params.add('gamma3', value=gamma3, min=0, max=5)
        params.add('beta', value=beta, min=0.01, max=0.5)
        params.add('tau', value=tau, min=tau * 0.8, max=tau * 1.2)
        params.add('delta', value=delta, min=0.6 * delta, max=1.4 * delta)

        # result = minimize(self._plumb,
        #                   params,
        #                   args=(len(self._observations), error_func),
        #                   method='leastsq')

        # report_fit(result)

        # self._fit_params = result.params

        print(params.items())

        bounds = np.array([(p.min, p.max) for p_name, p in params.items()])
        self._error_func = error_func

        gamodel = ga(function=self._plumb,
                     dimension=len(bounds),
                     variable_type='real',
                     variable_boundaries=bounds)
        gamodel.run()

        self._fit_params = self._params_array_to_dict(
            gamodel.output_dict['variable'])
Example #21
def runGA():
    params = {'max_num_iteration': 500,\
                   'population_size': 50,\
                   'mutation_probability': 0.1,\
                   'elit_ratio': 0.01,\
                   'crossover_probability': 0.5,\
                   'parents_portion': 0.3,\
                   'crossover_type': 'uniform',\
                   'max_iteration_without_improv': None}

    model=ga(function = fitness,\
                dimension = 21,\
                variable_type = 'bool',\
                variable_boundaries = None,\
                algorithm_parameters = params)

    print(model.param)
    model.run()
Example #22
def run_genetic_merge(items, template):
    algorithm_param = {'max_num_iteration': 3000,
                       'population_size': 100,
                       'mutation_probability': 0.1,
                       'elit_ratio': 0.01,
                       'crossover_probability': 0.5,
                       'parents_portion': 0.3,
                       'crossover_type': 'uniform',
                       'max_iteration_without_improv': 150}
    f = partial(compute_volume_ga, items, template)
    np.random.seed(0)
    model = ga(function=f,
               dimension=len(items),
               variable_type='bool',
               algorithm_parameters=algorithm_param)
    model.run()

    return model.best_variable.astype(dtype="bool")
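`compute_volume_ga` is bound with `partial`, so the GA only supplies the boolean mask as the final argument. A hedged sketch of a compatible signature (the real volume computation is not shown):

def compute_volume_ga(items, template, mask):
    # items: candidate objects, template: container description,
    # mask: vector of 0/1 flags chosen by the GA, one per item
    selected = [item for item, keep in zip(items, mask) if keep]
    # placeholder score: ga minimizes, so return the negated count of kept items
    return -len(selected)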
Example #23
def solve_with_GA(params):
    # algorithm = AlgorithmGaBihc([params['function'],params['bounds'][0]],params['maxFes'],dimensions=params['dimensions'],steps=10000,popSize=100)
    # try:
    #     _explored_points = algorithm.run()
    # except Exception as _:
    #     pass
    # return {'f_value': algorithm.be, 'solution': algorithm.bv}
    alg = ga(function=params['function'],
             dimension=params['ndim'],
             variable_type='real',
             variable_boundaries=np.array(params['bounds']),
             algorithm_parameters={'max_num_iteration': 100000,
                                   'population_size': 100,
                                   'mutation_probability': 0.1,
                                   'elit_ratio': 0.01,
                                   'crossover_probability': 0.5,
                                   'parents_portion': 0.3,
                                   'crossover_type': 'uniform',
                                   'max_iteration_without_improv': None})
    alg.run()
    return {'f_value': alg.best_function, 'solution': alg.best_variable}
    def getModel(self):
        algoParam = {
            'max_num_iteration': 40,
            'population_size': 600,
            'mutation_probability': 0.1,
            'elit_ratio': 0,
            'crossover_probability': 0.5,
            'parents_portion': 0.3,
            'crossover_type': 'uniform',
            'max_iteration_without_improv': None
        }
        gaModel = ga(function=self.objFunction,
                     dimension=self.space.dNum,
                     variable_type='real',
                     variable_boundaries=self.space.getAllDimensionBounds(),
                     convergence_curve=self.convergence_curve,
                     algorithm_parameters=algoParam,
                     progress_bar=self.progress_bar)
        return gaModel
Example #25
    def fit_parameters_ga(self, error_func):
        # Fit parameters using a genetic algorithm

        params = self.get_initial_parameters()

        bounds = np.array([(p.min, p.max) for p_name, p in params.items()])
        self._error_func = error_func

        gamodel = ga(function=self._plumb_ga,
                     dimension=len(bounds),
                     variable_type='real',
                     variable_boundaries=bounds)
        gamodel.run()

        self._fit_params = self._params_array_to_dict(
            gamodel.output_dict['variable'])

        for p_name, p in params.items():
            print("{:10s} [{:.2f} - {:.2f}] : {:.2f}".format(
                p_name, p.min, p.max, self._fit_params[p_name]))
Example #26
    def optimize_genetic(self):
        print("optimizing via genetic alg...")
        algorithm_param = {
            'max_num_iteration': 1000,
            'population_size': 100,
            'mutation_probability': 0.1,
            'elit_ratio': 0.01,
            'crossover_probability': 0.5,
            'parents_portion': 0.3,
            'crossover_type': 'uniform',
            'max_iteration_without_improv': 100
        }

        bounds = self.algorithm.get_possible_parameters(self.algorithm)
        model = ga(function=self.fitness,
                   dimension=len(bounds),
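                   # type(bounds[0][0]).__name__ yields 'int' or 'float'; the ga library only
                   # accepts 'int', 'real' or 'bool', so this assumes the bounds are integers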
                   variable_type=type(bounds[0][0]).__name__,
                   variable_boundaries=numpy.array(bounds),
                   algorithm_parameters=algorithm_param)
        val = model.run()
        print(val)
Example #27
def test_rastrigin():
    parameters = {
        'max_num_iteration': 1000,
        'population_size': 200,
        'mutation_probability': 0.1,
        'elit_ratio': 0.02,
        'crossover_probability': 0.5,
        'parents_portion': 0.3,
        'crossover_type': 'two_point',
        'max_iteration_without_improv': None,
        'multiprocessing_ncpus': 4,
        'multiprocessing_engine': None,
    }
    varbound = np.array([[-5.12, 5.12]] * 2)

    model = ga(function=f,
               dimension=2,
               variable_type='real',
               variable_boundaries=varbound,
               algorithm_parameters=parameters)
    model.run()
    assert model.best_function < 1e-6
Example #28
def genetic_lstm():
    """LSTM input/architecture genetic optimization"""
    varbound = np.array([
        [3, 64],  #histo
        [16, 64]  #units
    ])
    algorithm_parameters = {
        'max_num_iteration': None,
        'population_size': 100,  #default is 100
        'mutation_probability': 0.1,
        'elit_ratio': 0.01,
        'crossover_probability': 0.5,
        'parents_portion': 0.3,
        'crossover_type': 'uniform',
        'max_iteration_without_improv': None
    }
    optim = ga(function=rmse_lstm,
               dimension=2,
               variable_type='int',
               variable_boundaries=varbound,
               function_timeout=60 * 60,
               algorithm_parameters=algorithm_parameters)
    optim.run()
Example #29
def GA_model(algorithm_param):
    """
    Simple genetic algorithm model based on the geneticalgorithm library
    (https://pypi.org/project/geneticalgorithm/)
    """
    def fitness_func(gene, render=False):
        rewards = []
        episodes = 3
        for _ in range(episodes):
            observation = env.reset()
            for _ in range(goal_steps):
                if render:
                    env.render()
                # Perform logistic regression function here
                # to get either a 0 (left) or 1 (right) action
                weights = gene
                a = sigmoid(observation.dot(weights.T))
                action = int(to_action(a))
                observation, reward, done, info = env.step(action)
                rewards.append(reward)
                if done:
                    break
        result = -sum(rewards) / episodes
        return result

    dim = 4
    varbound = np.array([[-1000, 1000]] * dim)

    model = ga(function=fitness_func,
               dimension=dim,
               variable_type='real',
               variable_boundaries=varbound,
               algorithm_parameters=algorithm_param)

    model.run()
    weights = model.output_dict['variable']
    return weights
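`env`, `goal_steps`, `sigmoid`, and `to_action` come from the surrounding script; plausible definitions of the two helpers implied by the logistic-regression comment (assumptions, not the original code) are:

import numpy as np

def sigmoid(z):
    # squash the weighted observation into (0, 1)
    return 1.0 / (1.0 + np.exp(-z))

def to_action(a):
    # threshold the probability into a discrete action: 0 (left) or 1 (right)
    return 1 if a > 0.5 else 0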
Example #30
def genetic_algorithm(env: CompilerEnv):
    def f(choices):
        env.reset()
        env.choices = choices = list(map(int, choices))
        s = objective(env)
        return s if s > 0 else float("inf")

    model = ga(
        function=f,
        dimension=len(env.gcc_spec.options),
        variable_type="int",
        variable_boundaries=np.array([[-1,
                                       min(FLAGS.max_range,
                                           len(opt) - 1)]
                                      for opt in env.gcc_spec.options]),
        function_timeout=FLAGS.timeout,
        algorithm_parameters={
            "population_size": FLAGS.pop_size,
            "max_num_iteration": max(1, int(FLAGS.gcc_search_budget / FLAGS.pop_size)),
            "mutation_probability": 0.1,
            "elit_ratio": 0.01,
            "crossover_probability": 0.5,
            "parents_portion": 0.3,
            "crossover_type": "uniform",
            "max_iteration_without_improv": None,
        },
    )
    model.run()
    return model.best_function