def main():
    # Drive one GA run: build a population, then evolve it one generation at
    # a time until the instance's own max_generations limit is reached.
    # NOTE(review): the original source was collapsed onto a single line, so
    # the exact loop-body boundary is an assumption — the two trailing calls
    # below are placed where they most plausibly belong; confirm against the
    # original formatting.
    temp = ga.GA()
    temp.initPopulation()
    while temp.generation < temp.max_generations:
        temp.RWS(5)       # selection step; 5 is presumably a selection-size knob — verify in ga.GA
        temp.printGA()    # progress dump for the current generation
        temp.generation += 1
        print(f'bora {temp.list_prob}')  # debug: current selection probabilities
    temp.printGA()  # final population summary after the loop ends
def main():
    """Search for a hackathon team assignment with a genetic algorithm.

    A genome encodes `hakaton` member indices as fixed-width bit groups;
    fitness rewards the summed JS + speed skill of the chosen members and
    heavily penalises picking the same member twice.
    """
    from itertools import combinations

    js_skill = [10, 5, 2, 3]
    speed_skill = [2, 3, 1, 3]
    hakaton = 3  # team size: how many member slots a genome encodes
    genesize = binary_ops.bitsNeededToNumber(3) * hakaton

    # Hoisted out of fitness(): these arrays are loop-invariant, and fitness
    # runs once per individual per epoch (the GA's hot path).
    combined_skill = np.array(js_skill) + np.array(speed_skill)

    def fitness(gene):
        """Score one genome: total skill of the team, minus 1000 per duplicate pair."""
        bits = gene.reshape((hakaton, -1))
        val = binary_ops.bitsToBytes(bits)  # decoded member indices
        score = combined_skill[val].sum()
        # Penalise every pair of slots that picked the same member.
        score -= 1000 * sum(1 for a, b in combinations(val, 2) if a == b)
        return score

    ga = GeneticAlgorithm.GA(genesize, population_size=10,
                             epochs=1000, maximization=True)
    ga.debug = False
    ga.verbose = True
    best, pop, score = ga.run(fitness, multiple=False)

    def evaluate(gene):
        # Human-readable dump of the winning genome's decoded team.
        print('==========Evaluation=============')
        bits = gene.reshape((hakaton, -1))
        val = binary_ops.bitsToBytes(bits)
        skill_power = np.array(js_skill)[val]
        print(skill_power)

    print('BEST: ', best)
    evaluate(np.array([best]))
def gaConfig(genomesize, data):
    """Construct a GA instance configured for minimisation.

    `data` is forwarded to evaluate() at the end of each epoch block so the
    current best genome can be inspected against it.
    """
    def _on_epoch_end(genome):
        # Epoch-boundary hook: report the current genome against `data`.
        return evaluate(genome, data)

    settings = {
        'population_size': 200,
        'epochs': 1000,
        'ephoc_generations': 10,
        'selection_count': 50,
        'maximization': False,
        'on_ephoc_ends': _on_epoch_end,
    }
    instance = GeneticAlgorithm.GA(genomesize, **settings)
    instance.debug = False
    instance.verbose = True
    return instance
# --- First problem: Knapsack --------------------------------------------
weights = np.random.randint(1, 20, size=20)
values = np.random.randint(1, 10, size=20)
max_weight_pct = 0.65
fitness = mlrose.Knapsack(weights, values, max_weight_pct)

# Define optimization problem object
problemFit = mlrose.DiscreteOpt(length=20, fitness_fn=fitness, maximize=True)


def _run_and_record(optimizer, problem, name):
    # Register the optimizer for the later comparison plot, then produce its
    # individual complexity curve. Factored out: the original repeated this
    # append + getComplexityCurve pair verbatim for all four optimizers.
    optimizers.append(optimizer)
    exp.getComplexityCurve(optimizer=optimizer, problem=problem, problemName=name)


# Create and run each optimizer class against Knapsack. The named bindings
# are kept (not folded into a loop) in case later code references them.
rhcOptimizer = rhc.RHC()
_run_and_record(rhcOptimizer, problemFit, 'Knapsack')

saOptimizer = sa.SA()
_run_and_record(saOptimizer, problemFit, 'Knapsack')

gaOptimizer = ga.GA()
_run_and_record(gaOptimizer, problemFit, 'Knapsack')

mimicOptimizer = mimic.MIMIC()
_run_and_record(mimicOptimizer, problemFit, 'Knapsack')

exp.getComparisonCurve(optimizers, problem=problemFit, problemName='Knapsack')

# --- Second problem: One Max --------------------------------------------
optimizers = []
fitness = mlrose.OneMax()

# Define optimization problem object (binary strings of length 20)
problemFit = mlrose.DiscreteOpt(length=20, fitness_fn=fitness, maximize=True,
                                max_val=2)

# Create and run the Randomized Hill Climbing Optimizer class
rhcOptimizer = rhc.RHC()
import ast  # local import: safe parsing of literals read from the config file

GA_config_iter = config['GA']['iter']
GA_config_pop = config['GA']['populacja']

# Parse the node/edge strings loaded from the config file.
# ast.literal_eval accepts only Python literals (tuples, lists, numbers, ...),
# unlike the original eval(), which would execute arbitrary expressions
# embedded in the config — a code-injection hole for untrusted input.
for i, edge in enumerate(edges_G1):
    edges_G1[i] = ast.literal_eval(edge)
for i, edge in enumerate(edges_G2):
    edges_G2[i] = ast.literal_eval(edge)
nodes_G1 = ast.literal_eval(nodes_G1)
nodes_G2 = ast.literal_eval(nodes_G2)

# Build the two graphs to be matched.
G1 = nx.Graph()
G1.add_nodes_from(nodes_G1)
G1.add_edges_from(edges_G1)

G2 = nx.Graph()
G2.add_nodes_from(nodes_G2)
G2.add_edges_from(edges_G2)

constGraph = nx.Graph()

# Dispatch on the configured algorithm. Config values arrive as strings,
# hence the int()/float() conversions at the call sites.
if mode == 'ANT':
    A = ACO(G1, G2, int(ANT_config_ant), int(ANT_config_gen),
            float(ANT_config_alpha), float(ANT_config_beta))
    A.ACO_algo()
if mode == 'GA':
    ga = GeneticAlgorithm.GA(G1, G2, float(GA_config_mut), float(GA_config_cr),
                             int(GA_config_iter), int(GA_config_pop))
    ga.Generic_algorithm()