def best(population, dist, N):
    """Return the cheapest tour in `population`.

    Bug fix: the original loop iterated `range(1, n)` with a lowercase
    `n` that is not a parameter of this function -- at best a stale
    module-level global, at worst a NameError -- so part (or all) of the
    population could be skipped.  Scanning via min() covers every
    individual.  The original also tracked an unused index `mini`.

    Parameters:
        population: list of tours (each a list of city indices).
        dist:       distance matrix passed through to cost_tour.
        N:          number of cities, passed through to cost_tour.

    Returns the first tour achieving the minimum cost, matching the
    original's strict `<` comparison (earlier element wins ties).
    Raises ValueError on an empty population (the original raised
    IndexError in that case).
    """
    return min(population, key=lambda tour: cost_tour(tour, dist, N))
def genetic_algo(dist, N):
    """Evolve TSP tours indefinitely, reporting progress each cycle.

    Runs `num_iter` GA generations (selection -> crossover -> mutation ->
    elitist merge), prints the current best tour cost, and repeats forever.
    The trailing return is unreachable under normal operation; the process
    is expected to be killed externally (submission-style loop).
    """
    population = initPopulation(dist, N)
    while True:
        # One reporting cycle: a fixed number of generations.
        for _ in range(num_iter):
            fitness = getFitness(population, dist, N)
            parents = selection(population, fitness)
            children = crossoverPopulation(parents)
            children = mutate(children)
            population = optimizePopulation(population, children, fitness, dist, N)
        # Emit the cheapest cost found so far and flush immediately so
        # the judge/driver sees it in real time.
        print(cost_tour(best(population, dist, N), dist, N))
        stdout.flush()
    return best(population, dist, N)
#!/usr/bin/env python
# Score a candidate tour against a problem instance.
#
# Usage: echo "<tour>" | ./script.py <problem-file-name>
# The tour arrives on stdin as whitespace-separated city indices; the
# problem instance is loaded from problems/<argv[1]>.
from basic_functions import cost_tour, read_inp
from sys import argv

tour = map(int, raw_input().split())
num_cities, coords, dist_matrix = read_inp('problems/' + argv[1])
print(cost_tour(tour, dist_matrix, num_cities))
def getFitness(pop, dist, N):
    """Return the fitness of every individual in `pop`.

    Fitness is the reciprocal of tour cost (1.0 / cost), so cheaper
    tours score higher -- suitable for the roulette-style selection
    used elsewhere in this file.

    Parameters:
        pop:  list of tours.
        dist: distance matrix passed through to cost_tour.
        N:    number of cities, passed through to cost_tour.

    Returns a list of floats aligned index-for-index with `pop`.
    Raises ZeroDivisionError if any tour has zero cost.
    """
    # Idiomatic comprehension replaces the original append loop.
    return [1.0 / cost_tour(p, dist, N) for p in pop]
def optimizePopulation(original_population, derived_population, fitness, dist, N):
    """Elitist merge: next generation = k cheapest offspring + all parents
    except the k least fit.

    NOTE(review): `k` is a free variable here -- presumably a module-level
    constant defined elsewhere in the file; confirm it exists and that
    0 < k <= len(population) before reuse.
    """
    # Parents ordered by ascending fitness (least fit first).  Fitness ties
    # fall back to comparing the tour lists themselves -- a side effect of
    # sorting (fitness, tour) tuples, not an explicit design choice.
    orig_sorted = [x for (y,x) in sorted(zip(fitness, original_population))]
    # Offspring ordered by descending cost, so the k cheapest sit at the end.
    derv_sorted = sorted(derived_population, key = lambda x: -cost_tour(x, dist, N))
    # Keep the k best offspring and drop the k worst parents; the result has
    # the same size as original_population when both inputs match in length.
    return derv_sorted[-k:] + orig_sorted[k:]
def deltaE(nTour, cTour):
    """Energy delta between the current tour and a proposed one.

    Positive when `nTour` is cheaper than `cTour` (an improvement),
    in the simulated-annealing sense of delta-E = E_current - E_new.

    NOTE(review): relies on a module-level `dist` matrix being in
    scope; each tour's length is taken as its city count.
    """
    return cost_tour(cTour, dist, len(cTour)) - cost_tour(nTour, dist, len(nTour))