Example #1
    def solve(self):
        """
            This method solves the problem using NTGA algorithm
        """
        generation_limit = int(config.get('run_config', 'generation_limit'))
        self.generate_initial_population()

        new_population = []
        print("Generation limit:", str(generation_limit))
        for i in range(generation_limit):
            print("generation: ", i)
            self.population = self.evaluate(self.population)
            self.non_dominated_set.adds(self.population)

            while len(new_population) < self.population_size:
                parents = self.selection()
                children = mutation(crossover(parents))

                # Re-mutate a child while it already appears in the new
                # population or the two children are identical; give up
                # after 100 attempts to avoid an infinite loop.
                for child in children:
                    count = 0
                    while (child in new_population) or children[0] == children[1]:
                        count += 1
                        mutate(child)
                        if count > 100:
                            break

                new_population += children

            self.population = new_population
            new_population = []

        return self.non_dominated_set
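The loop above assumes module-level crossover, mutation, and mutate helpers that are not shown. A minimal sketch of what compatible helpers could look like for a bit-string encoding (the names, signatures, and default rates here are assumptions, not the original project's code):

import random


def crossover(parents, p_crossover=0.9):
    """Hypothetical single-point crossover of two equal-length bit lists."""
    parent_a, parent_b = parents
    if random.random() > p_crossover:
        return [parent_a[:], parent_b[:]]
    point = random.randrange(1, len(parent_a))
    return [parent_a[:point] + parent_b[point:],
            parent_b[:point] + parent_a[point:]]


def mutate(child, p_mutation=0.02):
    """Hypothetical in-place bit-flip mutation of a single child."""
    for i in range(len(child)):
        if random.random() < p_mutation:
            child[i] = 1 - child[i]
    return child


def mutation(children, p_mutation=0.02):
    """Apply mutate() to every child in a list."""
    return [mutate(child, p_mutation) for child in children]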
Example #2
def get_good_solution(task,
                      crossover_rate=CROSSOVER_RATE,
                      mutation_rate=MUTATION_RATE,
                      tournament_size=TOURNAMENT_SIZE,
                      population_size=POPULATION_SIZE):
    best_individuals_values = np.empty(MAX_ITERATIONS)

    population = init_population(task, population_size)
    outer_iterator = 0
    best_individual = 0
    best_individual_value = 0
    global_best_individual = 0
    global_best_individual_value = 0
    print('Initial value of the knapsack =',
          population.individuals[0].evaluate())
    while outer_iterator < MAX_ITERATIONS:
        inner_iterator = 0
        new_population = Population()
        while inner_iterator < population_size:
            parent1 = tournament(population, tournament_size)
            parent2 = tournament(population, tournament_size)
            child = crossover(parent1, parent2, crossover_rate)
            mutate(child, mutation_rate)
            new_population.add_individual(child)
            inner_iterator += 1
        best_individual = population.best_individual()
        best_individual_value = best_individual.evaluate()
        if best_individual_value >= global_best_individual_value:
            global_best_individual = best_individual
            global_best_individual_value = best_individual_value
        new_population.set_first_individual(global_best_individual)
        population = new_population
        best_individuals_values[outer_iterator] = global_best_individual_value
        outer_iterator += 1
    return global_best_individual, best_individuals_values
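The tournament() helper used above is not shown. A generic sketch, assuming the Population/Individual interface visible in the surrounding code (an individuals list and an evaluate() method); the real helper may differ:

import random


def tournament(population, tournament_size):
    """Hypothetical tournament selection: sample tournament_size individuals
    at random and return the one with the highest evaluate() value."""
    competitors = random.sample(population.individuals, tournament_size)
    return max(competitors, key=lambda individual: individual.evaluate())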
Example #3
def main(pop_size, mutation_rate, nr_generations):
    growth = []

    # 1. Create the population
    population = initial_population(pop_size)

    for j in range(nr_generations):
        # population = nextgeneration(population, mutation_rate)
        listscores = []
        genfitness = 0

        # 2. Determine fitness
        # check for the entire population the fitness
        for index, data in enumerate(population):
            fitness = fitnesscheck(data)
            listscores.append([index, fitness])
            genfitness += fitness

        print("De "+ str(j) + "e generatie heeft een gemiddelde score van ", genfitness/pop_size)


        # Make N new individuals for the next generation
        newpopulation = []
        for i in range(pop_size):
            # 3. Select the mating pool
            # Pick two parents tournament-style
            win1, win2 = pickparents(pop_size, listscores)

            # 4. Breed
            # Crossbreed a new child
            newpopulation.append(cross(population[win1], population[win2]))

            # 5. Mutate
            mutation_decision = random.randint(0, 100)

            if mutation_decision > 90:
                mutate(newpopulation[i])

        population = newpopulation
        growth.append(genfitness/pop_size)

    print(growth)
    plt.plot(growth)
    plt.show()

    with open("endresult.txt", "w") as txt_file:
        for data in population:
            for line in data:
                txt_file.write(" ".join(str(line)) + "\n")
Example #4
def GA(M, N, MaxGen, Pc, Pm, Er, bb, method):

    population = innit.initialize(M, N)

    fitness = []

    for i in range(M):
        fitness.append(bb.fitnes(population.genes[i]))

    population['fitness'] = fitness

    newpop = np.array(population.drop(['fitness'], axis=1))
    print("generation  #{0}".format(1))

    fitnesses = [0]
    for i in range(1, MaxGen):
        print("generation  #{0}".format(i + 1))

        for j in range(0, M, 2):

            p1, p2 = sel.selection(population)

            child1, child2 = cs.cross(p1, p2, Pc, method)

            child1 = mt.mutate(child1, Pm)
            child2 = mt.mutate(child2, Pm)

            newpop[j] = child1

            newpop[j + 1] = child2
        newpopulation = pd.DataFrame(newpop, columns=['genes'])

        fitness1 = []
        for k in range(M):
            fitness1.append(bb.fitnes(newpopulation.genes[k]))
        newpopulation['fitness'] = fitness1

        newpopulation = el.ellist(newpopulation, Er)

        population = newpopulation.copy()
        newpopulation.sort_values(['fitness'], inplace=True, ascending=False)
        newpopulation = newpopulation.reset_index(drop=True)

        maxx = max(newpopulation.fitness)
        #print(maxx)
        #print(maxx,newpopulation.genes[newpopulation.fitness == maxx].iloc[0])
        fitnesses.append(maxx)
    return fitnesses
Example #5
def operate(gen_in, mu_in, lamb_da_in, boundary_in, gen, maxgen):
    crosser = np.random.randint(0, 7)
    lambda_gen = crossover(crosser,
                           gen_in,
                           mu_in,
                           lamb_da_in,
                           objective_function,
                           BLX_alpha=0.5,
                           SPX_epsilon=1,
                           SBX_n=2,
                           UNDX_sigma_xi=0.8,
                           UNDX_sigma_eta=0.707,
                           DE_K=0.5,
                           DE_F=0.3)
    mutant = np.random.randint(0, 7)
    lambda_gen = mutate(mutant,
                        lambda_gen,
                        boundary_in,
                        gen,
                        objective_function,
                        normal_sigma=0.5,
                        uniform_pm=0.1,
                        boundary_pm=0.1,
                        maxgen=maxgen,
                        b=5,
                        cauchy_sigma=0.5,
                        delta_max=20,
                        n=2,
                        DE_K=0.5,
                        DE_F=0.3)
    fixme = np.random.randint(0, 2)
    return fix(fixme, lambda_gen, boundary_in)
Example #6
def genetic_algorithm(tournament_size=TOURNAMENT_SIZE,
                      crossover_rate=CROSSOVER_RATE,
                      mutation_rate=MUTATION_RATE,
                      population_size=POPULATION_SIZE):
    task = read(input_file=OUTPUT_FILE)
    population = init_population(NUMBER_OF_ITEMS, population_size)
    best_ind = []
    i = 0
    new_pop_val = []
    while i < ITERATIONS:
        # print(i)
        j = 0
        new_pop_arr = []

        while j < population_size:
            parent1 = population.tournament(tournament_size, task)
            parent2 = population.tournament(tournament_size, task)
            child = crossover(parent1, parent2, crossover_rate)
            mutated_child = mutate(child, mutation_rate)
            new_pop_arr.append(mutated_child)
            j += 1
        population = Population(new_pop_arr)
        i += 1
        best_from_pop = population.tournament(population_size, task)
        best_evaluated = best_from_pop.best_individual(task)
        new_pop_val.append(best_evaluated)
    return new_pop_val
def next_generation(previous_generation):
    next_generation = []
    if previous_generation is None:
        for _ in range(constant.mew):
            individual = Individual()
            #individual.set_fitness(classification_rate(individual.features)
            #print(str(individual.features)+"  fitness: "+str(individual.fitness))
            next_generation.append(individual)
        return next_generation

    #individuals retained from previous population
    individuals_to_retain = best_individuals(previous_generation,
                                             constant.retain_previous)
    for individual in individuals_to_retain:
        #print("retained"+str(individual.features)+"  fitness: "+str(individual.fitness))
        next_generation.append(individual)

    #crossover population
    #mating_pool=[]
    #for _ in range(constant.mating_pool_size):
    #to_mate=select_individual(previous_generation)
    #mating_pool.append(to_mate)

    for _ in range(constant.mew - constant.retain_previous):
        male = select_individual(previous_generation)
        #print("male selected for mating"+str(male.features)+"  fitness: "+str(male.fitness))
        female = select_individual(previous_generation)
        #print("female selected for mating"+str(female.features)+"  fitness: "+str(female.fitness))
        child = crossover(male, female)
        #print("after crossover"+str(child.features)+"  fitness: "+str(child.fitness))
        mutated_child = mutate(child)
        #print("after mutation"+str(mutated_child.features)+"  fitness: "+str(mutated_child.fitness))
        next_generation.append(mutated_child)
    return next_generation
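next_generation() bootstraps itself when called with None, so a driver only needs to thread the returned population through successive calls. A hypothetical usage sketch (the generation budget and the use of best_individuals for the final pick are assumptions):

# Hypothetical driver: passing None triggers random initialization on the
# first call, after which each returned generation is fed back in.
population = None
for _ in range(50):  # assumed generation budget
    population = next_generation(population)
best = best_individuals(population, 1)[0]  # assumed to return the top-1 list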
def main(popsize,gens):
    growth = []

    # Open a random start population
    alldata = genrandomstart(popsize)

    for j in range(gens):
        listscores = []
        genfitness = 0

        # check for the entire population the fitness
        for index, data in enumerate(alldata):
            fitness = fitnesscheck(data)
            listscores.append([index, fitness])
            genfitness += fitness

        print("De "+ str(j) + "e generatie heeft een gemiddelde score van ", genfitness/popsize)

        # Make N new individuals for the next generation
        newpopulation = []
        for i in range(int(popsize/2)):
            # Pick two parents tournament-style
            win1, win2 = pickparents(popsize, listscores)
            # Crossbreed two new children
            child1, child2 = cross(alldata[win1], alldata[win2])
            newpopulation.append(child1)
            newpopulation.append(child2)
            print("nieuwe ronde, nieuwe kansen")
            for k in range(2):
                print(k)
                mutation_decision = random.randint(0, 100)

                if mutation_decision > 90:
                    mutate(newpopulation[2*i+k])

        alldata = newpopulation
        growth.append(genfitness/popsize)

    print(growth)
    plt.plot(growth)
    plt.show()

    with open("endresult.txt", "w") as txt_file:
        for data in alldata:
            for line in data:
                txt_file.write(" ".join(str(line)) + "\n")
Example #9
def breed(inputfile, outputdir):    
    ip = open(inputfile)
    breedParams = ip.read()
    ip.close()
    breedParams = breedParams.split("\n")
    # This is hardcoded. Put it in a config file later.
    INITIALSETSZ = 10000
    MATINGPOOLSZ = 10
    #matingpool = []
    #offsprings = []
    MUTATIONRATE = 0.10
    mutationpool = []
    #for param in breedParams:
    while len(breedParams) > 0:
        param = random.choice(breedParams)
        if not param:
            break
        breedParams.remove(param)
        op = ' '.join(param.split())
        op = outputdir + op + '.txt'
        op = open(op, 'a')
        seedgraphs = initialization.getSeedGraphs(INITIALSETSZ, param)
        for i in range(int(len(seedgraphs) * MUTATIONRATE)):
            mutationpool.append(random.choice(seedgraphs))
        mutation.mutate(param, mutationpool)
        del mutationpool[:]
        matingpool = selection.selectGraphs(MATINGPOOLSZ, seedgraphs, param, op)
        del seedgraphs[:]        
        maxiter = 10
        for _ in range(maxiter):
            offsprings = crossover.produceOffsprings(param, matingpool)
            del matingpool[:]
            for i in range(int(len(offsprings) * MUTATIONRATE)):
                mutationpool.append(random.choice(offsprings))
            mutation.mutate(param, mutationpool)
            del mutationpool[:]
            matingpool = selection.selectGraphs(MATINGPOOLSZ, offsprings, param, op)
            del offsprings[:]
            avlen = 0
            for m in matingpool:
                avlen += len(m)
            avlen /= len(matingpool)  # average size of the mating pool members (not used further)
        op.write('\n\n')
        op.write('*****************')
        op.close()
    return
def mutate_pop(individual, mutpb, primitive_set, terminal_set):
    offspring = []

    # Replace
    if random.random() < mutpb:
        offspring.append((mutate(mutate_replace, primitive_set, terminal_set,
                                 deepcopy(individual[0])), None))

    # Insert
    if random.random() < mutpb:
        offspring.append((mutate(mutate_insert,
                                 primitive_set,
                                 terminal_set,
                                 deepcopy(individual[0]),
                                 use_input_ids=True), None))

    # Shrink
    if random.random() < mutpb:
        offspring.append((mutate(mutate_shrink, primitive_set, terminal_set,
                                 deepcopy(individual[0])), None))

    return offspring
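mutate_pop() returns a variable-length list of (mutated tree, None) pairs, one entry per mutation operator that fired. A hypothetical usage sketch; population, primitive_set, and terminal_set are placeholders from the surrounding project, and the mutation probability is assumed:

# Hypothetical usage: apply mutate_pop to every (tree, fitness) pair in a
# population and pool the offspring lists it returns.
offspring = []
for individual in population:
    offspring.extend(mutate_pop(individual,
                                mutpb=0.3,  # assumed mutation probability
                                primitive_set=primitive_set,
                                terminal_set=terminal_set))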
Example #11
def genetic_algorithm(task,
                      crossover_rate=CROSSOVER_RATE,
                      mutation_rate=MUTATION_RATE,
                      tournament_size=TOURNAMENT_SIZE,
                      population_size=POPULATION_SIZE):
    best_individuals_values = np.empty(MAX_ITERATIONS)
    start_time = time.time()

    population = init_population(task, population_size)
    outer_iterator = 0
    best_individual = 0
    best_individual_value = 0
    global_best_individual = 0
    global_best_individual_value = 0
    while outer_iterator < MAX_ITERATIONS:
        inner_iterator = 0
        new_population = Population()
        while inner_iterator < population_size:
            parent1 = tournament(population, tournament_size)
            parent2 = tournament(population, tournament_size)
            child = crossover(parent1, parent2, crossover_rate)
            mutate(child, mutation_rate)
            new_population.add_individual(child)
            inner_iterator += 1
        best_individual = population.best_individual()
        best_individual_value = best_individual.evaluate()
        if best_individual_value >= global_best_individual_value:
            global_best_individual = best_individual
            global_best_individual_value = best_individual_value
        new_population.set_first_individual(global_best_individual)
        population = new_population
        best_individuals_values[outer_iterator] = global_best_individual_value
        outer_iterator += 1
    print("--- Genetic algorithm\'s execution time = %s seconds ---" %
          (time.time() - start_time))
    print('Genetic algorithm\'s final result =', global_best_individual_value)
Example #12
def operate(gen_in, mu_in, lamb_da_in, boundary_in, gen, maxgen):
    lambda_gen = crossover_UNDX(gen_in, mu_in, lamb_da_in)
    mutant = np.random.randint(0, 6)
    lambda_gen = mutate(mutant,
                        lambda_gen,
                        boundary_in,
                        gen,
                        normal_sigma=0.5,
                        uniform_pm=0.1,
                        boundary_pm=0.1,
                        maxgen=maxgen,
                        b=5,
                        cauchy_sigma=0.5,
                        delta_max=20,
                        n=2)
    return reflect_fix(lambda_gen, boundary_in)
Example #13
def multi_recombine(population, data1, data2, prob, mutations, cross):
    '''
    recombine 2 distinct sound samples through random crossover points
    and mutate

    parameters:
        population: desired population size to be created
        data1: 1D numpy array of first sound sample
        data2: 1D numpy array of second sound sample
        prob: percent chance that each of the desired mutations occurs
        mutations: number of mutations to perform
        cross: number of crossover points

    return:
        a list of size population containing 1D numpy arrays
        of the recombined sound samples
    '''
    #resize the smaller sound data to match the larger sound data
    if (len(data1) != len(data2)):
        if (len(data1) < len(data2)):
            smaller = data1
            larger = data2
        else:
            smaller = data2
            larger = data1
        multiple = int(len(larger) / len(smaller))
        init = smaller
        if (multiple >= 2):
            for i in range(multiple - 1):
                init = np.concatenate((init, smaller))
        diff = len(larger) - multiple * len(smaller)
        init = np.concatenate((init, smaller[0:diff]))
        data1 = init
        data2 = larger
    dft1 = fft(data1)
    dft2 = fft(data2)
    if (len(dft1) % 2 == 1):
        chop = int(len(dft1) / 2) + 1
        next_chop = chop
    else:
        chop = int(len(dft1) / 2)
        next_chop = chop + 1
    #focusing on keeping the left portion of f1
    f1 = dft1[1:chop]
    f1_conj = np.flip(dft1[next_chop:len(dft1)])
    f2 = dft2[1:chop]
    f2_conj = np.flip(dft2[next_chop:len(dft2)])
    resulting_list = []
    for i in range(int(population / 2)):
        max_range = range(int(len(f1) / 50), int(3 * len(f1) / 4))
        crossover_points = random.sample(max_range, cross)
        crossover_points.sort()

        # the first slice in the sequence is always from f1, while
        # the last slice is from f2 if the cross is odd otherwise it's f1
        result1_seq = [f1[0:crossover_points[0]]]
        result1_conj_seq = [f1_conj[0:crossover_points[0]]]
        for i in range(cross - 1):
            # if i even add from f2
            if (i % 2 == 0):
                result1_seq.append(f2[crossover_points[i]:crossover_points[i +
                                                                           1]])
                result1_conj_seq.append(
                    f2_conj[crossover_points[i]:crossover_points[i + 1]])
            else:
                result1_seq.append(f1[crossover_points[i]:crossover_points[i +
                                                                           1]])
                result1_conj_seq.append(
                    f1_conj[crossover_points[i]:crossover_points[i + 1]])
        if (cross % 2 == 1):
            result1_seq.append(f2[crossover_points[cross - 1]:len(f1)])
            result1_conj_seq.append(f2_conj[crossover_points[cross -
                                                             1]:len(f1)])
        else:
            result1_seq.append(f1[crossover_points[cross - 1]:len(f1)])
            result1_conj_seq.append(f1_conj[crossover_points[cross -
                                                             1]:len(f1)])

        result2_seq = [f2[0:crossover_points[0]]]
        result2_conj_seq = [f2_conj[0:crossover_points[0]]]
        for i in range(cross - 1):
            # if i even add from f1
            if (i % 2 == 0):
                result2_seq.append(f1[crossover_points[i]:crossover_points[i +
                                                                           1]])
                result2_conj_seq.append(
                    f1_conj[crossover_points[i]:crossover_points[i + 1]])
            else:
                result2_seq.append(f2[crossover_points[i]:crossover_points[i +
                                                                           1]])
                result2_conj_seq.append(
                    f2_conj[crossover_points[i]:crossover_points[i + 1]])
        if (cross % 2 == 1):
            result2_seq.append(f1[crossover_points[cross - 1]:len(f1)])
            result2_conj_seq.append(f1_conj[crossover_points[cross -
                                                             1]:len(f1)])
        else:
            result2_seq.append(f2[crossover_points[cross - 1]:len(f1)])
            result2_conj_seq.append(f2_conj[crossover_points[cross -
                                                             1]:len(f1)])

        result1 = np.concatenate(result1_seq)
        result1_conj = np.concatenate(result1_conj_seq)
        result2 = np.concatenate(result2_seq)
        result2_conj = np.concatenate(result2_conj_seq)
        mutation.mutate(result1, result1_conj, prob, mutations)
        mutation.mutate(result2, result2_conj, prob, mutations)
        assert (len(result1) == len(result1_conj))
        assert (len(result2) == len(result2_conj))
        assert (len(result1) == len(result2))
        assert (len(result1) == len(f1))

        if (len(dft1) % 2 == 1):
            beg = np.array([dft1[0]])
            final1 = np.concatenate((beg, result1, np.flip(result1_conj)))
            conv1 = np.real(ifft(final1))
            loudest1 = np.amax(np.absolute(conv1))
            resulting_list.append(np.float32(conv1 / loudest1))
            final2 = np.concatenate((beg, result2, np.flip(result2_conj)))
            conv2 = np.real(ifft(final2))
            loudest2 = np.amax(np.absolute(conv2))
            resulting_list.append(np.float32(conv2 / loudest2))
        else:
            beg = np.array([dft1[0]])
            mid = np.array([dft1[chop]])
            final1 = np.concatenate((beg, result1, mid, np.flip(result1_conj)))
            conv1 = np.real(ifft(final1))
            loudest1 = np.amax(np.absolute(conv1))
            resulting_list.append(np.float32(conv1 / loudest1))
            final2 = np.concatenate((beg, result2, mid, np.flip(result2_conj)))
            conv2 = np.real(ifft(final2))
            loudest2 = np.amax(np.absolute(conv2))
            resulting_list.append(np.float32(conv2 / loudest2))
    return resulting_list
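multi_recombine() expects raw 1D sample arrays, so a typical call reads two recordings, recombines them, and writes the children back out. A hypothetical usage sketch: the file names and the prob/mutations/cross values are assumptions, and both inputs are assumed to be mono WAV files.

from scipy.io import wavfile

# hypothetical mono input files
rate, data1 = wavfile.read("parent1.wav")
_, data2 = wavfile.read("parent2.wav")

children = multi_recombine(population=6, data1=data1, data2=data2,
                           prob=5, mutations=3, cross=4)
for idx, child in enumerate(children):
    # each child is a float32 array normalized to [-1, 1]
    wavfile.write("child_%d.wav" % idx, rate, child)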
Example #14
def single_recombine(population, data1, data2, prob, mutations):
    '''
    recombine 2 distinct sound samples through 1 random crossover point
    and mutate

    parameters:
        population: desired population size to be created
        data1: 1D numpy array of first sound sample
        data2: 1D numpy array of second sound sample
        prob: percent chance that each of the desired mutations occurs
        mutations: number of mutations to perform

    return:
        a list of size population containing 1D numpy arrays
        of the recombined sound samples
    '''
    #resize the smaller sound data to match the larger sound data
    if (len(data1) != len(data2)):
        if (len(data1) < len(data2)):
            smaller = data1
            larger = data2
        else:
            smaller = data2
            larger = data1
        multiple = int(len(larger) / len(smaller))
        init = smaller
        if (multiple >= 2):
            for i in range(multiple - 1):
                init = np.concatenate((init, smaller))
        diff = len(larger) - multiple * len(smaller)
        init = np.concatenate((init, smaller[0:diff]))
        data1 = init
        data2 = larger
    dft1 = fft(data1)
    dft2 = fft(data2)
    if (len(dft1) % 2 == 1):
        chop = int(len(dft1) / 2) + 1
        next_chop = chop
    else:
        chop = int(len(dft1) / 2)
        next_chop = chop + 1
    #focusing on keeping the left portion of f1
    f1 = dft1[1:chop]
    f1_conj = np.flip(dft1[next_chop:len(dft1)])
    f2 = dft2[1:chop]
    f2_conj = np.flip(dft2[next_chop:len(dft2)])
    resulting_list = []
    for i in range(int(population / 2)):
        crossover_point = random.randint(int(len(f1) / 50),
                                         int(3 * len(f1) / 4))
        result1 = np.concatenate(
            (f1[0:crossover_point], f2[crossover_point:len(f2)]))
        result1_conj = np.concatenate(
            (f1_conj[0:crossover_point], f2_conj[crossover_point:len(f2)]))
        result2 = np.concatenate(
            (f2[0:crossover_point], f1[crossover_point:len(f1)]))
        result2_conj = np.concatenate(
            (f2_conj[0:crossover_point], f1_conj[crossover_point:len(f1)]))
        mutation.mutate(result1, result1_conj, prob, mutations)
        mutation.mutate(result2, result2_conj, prob, mutations)
        if (len(dft1) % 2 == 1):
            beg = np.array([dft1[0]])
            final1 = np.concatenate((beg, result1, np.flip(result1_conj)))
            conv1 = np.real(ifft(final1))
            loudest1 = np.amax(np.absolute(conv1))
            resulting_list.append(np.float32(conv1 / loudest1))
            final2 = np.concatenate((beg, result2, np.flip(result2_conj)))
            conv2 = np.real(ifft(final2))
            loudest2 = np.amax(np.absolute(conv2))
            resulting_list.append(np.float32(conv2 / loudest2))
        else:
            beg = np.array([dft1[0]])
            mid = np.array([dft1[chop]])
            final1 = np.concatenate((beg, result1, mid, np.flip(result1_conj)))
            conv1 = np.real(ifft(final1))
            loudest1 = np.amax(np.absolute(conv1))
            resulting_list.append(np.float32(conv1 / loudest1))
            final2 = np.concatenate((beg, result2, mid, np.flip(result2_conj)))
            conv2 = np.real(ifft(final2))
            loudest2 = np.amax(np.absolute(conv2))
            resulting_list.append(np.float32(conv2 / loudest2))
    return resulting_list
Example #15
def vrp_ga(m=0.2, gen=500, N=100, best_N=5, setup=1, plot_folder="plots/"):
    """
    Complete function, optimizing the vehicle routing problem using genetic
    algorithms and a custom crossover/mutation part.

    Args
        m                   mutation probability
        gen                 number of generations (iterations)
        N                   population size (must be divisible by 4)
        best_N              plotting the N best individuals each generation
        setup               number of setup (currently 1 and 2)
        plot_folder         path to save plots
    """
    if N % 4 != 0:
        raise ValueError(
            "[!] 'N' (population size) has to be divisible by 4!")

    history = np.zeros((gen, best_N))

    # create one test member, to load task
    # cap = capacity of trucks in list
    # demands = demands of cities
    # dist = distance matrix
    # tc = transportation costs of trucks
    _, cap, demands, dist, tc = complete_init(task_nr=setup)

    ##### initialization
    # create a list for our population with N elements, with multiple
    # possible setups as numpy arrays
    # [instead of doing a list of 2d arrays, we do one 3d array, where the
    # first dimension stands for the members]
    # population has 3 dimensions: members x trucks x cities
    population = np.zeros((N, len(cap), len(demands)))
    # fill the population array
    for i in range(N):
        population[i] = complete_init(task_nr=setup)[0]

    for gen_idx in tqdm(range(gen)):
        ##### fitness
        permutations = np.array(
            [permute_way(member, dist, demands, cap) for member in population])
        fitness_scores = np.array([
            fitness_function(dist, permutation, member, tc)
            for permutation, member in zip(permutations, population)
        ])
        history[gen_idx] = sorted(fitness_scores)[-best_N:]

        ##### selection
        selected_indx = roulette_wheel_selection(fitness_scores, int(N / 2))
        selected_members = population[selected_indx]
        np.random.shuffle(selected_members)

        ##### crossover
        children = vrp_crossover(selected_members, cap, demands)

        ##### mutation
        for i, child in enumerate(children):
            if random.uniform(0, 1) < m:
                children[i] = mutate(child, cap)

        ##### replacement
        population = replacement(population,
                                 children,
                                 mode="delete-all",
                                 n=len(children),
                                 based_on_fitness=True,
                                 fitness_old=fitness_scores)

    ##### plotting
    plt.figure()
    plt.plot(history)
    plt.xlabel("Generations")
    plt.savefig(plot_folder + "history" +
                strftime("%Y-%m-%d %H:%M:%S", gmtime()) + ".png")
    plt.show()
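The selection step above relies on a roulette_wheel_selection helper that returns int(N / 2) member indices in proportion to fitness. A generic, hypothetical version is sketched below; the project's own helper may differ (for example in how it handles non-positive or minimization-style scores):

import numpy as np


def roulette_wheel_selection(fitness_scores, n_selected):
    """Hypothetical fitness-proportionate selection returning member indices."""
    scores = np.asarray(fitness_scores, dtype=float)
    scores = scores - scores.min() + 1e-9      # shift so every weight is positive
    probabilities = scores / scores.sum()
    return np.random.choice(len(scores), size=n_selected, replace=True,
                            p=probabilities)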
def run_iteration(dataset, features, feature_cdds, population, population_size,
                  elitism, evaluation_threshold, bacc_weight, uniqueness,
                  best_classifiers, crossover_probability,
                  mutation_probability, tournament_size, print_results):
    """

    Runs a single genetic algorithm iteration.

    Parameters
    ----------
    dataset : Pandas DataFrame
        data set
    features : list
        list of features
    feature_cdds : list
        list of feature cdds dicts
    population : list
        list of classifiers (Classifier objects)
    population_size : int
        population size
    elitism : bool
        if True the best found solutions are added to the population in each selection operation
    evaluation_threshold : float
        classifier evaluation threshold
    bacc_weight : float
        weight of balanced accuracy in the multi-objective score
    uniqueness : bool
         if True only unique inputs in a classifier are counted, otherwise the input cdd score is multiplied by
         the number of inputs
    best_classifiers : BestSolutions object
        includes best solutions
    crossover_probability : float
        crossover probability
    mutation_probability : float
        mutation probability
    tournament_size : float
        tournament size
    print_results : bool
         if True more information is shown, otherwise not all results are printed

    Returns
    -------
    best_classifiers : BestSolutions object
        includes all best classifiers

    """

    # SELECTION
    selected_parents = []
    temp_population = []

    if elitism is True:
        temp_population = population.copy()
        classifier_id = random.randrange(0, len(best_classifiers.solutions))
        temp_population.append(best_classifiers.solutions[classifier_id])

    for i in range(0, int(population_size / 2)):  # iterate through population

        # select two parents
        if elitism is True:
            first_parent_id, second_parent_id = selection.select(
                temp_population, tournament_size)
            # add new parents to selected parents
            selected_parents.append(
                temp_population[first_parent_id].__copy__())
            selected_parents.append(
                temp_population[second_parent_id].__copy__())
        else:
            first_parent_id, second_parent_id = selection.select(
                population, tournament_size)
            # add new parents to selected parents
            selected_parents.append(population[first_parent_id].__copy__())
            selected_parents.append(population[second_parent_id].__copy__())

    population.clear()  # empty population

    # CROSSOVER
    for i in range(0, int(population_size / 2)):  # iterate through selected parents

        # randomly choose the probability value for crossover
        crossover_rand = random.random()

        # randomly choose the first parent and copy it
        first_parent_id = random.randrange(0, len(selected_parents))
        first_parent = selected_parents[first_parent_id].__copy__()

        # remove the first parent from the available parents
        del selected_parents[first_parent_id]

        # randomly choose the second parent and copy it
        second_parent_id = random.randrange(0, len(selected_parents))
        second_parent = selected_parents[second_parent_id].__copy__()

        # remove the second parent from the available parents
        del selected_parents[second_parent_id]

        # if the crossover_rand is lower than or equal to probability - apply crossover
        if crossover_rand <= crossover_probability:

            # crossover
            first_child, second_child = crossover.crossover_parents(
                first_parent, second_parent)

            # add the children to the new population
            population.append(first_child.__copy__())
            population.append(second_child.__copy__())

        else:
            # if crossover is not applied - copy the parents instead
            population.append(first_parent.__copy__())
            population.append(second_parent.__copy__())

    # MUTATION
    population = mutation.mutate(population, features, mutation_probability,
                                 evaluation_threshold)

    # UPDATE THETA AND REMOVE RULE DUPLICATES
    for classifier in population:
        classifier.update_theta()
        classifier.remove_duplicates()

    # EVALUATION OF THE POPULATION
    avg_population_score, best_classifiers = \
        eval.evaluate_individuals(population=population,
                                  dataset=dataset,
                                  bacc_weight=bacc_weight,
                                  feature_cdds=feature_cdds,
                                  uniqueness=uniqueness,
                                  best_classifiers=best_classifiers)

    if print_results:
        print("average population score: ", avg_population_score)

    return best_classifiers
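run_iteration() refills the passed-in population in place and returns the updated BestSolutions object, so an outer loop just keeps feeding both back in. A hypothetical driver sketch; the iteration budget and all numeric arguments are placeholder values, and the initial population, its evaluation, and best_classifiers are assumed to be built beforehand:

# Hypothetical outer loop around run_iteration.
for iteration in range(100):
    best_classifiers = run_iteration(dataset, features, feature_cdds,
                                     population, population_size=50,
                                     elitism=True, evaluation_threshold=0.5,
                                     bacc_weight=0.5, uniqueness=True,
                                     best_classifiers=best_classifiers,
                                     crossover_probability=0.8,
                                     mutation_probability=0.1,
                                     tournament_size=0.2,
                                     print_results=False)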
        # Storing the average values over every single iteration
        average_vol = []
        average_num = []
        average_value = []

        for i in range(NUM_OF_ITERATIONS):
            # Generate the initial population
            population = gen.generate_pop(box_params, NUM_OF_INDIVIDUALS, ROTATIONS)

            # use a separate generation counter so the 'gen' module used above is not shadowed
            generation = 0
            average_fitness = []
            while generation < NUM_OF_GENERATIONS:
                population, fitness = ft.evaluate(population, truck_dimension, box_params, total_value)
                population = ns.rank(population, fitness)
                offsprings = re.crossover(deepcopy(population), PC, k=K)
                offsprings = mt.mutate(offsprings, PM1, PM2, ROTATIONS)
                population = ss.select(population, offsprings, truck_dimension, box_params, total_value,
                                       NUM_OF_INDIVIDUALS)
                average_fitness.append(calc_average_fitness(population))
                generation += 1
            results = []

            # Storing the final Rank 1 solutions
            for key, value in population.items():
                if value['Rank'] == 1:
                    results.append(value['result'])

            # Plot using plotly
            color_index = vis.draw_solution(pieces=packages)
            vis.draw(results, color_index)

def break_bit_list(bit_list):
    length = int(len(bit_list) / 2)
    NS = []
    NL = []
    for i in range(length):
        NS.append(bit_list[i])
        NL.append(bit_list[i + length])
    result = (NS, NL)
    return result


def __bitlist_to_int(bitlist):
    return int("".join(str(i) for i in bitlist), 2)
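A quick usage example of the two helpers above (values purely illustrative): the chromosome is split into its two halves, and each half is decoded as an unsigned integer.

bits = [1, 0, 1, 1, 0, 1, 1, 0]
NS, NL = break_bit_list(bits)                       # ([1, 0, 1, 1], [0, 1, 1, 0])
print(__bitlist_to_int(NS), __bitlist_to_int(NL))   # 11 6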


pop = population.init(POPULATION_SIZE, CROMOSSOME_SIZE, 'BIN', BOUNDS)
evaluations = fit.evaluate(fitness, pop)
#print(evaluations)
#print(selection.select(evaluations))
#selection.select(evaluations)

for _ in range(MAX_GENERATIONS):
    evaluations = fit.evaluate(fitness, pop)
    pop = selection.select(2, 1, evaluations)
    pop = crossover.single_point(pop, CROSSOVER_PROB)
    pop = mutation.mutate(pop, MUTATION_PROB)
evaluations.sort(key=lambda tup: tup[1])
print(evaluations)
def main():
    global NFE
    NFE = 0

    CostFunction = MinOne  # Cost Function

    n_var = 20  # Number of Decision Variables

    VarSize = np.array([1, n_var])  # Decision Variables Matrix Size

    # GA Parameters

    MaxIt = 500  # Maximum Number of Iterations

    nPop = 10  # Population Size

    pc = 0.8  # crossover Percentage
    nc = 2 * round(pc * nPop / 2)  # Number of Offspring (Parents)

    pm = 0.1  # Mutation Percentage
    nm = round(pm * nPop)  # Number of Mutants

    mu = 0.1  # Mutation Rate

    beta = 8

    class EmptyIndividual():
        pass

    Best = [EmptyIndividual() for i in range(MaxIt)]
    pop = np.array([EmptyIndividual() for i in range(nPop)])

    for i in range(nPop):
        # Initialize Position
        pop[i].Position = np.array(
            [random.randint(0, 1) for i in range(n_var)])

        # Evaluation
        pop[i].Cost = CostFunction(pop[i].Position)

    # Sort Population
    Costs = np.array([pop[i].Cost for i in range(nPop)])
    SortOrder = np.argsort(Costs)

    pop = pop[SortOrder]

    # Array to Hold Best Cost Values
    BestCost = np.zeros([MaxIt, 1])

    # Store Cost
    WorstCost = pop[-1].Cost

    # Array to Hold Number of Function Evaluations
    nfe = np.zeros([MaxIt, 1])

    for it in range(MaxIt):

        # Calculate Selection Probabilities (Boltzmann-style weighting)
        P = np.exp(-beta * Costs / WorstCost)
        P = P / sum(P)

        # crossover
        popc = np.array([[EmptyIndividual() for i in range(int(nc / 2))],
                         [EmptyIndividual()
                          for i in range(int(nc / 2))]]).transpose()

        for k in range(int(nc / 2)):

            i1 = roulette_wheel_selection(P)
            i2 = roulette_wheel_selection(P)

            # Select Parents
            p1 = pop[i1]
            p2 = pop[i2]

            # Apply crossover
            popc[k, 0].Position, popc[k, 1].Position = crossover(
                p1.Position, p2.Position)

            # Evaluate Offsprings
            popc[k, 0].Cost = CostFunction(popc[k, 0].Position)
            popc[k, 1].Cost = CostFunction(popc[k, 1].Position)

        popc = np.array([popc[i] for i in range(len(popc))]).transpose()
        popc = np.concatenate((popc[0], popc[1]), axis=0)

        # Mutation
        popm = np.array([EmptyIndividual() for i in range(nm)])

        for k in range(nm):
            # Select Parent
            i = random.randint(0, nPop - 1)
            p = pop[i]

            # Apply Mutation
            popm[k].Position = mutate(np.array(p.Position), mu)

            # Evaluate Mutant
            popm[k].Cost = CostFunction(popm[k].Position)

        # Create Merged Population
        pop = np.concatenate((pop, popc, popm), axis=0)

        # Sort Population
        Costs = np.array([pop[i].Cost for i in range(len(pop))])
        SortOrder = np.argsort(Costs)
        Costs = Costs[SortOrder]
        pop = pop[SortOrder]

        # Truncation
        pop = pop[0:nPop]
        Costs = Costs[0:nPop]

        # Store Best Cost Ever Found
        BestCost[it] = pop[0].Cost
        Best[it] = pop[0]

        # Store Worst Cost Ever Found
        WorstCost = pop[-1].Cost

        # Store NFE
        nfe[it] = NFE

        # Show Iteration Information
        print('Iteration', it, ': NFE = ', nfe[it], ', Best Cost = ',
              BestCost[it], 'Chromosome: ', pop[0].Position)
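main() relies on a MinOne cost function plus crossover, mutate, and roulette_wheel_selection helpers that are not shown. A minimal compatible sketch is given below (hypothetical; the project's own versions may differ), with MinOne counting the ones to be minimized and the NFE bookkeeping mirroring the global used in main():

import random
import numpy as np


def MinOne(x):
    """Hypothetical cost: number of ones in the bit string (to be minimized)."""
    global NFE
    NFE += 1
    return int(np.sum(x))


def crossover(position1, position2):
    """Hypothetical single-point crossover of two 0/1 numpy arrays."""
    cut = random.randint(1, len(position1) - 1)
    child1 = np.concatenate((position1[:cut], position2[cut:]))
    child2 = np.concatenate((position2[:cut], position1[cut:]))
    return child1, child2


def mutate(position, mu):
    """Hypothetical bit-flip mutation: flip each gene with probability mu."""
    flips = np.random.rand(len(position)) < mu
    mutant = position.copy()
    mutant[flips] = 1 - mutant[flips]
    return mutant


def roulette_wheel_selection(P):
    """Hypothetical roulette wheel: return an index drawn with probabilities P."""
    r = random.random()
    cumulative = np.cumsum(P)
    return int(np.argmax(r <= cumulative))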
Example #20
def run_tests(num_to_check=10, smaller_num_to_check=10):

    import taut
    veering_isosigs = parse_data_file("Data/veering_census.txt")
    print("testing is_taut")
    for sig in random.sample(veering_isosigs, num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        assert taut.is_taut(tri, angle), sig

    print("testing isosig round trip")
    for sig in random.sample(veering_isosigs, num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        recovered_sig = taut.isosig_from_tri_angle(tri, angle)
        assert sig == recovered_sig, sig
        # we only test this round trip - the other round trip does not
        # make sense because tri->isosig is many to one.

    import transverse_taut
    print("testing is_transverse_taut")
    for sig in random.sample(veering_isosigs, num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        assert transverse_taut.is_transverse_taut(tri, angle), sig

    non_transverse_taut_isosigs = parse_data_file("Data/veering_non_transverse_taut_examples.txt")
    print("testing not is_transverse_taut")
    for sig in non_transverse_taut_isosigs:
        tri, angle = taut.isosig_to_tri_angle(sig)
        assert not transverse_taut.is_transverse_taut(tri, angle), sig

    import veering
    print("testing is_veering")
    for sig in random.sample(veering_isosigs, num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        assert veering.is_veering(tri, angle), sig

    # tri, angle = taut.isosig_to_tri_angle("cPcbbbdxm_10")
    # explore_mobius_surgery_graph(tri, angle, max_tetrahedra = 12)
    # # tests to see that it makes only veering triangulations as it goes

    import veering_dehn_surgery
    print("testing veering_dehn_surgery")
    for sig in random.sample(veering_isosigs, num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        for face_num in veering_dehn_surgery.get_mobius_strip_indices(tri):
            (tri_s, angle_s, face_num_s) = veering_dehn_surgery.veering_mobius_dehn_surgery(tri, angle, face_num)
            assert veering.is_veering(tri_s, angle_s), sig
            
    import veering_fan_excision
    print("testing veering_fan_excision")
    m003, _ = taut.isosig_to_tri_angle('cPcbbbdxm_10')
    m004, _ = taut.isosig_to_tri_angle('cPcbbbiht_12')
    for sig in random.sample(veering_isosigs, num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        tet_types = veering.is_veering(tri, angle, return_type = "tet_types")
        if tet_types.count("toggle") == 2:
            excised_tri, _ = veering_fan_excision.excise_fans(tri, angle)
            assert ( excised_tri.isIsomorphicTo(m003) != None or
                     excised_tri.isIsomorphicTo(m004) != None ), sig

    import pachner
    print("testing pachner with taut structure")
    for sig in random.sample(veering_isosigs, num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        face_num = random.randrange(tri.countTriangles())
        result = pachner.twoThreeMove(tri, face_num, angle = angle, return_edge = True)  
        if result != False: 
            tri2, angle2, edge_num = result
            tri3, angle3 = pachner.threeTwoMove(tri2, edge_num, angle = angle2)
            assert taut.isosig_from_tri_angle(tri, angle) == taut.isosig_from_tri_angle(tri3, angle3), sig

    import branched_surface
    import regina
    print("testing branched_surface and pachner with branched surface")
    for sig in random.sample(veering_isosigs, num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        tri_original = regina.Triangulation3(tri) #copy
        branch = branched_surface.upper_branched_surface(tri, angle, return_lower = random.choice([True, False]))
        
        ### test branch isosig round trip
        sig_with_branch = branched_surface.isosig_from_tri_angle_branch(tri, angle, branch)
        tri2, angle2, branch2 = branched_surface.isosig_to_tri_angle_branch(sig_with_branch)
        assert (branch == branch2) and (angle == angle2), sig

        branch_original = branch[:] #copy
        face_num = random.randrange(tri.countTriangles())
        out = pachner.twoThreeMove(tri, face_num, branch = branch, return_edge = True)
        if out != False:
            tri, possible_branches, edge_num = out
            tri, branch = pachner.threeTwoMove(tri, edge_num, branch = possible_branches[0])
            all_isoms = tri.findAllIsomorphisms(tri_original)
            all_branches = [branched_surface.apply_isom_to_branched_surface(branch, isom) for isom in all_isoms]
            assert branch_original in all_branches, sig

    import flow_cycles
    import drill
    print("testing taut and branched drill + semiflows on drillings")
    for sig in random.sample(veering_isosigs, smaller_num_to_check):
        tri, angle = taut.isosig_to_tri_angle(sig)
        branch = branched_surface.upper_branched_surface(tri, angle) ### also checks for veering and transverse taut
        found_loops = flow_cycles.find_flow_cycles(tri, branch)
        for loop in random.sample(found_loops, min(len(found_loops), 5)):  ## drill along at most 5 loops
            tri, angle = taut.isosig_to_tri_angle(sig)
            branch = branched_surface.upper_branched_surface(tri, angle) 
            tri_loop = flow_cycles.flow_cycle_to_triangle_loop(tri, branch, loop)
            if tri_loop != False: 
                if not flow_cycles.tri_loop_is_boundary_parallel(tri_loop, tri):
                    drill.drill(tri, tri_loop, angle = angle, branch = branch, sig = sig)
                    assert branched_surface.has_non_sing_semiflow(tri, branch), sig

    print("all basic tests passed")

    try:
        import snappy
        import snappy_util
        snappy_working = True
    except:
        print("failed to import from snappy?")
        snappy_working = False

    if snappy_working:        
        print("testing algebraic intersection")
        census = snappy.OrientableCuspedCensus() # not a set or list, so can't use random.sample
        for i in range(10):
            M = random.choice(census)
            n = M.num_cusps()
            peripheral_curves = M.gluing_equations()[-2*n:]
            for i in range(2*n):
                for j in range(i, 2*n):
                    alg_int = snappy_util.algebraic_intersection(peripheral_curves[i], peripheral_curves[j])
                    if i % 2 == 0 and j == i + 1:
                        assert alg_int == 1, M.name()
                    else:
                        assert alg_int == 0, M.name()
                       
    if snappy_working:
        import veering_drill_midsurface_bdy
        print("testing veering drilling and filling")
        for sig in random.sample(veering_isosigs[:3000], num_to_check):
            T, per = veering_drill_midsurface_bdy.drill_midsurface_bdy(sig)
            M = snappy.Manifold(T.snapPea())
            M.set_peripheral_curves("shortest")
            L = snappy_util.get_slopes_from_peripherals(M, per)
            M.dehn_fill(L)
            N = snappy.Manifold(sig.split("_")[0])
            assert M.is_isometric_to(N), sig

    if snappy_working:
        print("all tests depending on snappy passed")
   
    # try:
    #     from hashlib import md5
    #     from os import remove
    #     import pyx
    #     from boundary_triangulation import draw_triangulation_boundary_from_veering_isosig
    #     pyx_working = True
    # except:
    #     print("failed to import from pyx?")
    #     pyx_working = False

    # ladders_style_sigs = {
    #     "cPcbbbiht_12": "f34c1fdf65db9d02994752814803ae01",
    #     "gLLAQbecdfffhhnkqnc_120012": "091c85b4f4877276bfd8a955b769b496",
    #     "kLALPPzkcbbegfhgijjhhrwaaxnxxn_1221100101": "a0f15a8454f715f492c74ce1073a13a4",
    # }

    # geometric_style_sigs = {
    #     "cPcbbbiht_12": "1e74d0b68160c4922e85a5adb20a0f1d",
    #     "gLLAQbecdfffhhnkqnc_120012": "856a1fce74eb64f519bcda083303bd8f",
    #     "kLALPPzkcbbegfhgijjhhrwaaxnxxn_1221100101": "33bd23b34c5d977a103fa50ffe63120a",
    # }

    # args = {
    #     "draw_boundary_triangulation":True,
    #     "draw_triangles_near_poles": False,
    #     "ct_depth":-1,
    #     "ct_epsilon":0.03,
    #     "global_drawing_scale": 4,
    #     "delta": 0.2,
    #     "ladder_width": 10.0,
    #     "ladder_height": 20.0,
    #     "draw_labels": True,
    # }

    # shapes_data = read_from_pickle("Data/veering_shapes_up_to_ten_tetrahedra.pkl")

    # if pyx_working:
    #     for sig in ladders_style_sigs:
    #         print("testing boundary triangulation pictures, ladder style", sig)
    #         args["tet_shapes"] = shapes_data[sig]
    #         args["style"] = "ladders"
    #         file_name = draw_triangulation_boundary_from_veering_isosig(sig, args = args) 
    #         f = open(file_name, "rb")
    #         file_hash = md5(f.read())
    #         assert file_hash.hexdigest() == ladders_style_sigs[sig]
    #         f.close()
    #         remove(file_name)
        
    # if pyx_working:
    #     for sig in geometric_style_sigs:
    #         print("testing boundary triangulation pictures, ladder style", sig)
    #         args["tet_shapes"] = shapes_data[sig]
    #         args["style"] = "geometric"
    #         file_name = draw_triangulation_boundary_from_veering_isosig(sig, args = args) 
    #         f = open(file_name, "rb")
    #         file_hash = md5(f.read())
    #         assert file_hash.hexdigest() == geometric_style_sigs[sig]
    #         f.close()
    #         remove(file_name)

    # if pyx_working: 
    #     print("all tests depending on pyx passed")

    veering_polys = {
        "cPcbbbiht_12": [-4, -1, 1, 4],
        "eLMkbcddddedde_2100": [-2, -2, -2, -1, -1, -1, -1, 1, 1, 1, 1, 1, 1, 2, 2],
        "gLLAQbecdfffhhnkqnc_120012": [-1, -1, -1, -1, 1, 1, 1, 1],
        "gLLPQcdfefefuoaaauo_022110": [-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1],
    }

    # veering_polys = { ### old
    #     "cPcbbbiht_12": "a^3 - 4*a^2 + 4*a - 1",
    #     "eLMkbcddddedde_2100": "a^6*b - a^6 - 2*a^5*b - a^4*b^2 + a^5 + 2*a^4*b + a^3*b^2 - 2*a^3*b + a^3 + 2*a^2*b + a*b^2 - a^2 - 2*a*b - b^2 + b",
    #     "gLLAQbecdfffhhnkqnc_120012": "a^7 + a^6 + a^5 + a^4 - a^3 - a^2 - a - 1",
    #     "gLLPQcdfefefuoaaauo_022110": "a^12*b^3 - a^11*b^2 - a^10*b^3 - a^10*b^2 - a^7*b^3 - a^7*b^2 - a^6*b^3 + a^7*b + a^5*b^2 - a^6 - a^5*b - a^5 - a^2*b - a^2 - a*b + 1",
    # }

    taut_polys = {
        "cPcbbbiht_12": [-3, 1, 1],
        "eLMkbcddddedde_2100": [-1, -1, -1, 1, 1],
        "iLLAwQcccedfghhhlnhcqeesr_12001122": [],
    }

    # taut_polys = { ### old
    #     "cPcbbbiht_12": "a^2 - 3*a + 1",
    #     "eLMkbcddddedde_2100": "a^2*b - a^2 - a*b - b^2 + b",
    #     "iLLAwQcccedfghhhlnhcqeesr_12001122": "0",
    # }

    torus_bundles = [
        "cPcbbbiht_12",
        "eLMkbcdddhhqqa_1220",
        "gLMzQbcdefffhhqqqdl_122002",
    ]

    measured = [
        "gLLAQbecdfffhhnkqnc_120012",
        "iLLALQcccedhgghhlnxkxrkaa_12001112",
        "iLLAwQcccedfghhhlnhcqeesr_12001122",
    ]

    empties = [
        "fLAMcaccdeejsnaxk_20010",
        "gLALQbcbeeffhhwsras_211220",
        "hLALAkbcbeefgghhwsraqj_2112202",
    ]

    try:
        from sage.rings.integer_ring import ZZ
        sage_working = True
    except:
        print("failed to import from sage?")
        sage_working = False

    if sage_working:
        import taut_polytope
        print("testing is_layered")
        for sig in veering_isosigs[:17]:
            assert taut_polytope.is_layered(sig), sig
        for sig in veering_isosigs[17:21]:
            assert not taut_polytope.is_layered(sig), sig

    if sage_working:
        import fibered
        print("testing is_fibered")
        mflds = parse_data_file("Data/mflds_which_fiber.txt")
        mflds = [line.split("\t")[0:2] for line in mflds]
        for (name, kind) in random.sample(mflds, num_to_check):        
            assert fibered.is_fibered(name) == (kind == "fibered"), name

    if sage_working:
        import veering_polynomial
        import taut_polynomial
        print("testing veering poly")
        for sig in veering_polys:
            p = veering_polynomial.veering_polynomial(sig)
            assert check_polynomial_coefficients(p, veering_polys[sig]), sig
            ### Nov 2021: sage 9.4 changed how smith normal form works, which changed our polynomials
            ### to equivalent but not equal polynomials. To avoid this kind of change breaking things
            ### in the future, we changed to comparing the list of coefficients.
            # assert p.__repr__() == veering_polys[sig]
        print("testing taut poly")
        for sig in taut_polys:
            p = taut_polynomial.taut_polynomial_via_tree(sig)
            assert check_polynomial_coefficients(p, taut_polys[sig]), sig
        #     assert p.__repr__() == taut_polys[sig]
        print("testing divide")
        for sig in random.sample(veering_isosigs[:3000], num_to_check):
            p = veering_polynomial.veering_polynomial(sig)
            q = taut_polynomial.taut_polynomial_via_tree(sig)
            if q == 0:
                assert p == 0, sig
            else:
                assert q.divides(p), sig

    if sage_working:
        print("testing alex")
        for sig in random.sample(veering_isosigs[:3000], num_to_check):        
            snap_sig = sig.split("_")[0]
            M = snappy.Manifold(snap_sig)
            if M.homology().betti_number() == 1:
                assert taut_polynomial.taut_polynomial_via_tree(sig, mode = "alexander") == M.alexander_polynomial(), sig

    if sage_working:
        # would be nice to automate this - need to fetch the angle
        # structure say via z_charge.py...
        print("testing is_torus_bundle")
        for sig in torus_bundles: 
            assert taut_polytope.is_torus_bundle(sig), sig

    if sage_working:
        # ditto
        print("testing is_layered")
        for sig in torus_bundles:
            assert taut_polytope.is_layered(sig), sig
        print("testing measured")
        for sig in measured:
            assert taut_polytope.LMN_tri_angle(sig) == "M", sig
        print("testing empty")
        for sig in empties:
            assert taut_polytope.LMN_tri_angle(sig) == "N", sig

    if sage_working:  # warning - this takes random amounts of time!
        print("testing hom dim")
        for sig in random.sample(veering_isosigs[:3000], 3): # magic number
            # dimension = zero if and only if nothing is carried.
            assert (taut_polytope.taut_cone_homological_dim(sig) == 0) == (taut_polytope.LMN_tri_angle(sig) == "N"), sig

    if sage_working:      

        boundary_cycles = {
            ("eLMkbcddddedde_2100",(2,5,5,1,3,4,7,1)): "((-7, -7, 0, 0, 4, -3, 7, 0), (7, 7, 0, 0, -4, 3, -7, 0))",
            ("iLLLQPcbeegefhhhhhhahahha_01110221",(0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0)): "((0, 0, -1, 1, 1, 0, 1, 1, -1, 0, 0, 0, 0, 1, 0, 1), (0, 0, 1, -1, -1, 0, -1, -1, 1, 0, 0, 0, 0, -1, 0, -1))",
            ("ivvPQQcfhghgfghfaaaaaaaaa_01122000",(1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1)): "((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2), (1, 1, 0, 2, -1, 0, -3, 1, 2, -1, -2, 0, 3, -2, -1, 0), (-2, 0, -3, 1, 2, -1, 0, 2, -1, 0, 3, 1, -2, 1, 0, -1), (0, -2, 1, -3, 0, -1, 2, 0, -1, 2, -1, 1, 0, 1, -2, 3))",
        }

        taut_polys_with_cycles = {
            ("eLMkbcddddedde_2100", ((7, 7, 0, 0, -4, 3, -7, 0),)): [-1, -1, -1, 1, 1],
            ("iLLLQPcbeegefhhhhhhahahha_01110221", ((0, 0, 1, -1, -1, 0, -1, -1, 1, 0, 0, 0, 0, -1, 0, -1),)): [1, 1, 2],
            ("ivvPQQcfhghgfghfaaaaaaaaa_01122000", ((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2), (1, 1, 0, 2, -1, 0, -3, 1, 2, -1, -2, 0, 3, -2, -1, 0))): [-4, -1, -1, 1, 1],
        }

        # taut_polys_with_cycles = {
        #     ("eLMkbcddddedde_2100", ((7, 7, 0, 0, -4, 3, -7, 0),)): "a^14 - a^8 - a^7 - a^6 + 1",
        #     ("iLLLQPcbeegefhhhhhhahahha_01110221", ((0, 0, 1, -1, -1, 0, -1, -1, 1, 0, 0, 0, 0, -1, 0, -1),)): "a^2 + 2*a + 1",
        #     ("ivvPQQcfhghgfghfaaaaaaaaa_01122000", ((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2), (1, 1, 0, 2, -1, 0, -3, 1, 2, -1, -2, 0, 3, -2, -1, 0))): "a*b^2 - a^2 - 4*a*b - b^2 + a",
        # }


        taut_polys_image = {
            ('eLMkbcddddedde_2100', ((7, 8, -1, 0, -4, 4, -8, 0),)):[-1, -1, -1, 1, 1],
            ('ivvPQQcfhghgfghfaaaaaaaaa_01122000', ((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2),)):[-2, -2, -1, -1, 1, 1],
            ('ivvPQQcfhghgfghfaaaaaaaaa_01122000', ((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2), (1, 1, 0, 2, -1, 0, -3, 1, 2, -1, -2, 0, 3, -2, -1, 0))):[-4, -1, -1, 1, 1]
        }

        # taut_polys_image = {
        #     ('eLMkbcddddedde_2100', ((7, 8, -1, 0, -4, 4, -8, 0),)):"a^16 - a^9 - a^8 - a^7 + 1",
        #     ('ivvPQQcfhghgfghfaaaaaaaaa_01122000', ((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2),)):"a*b^2*c - 2*a*b*c - b^2*c - a^2 - 2*a*b + a",
        #     ('ivvPQQcfhghgfghfaaaaaaaaa_01122000', ((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2), (1, 1, 0, 2, -1, 0, -3, 1, 2, -1, -2, 0, 3, -2, -1, 0))):"a*b^2 - a^2 - 4*a*b - b^2 + a"
        # }

        alex_polys_with_cycles = {
            ("eLMkbcddddedde_2100",((7, 7, 0, 0, -4, 3, -7, 0),)): [-2, -1, -1, -1, 1, 1, 1, 2],
            ("iLLLQPcbeegefhhhhhhahahha_01110221", ((0, 0, 1, -1, -1, 0, -1, -1, 1, 0, 0, 0, 0, -1, 0, -1),)): [-3, -1, 1, 3],
            ("ivvPQQcfhghgfghfaaaaaaaaa_01122000", ((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2), (1, 1, 0, 2, -1, 0, -3, 1, 2, -1, -2, 0, 3, -2, -1, 0))): [-1, -1, 1, 1],
        }

        # alex_polys_with_cycles = {
        #     ("eLMkbcddddedde_2100",((7, 7, 0, 0, -4, 3, -7, 0),)): "a^15 - a^14 + a^9 - 2*a^8 + 2*a^7 - a^6 + a - 1",
        #     ("iLLLQPcbeegefhhhhhhahahha_01110221", ((0, 0, 1, -1, -1, 0, -1, -1, 1, 0, 0, 0, 0, -1, 0, -1),)): "3*a^3 - a^2 + a - 3",
        #     ("ivvPQQcfhghgfghfaaaaaaaaa_01122000", ((1, 1, 2, 0, -1, 2, 1, -3, 0, -1, 0, -2, -1, 0, 3, -2), (1, 1, 0, 2, -1, 0, -3, 1, 2, -1, -2, 0, 3, -2, -1, 0))): "a*b^2 - a^2 - b^2 + a",
        # }

    if sage_working:
        import taut_carried     
        print("testing boundary cycles")
        for sig, surface in boundary_cycles:
            surface_list = list(surface)
            cycles = taut_carried.boundary_cycles_from_surface(sig, surface_list)
            cycles = tuple(tuple(cycle) for cycle in cycles)
            assert repr(cycles) == boundary_cycles[(sig, surface)], sig

    if sage_working:
        print("testing taut with cycles")
        for sig, cycles in taut_polys_with_cycles:
            cycles_in = [list(cycle) for cycle in cycles]
            p = taut_polynomial.taut_polynomial_via_tree(sig, cycles_in)
            assert check_polynomial_coefficients(p, taut_polys_with_cycles[(sig, cycles)]), sig
            # assert p.__repr__() == taut_polys_with_cycles[(sig, cycles)]

    if sage_working:
        print("testing taut with images")
        for sig, cycles in taut_polys_image:
            cycles_in = [list(cycle) for cycle in cycles]
            p = taut_polynomial.taut_polynomial_image(sig, cycles_in)
            assert check_polynomial_coefficients(p, taut_polys_image[(sig, cycles)]), sig
            # assert p.__repr__() == taut_polys_image[(sig, cycles)]

    if sage_working:
        print("testing alex with cycles")
        for sig, cycles in alex_polys_with_cycles:
            cycles_in = [list(cycle) for cycle in cycles]
            p = taut_polynomial.taut_polynomial_via_tree(sig, cycles_in, mode = "alexander")
            assert check_polynomial_coefficients(p, alex_polys_with_cycles[(sig, cycles)]), sig
            # assert p.__repr__() == alex_polys_with_cycles[(sig, cycles)]

    if sage_working:
        import edge_orientability
        import taut_euler_class
        print("testing euler and edge orientability")
        for sig in random.sample(veering_isosigs[:3000], 3):
            # Theorem: If (tri, angle) is edge orientable then e = 0.
            assert not ( edge_orientability.is_edge_orientable(sig) and
                         (taut_euler_class.order_of_euler_class_wrapper(sig) == 2) ), sig

    if sage_working:
        # Theorem: If (tri, angle) is edge orientable then taut poly = alex poly.
        # taut_polynomial.taut_polynomial_via_tree(sig, mode = "alexander") ==
        #      taut_polynomial.taut_polynomial_via_tree(sig, mode = "taut")
        pass
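
        # Illustrative sketch (not from the original test suite): spot-check the
        # theorem above on a few examples.  The call form with only a sig follows
        # the commented lines above and is an assumption; we print rather than
        # assert, since the two polynomials might only agree up to units or
        # choice of variables.
        print("testing taut poly vs alexander poly on edge orientable sigs (illustrative)")
        for sig in random.sample(veering_isosigs[:3000], 3):
            if edge_orientability.is_edge_orientable(sig):
                taut_p = taut_polynomial.taut_polynomial_via_tree(sig, mode = "taut")
                alex_p = taut_polynomial.taut_polynomial_via_tree(sig, mode = "alexander")
                print(sig, taut_p == alex_p)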
            
    if sage_working:
        print("testing exotics")
        for sig in random.sample(veering_isosigs[:3000], 3):
            tri, angle = taut.isosig_to_tri_angle(sig)
            T = veering.veering_triangulation(tri, angle)
            is_eo = T.is_edge_orientable()
            for exotic_angle in T.exotic_angles():
                assert taut_polytope.taut_cone_homological_dim(tri, exotic_angle) == 0, sig
                assert is_eo == transverse_taut.is_transverse_taut(tri, exotic_angle), sig

    ### test for drill_midsurface_bdy: drill then fill, check you get the same manifold

    if sage_working:
        from sage.combinat.words.word_generators import words
        from sage.modules.free_module_integer import IntegerLattice
        from sage.modules.free_module import VectorSpace
        from sage.matrix.constructor import Matrix
        import z_charge
        import z2_taut
        import regina

        ZZ2 = ZZ.quotient(ZZ(2))

        sig_starts = ["b+-LR", "b++LR"]

        print("testing lattice for punc torus bundle")
        for i in range(3):
            for sig_start in sig_starts:
                sig = sig_start + str(words.RandomWord(8, 2, "LR"))  # 8 is a magic number
                M = snappy.Manifold(sig)
                tri = regina.Triangulation3(M)
                t, A = z_charge.sol_and_kernel(M)
                B = z_charge.leading_trailing_deformations(M)
                C = z2_taut.cohomology_loops(tri)

                AA = IntegerLattice(A)
                BB = IntegerLattice(B)
                assert AA == BB.saturation(), sig

                dim = 3*M.num_tetrahedra()
                V = VectorSpace(ZZ2, dim)
                AA = V.subspace(A)
                BB = V.subspace(B)
                CM = Matrix(ZZ2, C)
                CC = CM.right_kernel()
                assert AA.intersection(CC) == BB, sig
                ## so the leading-trailing deformations are the part of the kernel that doesn't flip over

    if sage_working:
        print("testing charges for punc torus bundle")
        for i in range(3):
            for sig_start in sig_starts:
                sig = sig_start + str(words.RandomWord(8, 2, "LR"))  # 8 is a magic number
                M = snappy.Manifold(sig)
                assert z_charge.can_deal_with_reduced_angles(M), sig
    
    if sage_working:
        import carried_surface
        import mutation
        print("testing building carried surfaces and mutations")
        sigs_weights = [
            ['iLLLPQccdgefhhghqrqqssvof_02221000',  (0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0)], 
            ['jLLAvQQcedehihiihiinasmkutn_011220000', (2, 0, 1, 0, 0, 0, 1, 2, 0, 2, 0, 2, 1, 0, 0, 0, 1, 0)],
            ['jLLAvQQcedehihiihiinasmkutn_011220000', (0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0)],
            ['jLLLMPQcdgfhfhiiihshassspiq_122201101', (0, 0, 4, 0, 4, 1, 0, 2, 2, 0, 1, 0, 0, 4, 0, 4, 0, 0)]
        ]
        strata = [
            ((1, 2), [2, 2]), 
            ((2, 4), [5, 5, 1, 1]),
            ((0, 3), [2, 0, 0]),
            ((6, 1), [22])
        ]
        orders_of_veering_symmetry_groups = [4, 2, 2, 2]
        
        for i in range(len(sigs_weights)):
            tri, angle = taut.isosig_to_tri_angle(sigs_weights[i][0])
            weights = sigs_weights[i][1]
            surface, edge_colours = carried_surface.build_surface(tri, angle, weights, return_edge_colours = True)
            assert strata[i] == carried_surface.stratum_from_weights_surface(weights, surface)
            veering_isoms = carried_surface.veering_symmetry_group(surface, edge_colours)
            assert len(veering_isoms) == orders_of_veering_symmetry_groups[i]
            isom = veering_isoms[1]
            mutation.mutate(tri, angle, weights, isom, quiet = True)
            if i == 0:
                assert tri.isoSig() == 'ivLLQQccfhfeghghwadiwadrv'
                #print('svof to wadrv passed')
            elif i == 1:
                assert tri.isoSig() == 'jvLLAQQdfghhfgiiijttmtltrcr'
                #print('smkutn to tltrcr passed')
            elif i == 2:
                assert tri.isoSig() == 'jLLMvQQcedehhiiihiikiwnmtxk'
                #print('smkutn to mtxk passed')
            elif i == 3:
                assert tri.isoSig() == 'jLLALMQcecdhggiiihqrwqwrafo'
                #print('spiq to rafo passed')
                
                        
    if sage_working:
        print("all tests depending on sage passed")
Example #21
0
    newGeneration=[]

    for j in range(populationLimit):
        fitnessVal = fitnessFunction.fitnessFunction(population[j],city)
        if fitnessVal < bestVal:
            bestVal = fitnessVal
            bestAns = population[j]
    
    for k in range(populationLimit):
        for l in range((k+1), populationLimit):
           # child = population[k]
            child = crossover.crossover(population[k],population[l],city)
            newGeneration.append(child)

    # keep the mutated members: if mutate returns a new member, store it back in
    # the list; if it mutates in place (and returns None), the entry is already updated
    for index, member in enumerate(newGeneration):
        mutated = mutation.mutate(member)
        if mutated is not None:
            newGeneration[index] = mutated

    for m in range(populationLimit):
        newGeneration.append(population[m])

    

    population = top.top(populationLimit,newGeneration,city)



print(bestAns)
print(bestVal)

elapsed = time.process_time()  # time.clock() was removed in Python 3.8; process_time() is one suggested replacement
Example #22
0
def mutation_1(generation):
    generation = mutation.mutate(generation)
    return generation
Example #23
0
def ga(chord_progression,
       n=40,
       num_iter=200,
       prob_local=.5,
       ngram_generate=None):
    """genetic algorithm
    
    Args:
        chord_progression ((int, int)[]): chord progression, given as a list of (chord_root, duration) pairs
        n (int, optional): Defaults to 40.  Population size.
        num_iter (int, optional): Defaults to 200.  Number of GA iterations before termination.
        prob_local (float, optional): Defaults to .5.  Probability of mutation.
        ngram_generate (bool, optional): Defaults to None.  If True, use an ngram model to initialize the population; otherwise, initialize randomly.

    Returns:
        (int, (int, int)[])[]: Chromosome list (fitness, genotype) of the final generation of the genetic algorithm
    """
    d = sum([d for (_, d) in chord_progression])  # d is total duration of song
    chromosome_list = initialize_chromosomes(
        n, d, chord_progression,
        ngram_generate=ngram_generate)  # list of (fitness, genotype)
    elitism_coef = 25  # how many elites to keep in each round

    for i in range(0, num_iter):
        new_chromosome_list = []
        # Elitism?
        # keep the highest fitness chromosome
        chromosome_list.sort(key=lambda x: x[0])  # sort by increasing fitness

        to_print = str(i) + ". " + str(chromosome_list[-1][0])
        print(to_print)
        for j, chrom in enumerate(reversed(chromosome_list)):
            if j >= elitism_coef:
                break
            new_chromosome_list.append(chrom)
        # new_chromosome_list.append(chromosome_list[-1])

        # Crossover n times
        for _ in range(n - elitism_coef):
            parent1 = tournament_selection(chromosome_list, 4, .9)
            parent2 = tournament_selection(chromosome_list, 4, .9)
            (child1_genotype, child2_genotype) = crossover(parent1, parent2, d)

            # calculate fitness of both children, and add higher to chromosomes
            fitness1 = calc_fitness(child1_genotype, chord_progression)
            fitness2 = calc_fitness(child2_genotype, chord_progression)

            if fitness1 > fitness2:
                new_chromosome_list.append((fitness1, child1_genotype))
            else:
                new_chromosome_list.append((fitness2, child2_genotype))

        # Mutate based on certain probabilities
        # decide on hill-climbing (don't replace parent if it was not at
        # least as fit!)
        for j, chrom in enumerate(new_chromosome_list):
            if j < elitism_coef:  # maintain elitism
                continue

            old_genotype = chrom[1][:]
            old_fitness = chrom[0]

            new_genotype = mutate(chrom, d, prob_local=prob_local)  # 1
            new_fitness = calc_fitness(new_genotype, chord_progression)
            if new_fitness >= old_fitness:  # hill climbing implemented here
                new_chromosome_list[j] = (new_fitness, new_genotype)
            else:
                new_chromosome_list[j] = (old_fitness, old_genotype)

        chromosome_list = new_chromosome_list

    chromosome_list.sort(reverse=True,
                         key=lambda x: x[0])  # sort by decreasing fitness
    return chromosome_list
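
# Usage sketch (illustrative, not from the original source).  The chord
# progression below is made up, and the helpers initialize_chromosomes,
# tournament_selection, crossover, mutate and calc_fitness are assumed to be
# defined elsewhere in this module, as in the function above.
if __name__ == "__main__":
    demo_progression = [(0, 4), (5, 4), (7, 4), (0, 4)]  # hypothetical (chord_root, duration) pairs
    final_generation = ga(demo_progression, n=40, num_iter=20, prob_local=0.5)
    best_fitness, best_genotype = final_generation[0]  # list is returned sorted by decreasing fitness
    print(best_fitness)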