Example #1
0
def add_columns(parent, dk, almost_zero):
    """Return a copy of *parent* whose rank is grown by ``dk`` columns.

    The existing factor ``G`` and tensor ``S`` are embedded into larger
    arrays; all new entries are tiny random values scaled by ``almost_zero``
    so the enlarged solution starts close to the original one.  The added
    blocks of ``S`` are mirrored across its last two axes, so any existing
    symmetry of ``S`` is kept.  The copy is flagged as not yet evaluated.
    """
    child = copy.deepcopy(parent)
    G = child.variables[0]
    S = child.variables[1]
    m, n = S.shape[0], G.shape[0]
    k = child.objectives[1]  # current rank is stored as the second objective

    # Tiny random blocks: extra G columns, then the side block of S, then
    # its (symmetrised) corner block.
    extra_cols = almost_zero * np.random.rand(n, dk)
    side = almost_zero * np.random.rand(m, k, dk)
    corner = 0.5 * almost_zero * np.random.rand(m, dk, dk)
    corner = corner + np.transpose(corner, (0, 2, 1))

    # Assemble the enlarged arrays block-wise.
    newG = np.hstack((G, extra_cols))
    top = np.concatenate((S, side), axis=2)
    bottom = np.concatenate((np.transpose(side, (0, 2, 1)), corner), axis=2)
    newS = np.concatenate((top, bottom), axis=1)

    child.variables = [newG, newS]
    child.evaluated = False
    return child
    def evolve(self, parents):
        """Uniform row-wise crossover on the G factor.

        Each row of the child's G is copied from parents[0] or parents[1]
        with equal probability.  The S tensor is taken wholesale from
        parents[0] when fewer than half of the rows came from it, and from
        parents[1] otherwise.  Returns a one-element list with the child.
        """
        offspring = copy.deepcopy(parents[0])
        split = self.n * self.k

        flat_a = np.array(parents[0].variables)
        flat_b = np.array(parents[1].variables)
        G_a = flat_a[0:split].reshape((self.n, self.k))
        G_b = flat_b[0:split].reshape((self.n, self.k))
        Ss_a = flat_a[split:].reshape((self.m, self.k, self.k))
        Ss_b = flat_b[split:].reshape((self.m, self.k, self.k))

        # One coin flip per row; True takes the row from parents[0].
        picks = [bool(random.getrandbits(1)) for _ in range(G_a.shape[0])]
        newG = np.where(np.asarray(picks)[:, None], G_a, G_b)

        # Selection rule for S, unchanged from the original implementation.
        chosen_Ss = Ss_a if sum(picks) < G_a.shape[0] / 2 else Ss_b
        offspring.variables = list(
            np.concatenate((newG.flatten(), chosen_Ss.flatten()), axis=0))
        return [offspring]
Example #3
0
 def mutate(self, parent, dk):
     """Change the factorization rank of *parent* by ``dk`` columns.

     Positive ``dk`` appends nearly-zero columns via ``add_columns``;
     negative ``dk`` repeatedly drops the least significant column.

     Raises:
         PlatypusError: if ``dk`` is zero (a no-op mutation was requested).
     """
     if dk == 0:
         raise PlatypusError('Algorithm wanted to delete 0 columns!')
     if dk > 0:
         return add_columns(parent, dk, self.almost_zero)
     shrunk = copy.deepcopy(parent)
     for _ in range(-dk):
         # NOTE(review): "delete_lest_significant_column" looks like a typo
         # for "least" — the helper is defined elsewhere, so kept as-is.
         shrunk = delete_lest_significant_column(shrunk)
     return shrunk
Example #4
0
def delete_random_column(parent):
    """Return a copy of *parent* with one uniformly chosen column removed.

    The same index is deleted from G (axis 1) and from both of the last two
    axes of S, so S keeps its square trailing shape.  The copy is flagged as
    not yet evaluated.
    """
    child = copy.deepcopy(parent)
    col = np.random.randint(child.objectives[1])  # rank is objectives[1]
    trimmed_G = np.delete(child.variables[0], col, 1)
    trimmed_S = np.delete(np.delete(child.variables[1], col, 1), col, 2)
    child.variables = [trimmed_G, trimmed_S]
    child.evaluated = False
    return child
Example #5
0
 def mutate(self,
            parent,
            worsening_factor=5.,
            convergence_steps=150,
            convergence_factor=3.):
     """Local-search mutation: refine (G, S) with Adam gradient descent.

     Runs ``self.gd.adam`` (with ``self.steps`` passed as the step budget)
     and returns the refined child together with ``dnfe``, the number of
     function evaluations the descent reported.  The child is marked
     evaluated because adam already supplies its cost.
     """
     child = copy.deepcopy(parent)
     G = child.variables[0]
     S = child.variables[1]
     rank = S.shape[1]  # record k before adam hands back a replacement S
     cost, refined_G, refined_S, dnfe = self.gd.adam(
         G, S, self.steps, worsening_factor, convergence_steps,
         convergence_factor)
     child.variables = [refined_G, refined_S]
     child.objectives = [cost, rank]
     child.evaluated = True
     return child, dnfe
    def mutate(self, parent):
        """Apply the Adam-refinement mutation with a given probability.

        An integer ``self.probability`` is interpreted the Platypus way: as
        an expected mutation count to be normalised by the number of
        Real-typed decision variables.  When the (normalised) probability
        fires, the flat variable vector is reshaped into (G, Ss), refined
        with ``self.p.adam`` for ``self.steps``, flattened back, and the
        child is marked evaluated with the cost adam reported.
        """
        child = copy.deepcopy(parent)
        problem = child.problem
        probability = self.probability

        if isinstance(probability, int):
            # Convert "expected number of mutations" into a per-solution rate.
            probability /= float(
                len([t for t in problem.types if isinstance(t, Real)]))

        # BUG FIX: gate on the normalised local ``probability``; the original
        # compared against raw ``self.probability``, which made the integer
        # normalisation above dead code.
        if random.uniform(0.0, 1.0) <= probability:
            G, Ss = self.reshaper.vec2mat(child.variables)

            c, newG, newS = self.p.adam(G, Ss, self.steps)
            con = self.reshaper.mat2vec(newG, newS)
            child.variables = con.tolist()

            child.objectives = [c]
            child.evaluated = True

        return child
Example #7
0
 def evolve(self, parents):
     """Merge two solutions into one of combined rank k1 + k2.

     The G factors are concatenated column-wise (scaled by ``factorG``) and
     the S tensors become diagonal blocks of a larger tensor (scaled by
     ``factorS``); the off-diagonal blocks are a tiny random coupling and
     its transpose, so any existing symmetry of S is kept.  Returns a
     one-element list with the offspring, flagged as not yet evaluated.
     """
     first, second = parents[0], parents[1]
     offspring = copy.deepcopy(first)

     # Side-by-side G factors.
     merged_G = self.factorG * np.concatenate(
         (first.variables[0], second.variables[0]), axis=1)

     # Direct sum of the S tensors plus a nearly-zero coupling block.
     S1, S2 = first.variables[1], second.variables[1]
     k1, k2 = first.objectives[1], second.objectives[1]
     m = S1.shape[0]
     coupling = self.almost_zero * np.random.rand(m, k1, k2)
     top = np.concatenate((self.factorS * S1, coupling), axis=2)
     bottom = np.concatenate(
         (np.transpose(coupling, (0, 2, 1)), self.factorS * S2), axis=2)
     merged_S = np.concatenate((top, bottom), axis=1)

     offspring.variables = [merged_G, merged_S]
     offspring.evaluated = False
     return [offspring]
Example #8
0
        # NOTE(review): fragment of a larger experiment loop — start_time,
        # cumulative_time, cumulative_nfe, nfe, constraint, Ri, run_str and
        # out_file_name are defined upstream of this chunk.
        # Run the optimiser for the requested number of function evaluations.
        algorithm.run(int(nfe))

        # Book-keeping: accumulate wall-clock time and NFE across runs.
        elapsed_time = time.time() - start_time
        cumulative_time = cumulative_time + elapsed_time
        cumulative_nfe += algorithm.nfe
        print('time needed=' + str(elapsed_time))

        # Post-process the final population with the constraint operator.
        population = algorithm.result
        new_p = constraint.evolve(population)

        # Rebuild each solution's S tensor slice-by-slice by fitting against
        # the corresponding slice of Ri, then re-evaluate the new solutions.
        new_new_p = []
        for sub in new_p:
            G = sub.variables[0]
            S = sub.variables[1]  # NOTE(review): read but unused below
            p_copy = copy.deepcopy(sub)

            S_new = np.zeros((Ri.shape[0], G.shape[1], G.shape[1]))
            for i in range(Ri.shape[0]):
                Rii = Ri[i, :, :]
                # presumably fits S_new[i] so that G S_new[i] G^T ≈ Rii —
                # TODO confirm against _gradientdescent's definition.
                S_new[i] = _gradientdescent(G, Rii)

            p_copy.variables = [G, S_new]
            new_new_p.append(p_copy)

        algorithm.evaluate_all(new_new_p)

        # Save values of objectives and solutions
        # NOTE(review): "ortoghonal" below looks like a typo for "orthogonal";
        # kept as-is because it names the output files.
        write_population_objectives(algorithm.result, out_file_name + run_str)
        write_population_objectives(new_new_p,
                                    out_file_name + "_ortoghonal_" + run_str)