Example #1
0
def ten_arr_test(file_name):
    """Interactively exercise a saved network on generated test arrays.

    Loads the genotype stored in ``<file_name>.csv``, rebuilds the network
    with those weights, then loops forever printing the network's answer
    (and the network itself) for freshly generated test inputs.

    :param file_name: path of the genotype CSV file, without the extension
    """
    weights, topology = get_genotype_data_from_file(file_name + ".csv")
    net = Network(topology)
    net.set_weights(weights)

    while True:
        # Only the inputs are needed here; the expected output is unused.
        ins, _ = get_test_ten_arr()
        print("Answer: " + ", ".join("{:.2f}".format(x)
                                     for x in net.process_input(ins)))
        print(net)
Example #2
0
class Agent:
    """Couples a genotype with the network it parameterizes.

    Builds a ``Network`` from *topology*, loads the genotype's parameters
    as weights, and records per-sample evaluations on the genotype so
    selection can later rank agents by fitness.
    """

    def __init__(self, genotype, topology):
        self.genotype = genotype
        self.genotype.eval = []  # reset per-sample evaluation history
        self.output = []         # last raw network answer, kept for inspection
        self.network = Network(topology)
        self.network.set_weights(genotype.params)

    def __lt__(self, other):
        # Order agents by genotype fitness so collections of them sort directly.
        return self.genotype.fitness < other.genotype.fitness

    def evaluate(self, inputs, correct):
        """Run the network on *inputs* and score the answer against *correct*.

        The squared-error score is appended to ``genotype.eval`` and returned.
        """
        network_answer = self.network.process_input(inputs)
        self.output = network_answer
        evaluation = Agent.eval_diff(network_answer, correct)
        self.genotype.eval.append(evaluation)
        return evaluation

    @staticmethod
    def eval_diff(answer, correct):
        """Return the sum of squared differences between the two sequences."""
        return sum((a - c) ** 2 for a, c in zip(answer, correct))

    @staticmethod
    def eval_nr_incorrect(answer, correct):
        """Return how many outputs differ from their target by more than 0.01."""
        return sum(1 for a, c in zip(answer, correct) if abs(c - a) > 0.01)

    @staticmethod
    def eval_progress(answer, correct):
        """Return the mean closeness of *answer* to the binary targets.

        For a target of 1 the contribution is the answer itself; otherwise it
        is ``1 - answer``, so a perfect answer scores 1.0.
        """
        sum_percent = 0
        for a, c in zip(answer, correct):
            # BUG FIX: was ``c is 1`` — identity comparison only happens to
            # work for CPython's cached small ints and fails for 1.0 etc.
            if c == 1:
                sum_percent += a
            else:
                sum_percent += 1 - a
        return sum_percent / len(answer)
Example #3
0
                computed_b[idx] += layer
        for layer in computed_b:
            layer /= len(self.bias_adj)

        return [computed_w, computed_b]

    @staticmethod
    def sigmoid_prime(value):
        """
        Derivative of the sigmoid function, used during back-propagation.

        Computed as sigmoid(v) * (1 - sigmoid(v)); the activation is
        evaluated once and reused instead of being computed twice.
        (The original docstring incorrectly described this as the sigmoid
        activation itself.)

        :param value: float
        :return: float
        """
        s = Layer.sigmoid(value)
        return s * (1 - s)


if __name__ == "__main__":
    from network.network import Network
    from evolution.genotype import Genotype

    # Smoke test: build a small 2-3-2 network with random weights in [-1, 1].
    net = Network([2, 3, 2])
    gen = Genotype([0] * net.weight_count)
    gen.set_rand_params(-1, 1)
    net.set_weights(gen.params)

    # Run a single batch made of the same three samples listed twice.
    samples = [[0.25, 0.65], [0.35, 0.75], [2.3, 5.3]]
    batch = Batch(net)
    computed_w, computed_b = batch.run_batch([samples, samples])

    print("test")