Example #1
 def test_backpropagation_small(self):
     nn = NN.Network(3, 3, [3], weight_scale=.1, random_seed=74,
                     learning_rate=10)
     input_vector = [-1, 0, 1]
     nn.predict(input_vector)
     target_vector = [0., .5, 1.]
     nn.backpropagation(target_vector)
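     # Read back the updated weights on the edges leaving the first layer.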
     weights = [[e.weight for e in n.out_edges] for n in nn.layers[0]]
     expected_weights = \
             [[0.036319633959432579, -0.11290500750462876,
                 -0.035007958284649393],
              [0.085253228862123998, 0.073753705059175473,
                 -0.059679883330276906],
              [0.11988016798353364, 0.1754042988325476,
                 -0.068349530989391408]]
     self.assertClose(weights, expected_weights, message=
                      "Network.backpropagation error.")
     input_vector = [5, 2, -1]
     nn.predict(input_vector)
     target_vector = [.9, .5, .1]
     nn.backpropagation(target_vector)
     weights = [[e.weight for e in n.out_edges] for n in nn.layers[0]]
     expected_weights = \
             [[-1.189984620537202, -1.4565500208831874,
                 -1.0525173157157277],
              [-0.40526847293652979, -0.46370430029224807,
                  -0.46668362630270821],
              [0.36514101888286055, 0.44413330150825936,
                  0.13515234049682423]]
     self.assertClose(weights, expected_weights, message=
                      "Network.backpropagation error.")
Example #2
 def test_backpropagation_deep(self):
     nn = NN.Network(1,
                     1, [2, 3, 4, 5, 4, 3, 2],
                     weight_scale=.1,
                     random_seed=22,
                     learning_rate=10.)
     input_vector = [0.5]
     nn.predict(input_vector)
     target_vector = [1.]
     nn.backpropagation(target_vector)
     weights = [[e.weight for e in n.out_edges] for n in nn.layers[0]]
     expected_weights = [[-0.0091949919644617774, -0.14633506539676461]]
     self.assertClose(weights,
                      expected_weights,
                      rtol=1e-10,
                      message="Network.backpropagation error.")
     input_vector = [-1.5]
     nn.predict(input_vector)
     target_vector = [0.]
     nn.backpropagation(target_vector)
     weights = [[e.weight for e in n.out_edges] for n in nn.layers[0]]
     expected_weights = [[-0.0091949923151859146, -0.14633506408885485]]
     self.assertClose(weights,
                      expected_weights,
                      rtol=1e-10,
                      message="Network.backpropagation error.")
Example #3
 def test_backpropagation_wide(self):
     nn = NN.Network(2, 4, [20], weight_scale=.1, random_seed=91,
                     learning_rate=10)
     input_vector = [-1, 0]
     nn.predict(input_vector)
     target_vector = [0, 1, 1, 0]
     nn.backpropagation(target_vector)
     weights = [[e.weight for e in n.out_edges] for n in nn.layers[0]]
     expected_weights = \
             [[-0.071492903626287255, -0.05816238760199402,
               -0.03534024709141706, -0.0074214687423239528,
               0.18498969560664008, -0.0961411450078892,
               0.18063728584818162, 0.181133023184328,
               0.0042128985429375852, 0.12931848600464421,
               -0.12589355751686468, -0.24000775940669486,
               0.023047766613129607, 0.16948680314228165,
               0.14792772797407688, -0.027019939341866139,
               0.024260766063869101, -0.055284070416947079,
               -0.15507955962683462, -0.0016508518646832972],
             [-0.066477562575821533, 0.057844215254984593,
               0.21173782908237093, -0.044639434521677783,
               -0.066693906489248625, 0.048079724931287253,
               0.19849801065676126, 0.039190893552514122,
               0.23982028022526714, 0.24173602732584878,
               -0.161411983056928, -0.021642658107796246,
               0.16636321183995761, -0.012201627077841688,
               0.1398420657053959, -0.063836467538069333,
               -0.079995595017062604, 0.097931238253741282,
               -0.097706732097435287, 0.052231952479944901]]
     self.assertClose(weights, expected_weights, message=
                      "Network.backpropagation error.")
     input_vector = [-5, 5]
     nn.predict(input_vector)
     target_vector = [0.1, 0.3, 0.5, 0.7]
     nn.backpropagation(target_vector)
     weights = [[e.weight for e in n.out_edges] for n in nn.layers[0]]
     expected_weights = \
             [[-0.063564105054976353,  -0.051852192557495742,
               -0.030155824899281692,  -0.00041161932703303629,
               0.18935121236082944,  -0.089404326356703048,
               0.18544502265936483,  0.1875767624069741,
               0.0096145523998172316,  0.13496652541688064,
               -0.11898914074433696,  -0.23343026950553339,
               0.029923943460375007,  0.17368059813176501,
               0.15461536997364966,  -0.018118009116766205,
               0.030974150401697258,  -0.048381843815970889,
               -0.14634132674254666,  0.0051102628153846131],
             [-0.074406361147132435,  0.051534020210486316,
               0.20655340689023557,  -0.051649283936968701,
               -0.071055423243437985,  0.041342906280101101,
               0.19369027384557805,  0.032747154329868036,
               0.23441862636838748,  0.23608798791361235,
               -0.1683163998294557,  -0.02822014800895771,
               0.15948703499271222,  -0.016395422067325041,
               0.13315442370582312,  -0.07273839776316926,
               -0.086708979354890761,  0.091029011652765085,
               -0.10644496498172325,  0.045470837799876991]]
     self.assertClose(weights, expected_weights, message=
                      "Network.backpropagation error.")
Example #4
 def test_predict_deep(self):
     nn = NN.Network(1, 1, [2, 3, 4, 5, 4, 3, 2], weight_scale=10,
                     random_seed=22)
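     # A fixed random_seed makes the initial weights, and hence the
     # predictions, deterministic.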
     input_vector = [5.5]
     target_vector = [0.079550580569749604]
     output_vector = nn.predict(input_vector)
     self.assertClose(output_vector, target_vector, message=
                      "Network.predict error.")
     input_vector = [-5.0]
     target_vector = [0.079550580570209389]
     output_vector = nn.predict(input_vector)
     self.assertClose(output_vector, target_vector, message=
                      "Network.predict error.")
Example #5
 def test_predict_wide(self):
     nn = NN.Network(4, 2, [20], weight_scale=10, random_seed=91)
     input_vector = [-1, 0, 1, 2]
     target_vector = [0.88446717331877323, 0.99999999999999911]
     output_vector = nn.predict(input_vector)
     self.assertClose(output_vector, target_vector, message=
                      "Network.predict error.")
     input_vector = [-5, 0, -1, -1.5]
     target_vector = [1.0, 9.4855870942287332e-14]
     output_vector = nn.predict(input_vector)
     self.assertClose(output_vector, target_vector, message=
                      "Network.predict error.")
Example #6
 def test_predict_small(self):
     nn = NN.Network(3, 3, [3], weight_scale=10, random_seed=74)
     input_vector = [-1, 0, 1]
     target_vector = [7.9153787496713187e-08, 0.041550054491001708,
                      0.99999753053427387]
     output_vector = nn.predict(input_vector)
     self.assertClose(output_vector, target_vector, message=
                      "Network.predict error.")
     input_vector = [5, 2, -1]
     target_vector = [1.6164330657508448e-06, 0.0095407829940506415,
                      0.42283954465081319]
     output_vector = nn.predict(input_vector)
     self.assertClose(output_vector, target_vector, message=
                      "Network.predict error.")
Example #7
    def __init__(self, topology):
        self.topology = topology

        # The brain is the neural network built from this topology.
        self.brain = nn.Network(topology)

        # Weights of the neural network, kept here so they can easily be
        # modified during training.
        self.weights = self.brain.get_network_weights()

        # Fitness: calculated value representing how well this solution performs
        self.fitness = None
        # Time: time spent in the environment without crashing; used to
        # calculate fitness.
        self.time = 0

        # Whether this solution is the fittest of its generation.
        self.fittest = False
Example #8
def cross_validation(weights, reg_param, instances, k=10, r=10):
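    """Run stratified k-fold cross-validation over `instances`.

    For each fold, train a fresh network on the remaining folds and evaluate
    it on the held-out fold; returns the mean and standard deviation of the
    per-fold F-measures.
    """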
    folds = stratified_k_fold(instances, k)
    classes = get_classes(instances)
    n_classes = len(classes)
    confusion_matrix = nested_dict(n_classes)
    fmeasures = []
    Jmean = 0
    JcvMean = 0
    for current_fold in folds:
        for c1 in classes:
            for c2 in classes:
                confusion_matrix[c1][c2] = 0
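        # Build the training set from every fold except the held-out one.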
        training_data = []
        for fold in folds:
            if fold != current_fold:
                for instance in fold:
                    training_data.append(instance)

        net = nn.Network(weights, reg_param)
        # TODO: Sort out the training details
        print("Training {n} instances".format(n=len(training_data)))
        J = net.train(training_data)
        Jmean = Jmean + J
        print("J final: " + str(J))
        for instance in current_fold:
            prediction = net.predict_class(instance)
            print('Decision: ', prediction)
            confusion_matrix[instance.klass][prediction] += 1
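        # Evaluate the cost J on the held-out fold.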
        Jcv = net.J(current_fold)
        JcvMean = JcvMean + Jcv
        print("Jcv: " + str(Jcv))
        # TODO: Check whether this is correct.
        tp, fp, fn = sum_tp_fp_fn(confusion_matrix)
        fmeasures.append(f_measure(1, tp, fp, fn))
    Jmean = Jmean / k
    JcvMean = JcvMean / k
    print("J médio é: " + str(Jmean))
    print("Jcv médio é: " + str(JcvMean))
    # Retorna a média e o desvio padrão das fmeasures
    return (mean(fmeasures), stdev(fmeasures))
Example #9
for number in range(number_of_neurons_in_hidden_layer):
    neuron = nn.Neuron()
    neuron.name = "H{}".format(number)
    neuron.activation_function = utils.sigmoid_activation_function
    hidden_layer.add_neuron(neuron)

for number in range(number_of_neurons_in_output_layer):
    neuron = nn.Neuron()
    neuron.name = "O{}".format(number)
    neuron.activation_function = utils.sigmoid_activation_function
    output_layer.add_neuron(neuron)

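# Wire the layers into a feed-forward chain: input -> hidden -> output.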
input_layer.set_output_to_layer(hidden_layer)
hidden_layer.set_output_to_layer(output_layer)

network = nn.Network()
network.add_layer(input_layer)
network.add_layer(hidden_layer)
network.add_layer(output_layer)

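# The four binary input pairs of a two-input truth table.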
network.inputs = [
    [0, 0],
    [0, 1],
    [1, 0],
    [1, 1]
]

network.formal_outputs = [
    [0],
    [0],
    [0],
Example #10
File: main.py Project: iqHpi/FF-net
import loader
import neural_net

training_data, validation_data, test_data = loader.load_data_wrapper()

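# 784 input neurons, hidden layers of 64 and 16 neurons, 10 output neurons.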
net = neural_net.Network([784, 64, 16, 10])

# SGD(training_data, epochs, mini_batch_size, eta, test_data)
net.SGD(training_data, 30, 10, 3.5, test_data=test_data)