Code Example #1
File: test.py Project: CKrawczyk/aggregation
__author__ = "greg"
import loader
import neural_network

# Load the MNIST data as (training, validation, test) sets
training_data, validation_data, test_data = loader.load_data_wrapper()
# 784 input units (28x28 pixels), 30 hidden units, 10 output digits
net = neural_network.Network([784, 30, 10])

# Train for 30 epochs with mini-batch size 10 and learning rate 3.0
net.SGD(training_data, 30, 10, 3.0, test_data=test_data)
Code Example #2
# ----------------------
# - read the input data:

import loader
import matrix as mtx

training_data, validation_data, test_data = loader.load_data_wrapper()

# load_data_wrapper returns iterators under Python 3; materialize the
# training set so it can be traversed once per epoch
training_data = list(training_data)

# 784 input units, 100 hidden units, 10 output digits
net = mtx.Matrix([784, 100, 10])
# 30 epochs, mini-batch size 10, learning rate 5.0
net.learn(training_data, 30, 10, 5.0, test_data)

Code Example #3
import loader


def train_network(net):
    # Load training data
    training_data, validation_data, test_data = loader.load_data_wrapper()

    # Train the network: 30 epochs, mini-batch size 10, learning rate 3.0
    net.SGD(training_data, 30, 10, 3.0, test_data=test_data)
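
A minimal usage sketch for train_network, assuming the Nielsen-style Network class from Code Example #1 (the neural_network module name is an assumption carried over from that example):

import neural_network

# Build a 784-30-10 network and train it with the helper above
net = neural_network.Network([784, 30, 10])
train_network(net)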
Code Example #4
    def evaluate(self, data):
        # Count correctly classified inputs: the predicted digit is the
        # index of the highest output activation (requires numpy as np)
        test_results = [(np.argmax(self.feedforward(x)), np.argmax(y))
                        for (x, y) in data]
        return sum(int(x == y) for (x, y) in test_results)

    def save(self, filename):
        # Serialize the network's architecture, parameters, and cost
        # function name to JSON (requires the json module)
        data = {
            "sizes": self.sizes,
            "weights": [w.tolist() for w in self.weights],
            "biases": [b.tolist() for b in self.biases],
            "cost": str(self.cost.__name__)
        }

        with open(filename, "w") as f:
            json.dump(data, f)


def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))


def sigmoid_prime(z):
    return sigmoid(z) * (1 - sigmoid(z))


if __name__ == '__main__':
    tr, va, te = loader.load_data_wrapper()
    network = Network([784, 30, 10])
    # 30 epochs, mini-batch size 10, learning rate 0.5, L2 regularization
    # lambda 5.0, validation data for monitoring, and four flags enabling
    # cost/accuracy monitoring (assuming a network2.py-style SGD signature)
    network.SGD(tr, 30, 10, 0.5, 5.0, va, True, True, True, True)
    network.save("improved_network_results.json")
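
The excerpt does not show how a saved network is restored; a minimal sketch, assuming the JSON layout written by save above (the Network constructor taking only the layer sizes is an assumption about the surrounding module):

def load(filename):
    # Rebuild a network from the file written by Network.save; the
    # Network(sizes) constructor is assumed from the surrounding module
    with open(filename) as f:
        data = json.load(f)
    net = Network(data["sizes"])
    net.weights = [np.array(w) for w in data["weights"]]
    net.biases = [np.array(b) for b in data["biases"]]
    # (the stored "cost" name is not restored in this sketch)
    return net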
Code Example #5
import numpy as np
from keras.models import Sequential
from keras.layers import Dense

from loader import load_data_wrapper

train, valid, test = load_data_wrapper()

model = Sequential()

# Feedforward network: a 784-unit ReLU layer on the 784-dimensional
# input, a 30-unit hidden layer, and a 10-way softmax output
model.add(Dense(784, input_dim=784, activation='relu'))
model.add(Dense(30, activation='relu'))
model.add(Dense(10, activation='softmax'))

model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

print("finished compiling")
inp = []
out = []
count = 0
for i, o in train:
    if len(inp) > 8000:
        break
    base_i = []
    base_o = []
    for item in i:
        base_i.append(item[0])

    for item in o:
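
With the examples flattened, training would proceed with model.fit; a minimal sketch (the epoch count and batch size below are assumptions chosen to match the earlier examples, not taken from the original):

x = np.array(inp)  # shape (N, 784)
y = np.array(out)  # shape (N, 10), one-hot digit labels
model.fit(x, y, epochs=30, batch_size=10)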