Example #1 — learning a random boolean (3-literal clause) function
        x = np.zeros((nc*3, 1))
        for i in range(nc*3):
            x[i][0] = random.choice([0, 1])
        y = np.zeros((1, 1))
        y[0][0] = f(x)
        data.append((x, y))
    return data


# Hyperparameters for learning a boolean clause function
# of the form (x0 or x1 or x2) and ... over nc clauses.
nhidden = 5  # number of neurons in the hidden layer
ntrain = 20  # number of train data samples
ntest = 2  # number of test data samples

nc = 10  # number of clauses: (x0 or x1 or x2) and ...
epochs = 2  # number of iterations for the stochastic gradient descent
batch_size = 10  # size of each batch for the stochastic gradient descent
learn_rate = 1.0  # learn rate
lmbda = 0.1  # regularization factor

# generate data
train_data = gen_data(ntrain, nc)
test_data = gen_data(ntest, nc)

# create and train the neural network
# input layer has nc*3 units (3 literals per clause), one sigmoid output
nn = NeuralNetwork([nc*3, nhidden, 1], cost.Quadratic, activation.Sigmoid)

# output o and target y are 1x1 arrays; a prediction counts as correct
# when it lands within 0.5 of the 0/1 target
test_func = lambda o, y: abs(o[0][0] - y[0][0]) <= 0.5
nn.learn(train_data, epochs, batch_size, learn_rate, lmbda,
         test_data=test_data, test_func=test_func)

Example #2 — classifying MNIST handwritten digits
        return data


def load_train_data():
    """Load the MNIST training images and labels as (image, label) pairs.

    Returns:
        A list of (image, label) tuples. A list (rather than a bare
        ``zip`` object) is returned so the data can be iterated more
        than once: in Python 3, ``zip`` yields a one-shot iterator that
        is silently exhausted after the first epoch, which would make
        every later epoch see zero samples. This also matches the
        behavior of the sibling ``load_test_data``.
    """
    train_data = read_idx_gzfile('data/mnist/train-images-idx3-ubyte.gz')
    labels = read_idx_gzfile('data/mnist/train-labels-idx1-ubyte.gz')
    return list(zip(train_data, labels))


def load_test_data():
    """Load the MNIST test images and labels as a list of (image, label) pairs."""
    images = read_idx_gzfile('data/mnist/t10k-images-idx3-ubyte.gz')
    targets = read_idx_gzfile('data/mnist/t10k-labels-idx1-ubyte.gz')
    pairs = zip(images, targets)
    return list(pairs)

# NOTE(review): this example trains on a 9000/1000 split of the 10k-image
# MNIST *test* set; the full 60k training-set loader (load_train_data)
# exists above but is unused here — confirm this is intentional.
data = load_test_data()  # list of (image, label) pairs
train_data = data[:9000]  # first 9000 pairs used for training
test_data = data[9000:]  # remaining pairs held out for evaluation


epochs = 1000  # number of iterations for the stochastic gradient descent
batch_size = 10  # size of each batch for the stochastic gradient descent
learn_rate = 3.0  # learn rate
lmbda = 0.0  # regularization factor

# create and train the neural network
# layer sizes: 784 inputs (presumably 28x28 pixels flattened — TODO confirm
# in read_idx_gzfile), 30 hidden neurons, 10 outputs (one per digit)
nn = NeuralNetwork([784, 30, 10], cost.CrossEntropy, activation.Sigmoid)

# a prediction is correct when the highest-activation output index
# matches the index of the largest entry of the label vector
test_func = lambda o, y: o.argmax() == y.argmax()
nn.learn(train_data, epochs, batch_size, learn_rate, lmbda,
         test_data=test_data, test_func=test_func, report_freq=10)