Example #1
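A fully connected 784-100-10 network trained on MNIST in mini-batches of 100, with dropout on the hidden layer.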
import numpy as np
import sys
from netExamples.grokking.mnist import \
    images, labels, test_images, test_labels
from nnet import NNet

np.random.seed(1)

batch_size = 100
iterations = 301

# 784 inputs (28x28 pixels), 100 hidden units, 10 output classes
nn = NNet(sizes=[784, 100, 10], batch_size=batch_size)
nn.setActivations(['brelu', 'linear'])  # hidden- and output-layer activations
nn.setMaskPr({1: 2})   # dropout on layer 1, presumably keeping 1 unit in 2
nn.setAlpha(0.001)     # learning rate
nn.scale(0.1)          # scale the initial weights

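# One epoch per iteration: walk the training set in batches, accumulating the
# squared error and the number of correct predictions.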
for j in range(iterations):
    error, correct_cnt = (0.0, 0)
    for i in range(int(len(images) / batch_size)):
        batch_start, batch_end = ((i * batch_size), ((i + 1) * batch_size))
        prediction = nn.learn(images[batch_start:batch_end],
                              labels[batch_start:batch_end])
        # vprint(i, nn, suffix='a', quit=True)
        # vprint(i, nn.dropout_masks[1], suffix='m', quit=True)
        # nn.train(labels[batch_start:batch_end])
        # vprint(i, nn, stage='b', quit=True)

        error += np.sum((labels[batch_start:batch_end] - prediction) ** 2)
        for k in range(batch_size):
            # count a hit when the predicted digit matches the one-hot label
            correct_cnt += int(
                np.argmax(prediction[k:k + 1]) ==
                np.argmax(labels[batch_start + k:batch_start + k + 1]))
Example #2
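The same training loop, but the first weight layer is replaced by a ConvolutionMatrix and the output activation is softmax.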
import numpy as np
from netExamples.grokking.mnist import \
    images, labels, test_images, test_labels
from nnet import NNet
# Import path for ConvolutionMatrix is assumed; adjust to where it is defined
from nnet import ConvolutionMatrix
from verbosePrint import vprint
import verbosePrint

np.random.seed(1)

batch_size = 128
iterations = 300

# presumably 16 kernels of 3x3 (9 weights each) applied to the 28x28 input images
cm = ConvolutionMatrix(rows=9, cols=16, shapes=((28, 28), (3, 3)))
hLen = cm.outputLength()  # flattened size of the convolutional layer's output
nn = NNet(sizes=[784, hLen, 10], batch_size=batch_size)
nn.replaceLayer(0, cm)    # use the convolution as the first layer
nn.setActivations(['tanh', 'softmax'])  # tanh hidden layer, softmax output
nn.setMaskPr({1: 2})      # dropout on layer 1, presumably keeping 1 unit in 2
nn.setAlpha(2)            # learning rate
nn.scale(0.01, 0)         # scale the initial weights of layer 0
nn.scale(0.1, 1)          # scale the initial weights of layer 1

# vprint(0, nn, quit=True)
# params = (test_images, test_labels)
# nn.checkup(*params)
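# Same mini-batch loop as Example #1, but only the correct-prediction count is tracked.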
for j in range(0, iterations + 1):
    correct_cnt = 0
    for i in range(int(len(images) / batch_size)):
        batch_start, batch_end = ((i * batch_size), ((i + 1) * batch_size))
        prediction = nn.learn(images[batch_start:batch_end],
                              labels[batch_start:batch_end])
        # vprint(i, nn, suffix='a', quit=True)
        # vprint(i, nn.dropout_masks[1], suffix='m', quit=True)

        for k in range(batch_size):