Example #1
import pickle
import unittest

from brainforge import BackpropNetwork
from brainforge.layers import DenseLayer
from brainforge.util import etalon
from brainforge import persistance  # import path assumed; adjust to where persistance lives in your brainforge version


class TestPersistance(unittest.TestCase):

    def setUp(self):
        X, Y = etalon
        self.net = BackpropNetwork(input_shape=(4,), layers=[
            DenseLayer(30, activation="sigmoid"),
            DenseLayer(3, activation="softmax")
        ], cost="xent", optimizer="sgd")
        self.net.fit(X, Y, batch_size=len(X)//2, epochs=3, validation=etalon)
        self.cost1, self.acc1 = self.net.evaluate(*etalon)

    def test_dense_with_pickle(self):
        sleepy = pickle.dumps(self.net)
        netcopy = pickle.loads(sleepy)
        self._check_persistence_ok(netcopy)

    def test_dense_with_capsule(self):
        persistance.Capsule.encapsulate(self.net, dumppath="./PersistanceTest.bro")
        netcopy = persistance.load("./PersistanceTest.bro")
        self._check_persistence_ok(netcopy)

    def _check_persistence_ok(self, netcopy):
        cost2, acc2 = netcopy.evaluate(*etalon)

        self.assertAlmostEqual(self.cost1, cost2)
        self.assertAlmostEqual(self.acc1, acc2)
        self.assertIsNot(self.net, netcopy)
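If the test module is run directly, the standard unittest entry point (not shown in the original snippet) can be appended:

if __name__ == "__main__":
    unittest.main()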
Example #2
from mnist import MNIST  # python-mnist package assumed for the MNIST loaders below

from brainforge import BackpropNetwork
from brainforge.layers import DenseLayer
from brainforge.gradientcheck import GradientCheck


def test_fnn_2layers():
    mnist = MNIST("../..", return_type="numpy")
    train_images, train_labels = mnist.load_training()
    test_images, test_labels = mnist.load_testing()
    train_labels = create_one_hot_labels(train_labels)  # helper not part of brainforge; see the sketch after this example
    test_labels = create_one_hot_labels(test_labels)
    inshape, outshape = train_images.shape[1:], train_labels.shape[1:]
    print("%s -> %s" % (inshape, outshape))
    layerstack = [
        DenseLayer(30, activation="relu"),
        DenseLayer(outshape, activation="softmax")
    ]
    network = BackpropNetwork(input_shape=inshape, layerstack=layerstack, cost="xent", optimizer="sgd")
    network.fit(train_images, train_labels, epochs=1, batch_size=100, verbose=0)

    gcsuite = GradientCheck(network, epsilon=1e-5)
    gcsuite.run(test_images, test_labels)
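Both this example and the next call create_one_hot_labels, which is not shown in the snippets and is not part of brainforge. A minimal sketch of such a helper, assuming a 1-D array of integer class labels:

import numpy as np

def create_one_hot_labels(labels, n_classes=None):
    # Turn integer labels, e.g. [2, 0, 1], into one-hot rows of an identity matrix.
    labels = np.asarray(labels, dtype=int)
    if n_classes is None:
        n_classes = int(labels.max()) + 1
    return np.eye(n_classes)[labels]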
Example #3
import numpy as np

from mnist import MNIST  # python-mnist package assumed
from brainforge import BackpropNetwork
from brainforge.layers import DenseLayer


def test_fnn_softmax_1layer_full():
    mnist = MNIST("../..", return_type="numpy")
    train_images, train_labels = mnist.load_training()
    test_images, test_labels = mnist.load_testing()
    train_labels = create_one_hot_labels(train_labels)  # test labels stay as integers for the accuracy check below
    np.set_printoptions(threshold=np.inf)
    inshape, outshape = train_images.shape[1:], train_labels.shape[1:]
    print("%s -> %s" % (inshape, outshape))
    layerstack = [DenseLayer(outshape, activation="softmax")]
    network = BackpropNetwork(input_shape=inshape, layerstack=layerstack, cost="xent", optimizer="sgd", eta=1e-3)
    network.fit(train_images, train_labels, epochs=1, batch_size=100, shuffle=False, verbose=1)
    
    #gcsuite = GradientCheck(network, epsilon=1e-5)
    #gcsuite.run(test_images[5000:5500], test_labels[5000:5500])
    probs = network.predict(test_images)
    # Share of test samples whose argmax prediction matches the integer label, i.e. accuracy.
    accuracy = np.sum(np.argmax(probs, axis=1) == test_labels) / float(len(probs))
    print("accuracy={}, len={}".format(accuracy, len(probs)))
Example #4
from csxdata import roots, CData

from brainforge import BackpropNetwork
from brainforge.layers import DenseLayer
from brainforge.optimization import SGD

mnist = CData(roots["misc"] + "mnist.pkl.gz", cross_val=10000, fold=False)
inshape, outshape = mnist.neurons_required

network = BackpropNetwork(input_shape=inshape,
                          layerstack=[
                              DenseLayer(30, activation="sigmoid"),
                              DenseLayer(outshape, activation="softmax")
                          ],
                          cost="xent",
                          optimizer=SGD(eta=3.))

network.fit(*mnist.table("learning"), validation=mnist.table("testing"))
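For context, SGD(eta=3.) performs the plain gradient-descent update with learning rate eta; the per-parameter step is roughly the following (a textbook sketch, not brainforge's exact code):

def sgd_step(weights, gradients, eta=3.0):
    # One vanilla SGD update: w <- w - eta * dC/dw for every parameter array.
    return [w - eta * g for w, g in zip(weights, gradients)]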
Example #5
from brainforge.util import etalon
from brainforge import LayerStack, BackpropNetwork
from brainforge.layers import DenseLayer, DropOut

ls = LayerStack(
    (4, ),
    layers=[
        DenseLayer(120, activation="tanh"),
        # DropOut(0.5),
        DenseLayer(3, activation="softmax")
    ])

net = BackpropNetwork(ls, cost="xent", optimizer="momentum")
costs = net.fit(*etalon, epochs=300, validation=etalon, verbose=1)
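The commented-out DropOut(0.5) layer would zero a random half of the activations at training time and rescale the survivors. A minimal inverted-dropout sketch, assuming numpy inputs (illustrative, not brainforge's internal implementation):

import numpy as np

def dropout_forward(x, rate=0.5, training=True):
    # At inference time dropout is a no-op.
    if not training:
        return x
    # Zero activations with probability `rate`, rescale survivors by 1/(1-rate).
    mask = (np.random.rand(*x.shape) >= rate) / (1.0 - rate)
    return x * mask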
Example #6
from brainforge import BackpropNetwork
from brainforge.layers import DenseLayer
from brainforge.gradientcheck import GradientCheck
from brainforge.util import etalon

X, Y = etalon
inshape, outshape = X.shape[1:], Y.shape[1:]

network = BackpropNetwork(input_shape=inshape,
                          layerstack=[
                              DenseLayer(10, activation="sigmoid"),
                              DenseLayer(outshape, activation="softmax")
                          ],
                          cost="xent",
                          optimizer="sgd")
network.fit(X[5:], Y[5:], epochs=1, batch_size=len(X) - 5, verbose=0)

gcsuite = GradientCheck(network, epsilon=1e-3)
gcsuite.run(X[:5], Y[:5])
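Under the hood, gradient checking compares the analytic backprop gradient with a central finite difference, where epsilon is the perturbation size. A brainforge-independent sketch of the numerical side (function name is illustrative):

import numpy as np

def numerical_gradient(f, x, epsilon=1e-3):
    # Central-difference estimate of df/dx, perturbing one coordinate at a time.
    grad = np.zeros_like(x)
    for i in range(x.size):
        orig = x.flat[i]
        x.flat[i] = orig + epsilon
        f_plus = f(x)
        x.flat[i] = orig - epsilon
        f_minus = f(x)
        x.flat[i] = orig  # restore the original value
        grad.flat[i] = (f_plus - f_minus) / (2 * epsilon)
    return grad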
Example #7
from csxdata import Sequence, roots

from brainforge import BackpropNetwork
from brainforge.layers import LSTM, DenseLayer
from brainforge.optimization import RMSprop

data = Sequence(roots["txt"] + "petofi.txt", n_gram=1, timestep=5)
inshape, outshape = data.neurons_required
net = BackpropNetwork(input_shape=inshape,
                      layerstack=[
                          LSTM(60, activation="tanh"),
                          DenseLayer(60, activation="tanh"),
                          DenseLayer(outshape, activation="softmax")
                      ],
                      cost="xent",
                      optimizer=RMSprop(eta=0.01))

net.fit(*data.table("learning"), validation=data.table("testing"))
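RMSprop(eta=0.01) scales each update by a running average of squared gradients; the textbook form of the update is sketched below (not necessarily brainforge's exact implementation):

import numpy as np

def rmsprop_step(w, g, cache, eta=0.01, decay=0.9, eps=1e-8):
    # Accumulate a decaying average of squared gradients, then take a scaled step.
    cache = decay * cache + (1.0 - decay) * g ** 2
    w = w - eta * g / (np.sqrt(cache) + eps)
    return w, cache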