Example 1
def run():
    fruits = CData(gyumpath, gyumindeps, feature=TAXLEVEL, cross_val=CROSSVAL)
    fruits.transformation = (TRANSFORMATION, TRANSFORMATION_PARAM)

    network = build_net(*fruits.neurons_required)

    testing = fruits.table("testing")
    # Independent hold-out set: reuse the transformation and label
    # embedding fitted on the training data.
    zsind = CData(zsindpath, zsindeps, cross_val=0.0, feature=TAXLEVEL)
    vx, vy = zsind.learning, zsind.lindeps
    vx = fruits.transformation(vx)
    vy = fruits.embed(vy)

    initc, initacc = network.evaluate(*testing, verbose=0)
    initc, initacc = round(initc, 5), round(initacc, 5)
    print("Initial cost: {}\tacc: {}".format(initc, initacc))

    X, y = fruits.table("learning")
    network.fit(X,
                y,
                batch_size=20,
                nb_epoch=400,
                validation_data=testing,
                verbose=0)
    tacc = network.evaluate(*testing, batch_size=fruits.n_testing,
                            verbose=0)[-1]
    vacc = network.evaluate(vx, vy, verbose=0)[-1]
    # batchgen = fruits.batchgen(100, infinite=True)
    # log = network.fit_generator(batchgen, fruits.N, nb_epoch=15, validation_data=valid, verbose=verbose)
    print("T: {}\tV: {}".format(tacc, vacc))
    return tacc, vacc
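
run() returns the test and validation accuracies, so a driver script can average them over several random weight initializations. A hypothetical harness (not part of the original module) might look like:

import numpy as np

if __name__ == "__main__":
    # Hypothetical driver: repeat run() to smooth out the variance
    # introduced by random weight initialization.
    results = np.array([run() for _ in range(10)])
    print("Mean T: {:.4f}\tMean V: {:.4f}".format(*results.mean(axis=0)))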
Example 2
class TestNetwork(unittest.TestCase):
    def setUp(self):
        self.data = CData(mnist_tolearningtable(roots["misc"] + "mnist.pkl.gz",
                                                fold=False),
                          headers=None)
        self.data.transformation = "std"
        self.X, self.Y = self.data.table("testing", m=5, shuff=False)

        self.net = BackpropNetwork(self.data.neurons_required[0],
                                   name="NumGradTestNetwork")
        self.net.add(DenseLayer(30, activation="sigmoid"))

    def test_mse_with_sigmoid_output(self):
        self.net.add(
            DenseLayer(self.data.neurons_required[1], activation="sigmoid"))
        self.net.finalize(cost="mse", optimizer="sgd")
        self._run_numerical_gradient_test()

    def test_xent_with_sigmoid_output(self):
        self.net.add(
            DenseLayer(self.data.neurons_required[1], activation="sigmoid"))
        self.net.finalize(cost="xent", optimizer="sgd")
        self._run_numerical_gradient_test()

    def test_xent_with_softmax_output(self):
        self.net.add(
            DenseLayer(self.data.neurons_required[1], activation="softmax"))
        self.net.finalize(cost="xent", optimizer="sgd")
        self._run_numerical_gradient_test()

    def _run_numerical_gradient_test(self):
        self.net.fit(*self.data.table("learning", m=20),
                     batch_size=20,
                     epochs=1,
                     verbose=0)

        numerical = numerical_gradients(self.net, self.X, self.Y)
        analytical = analytical_gradients(self.net, self.X, self.Y)
        diff = analytical - numerical
        # Scale-invariant relative error: ||a - n|| / max(||n||, ||a||)
        error = norm(diff) / max(norm(numerical), norm(analytical))

        # Exponential format keeps errors below 1e-4 readable in the messages.
        dfstr = "{0:.4e}".format(error)

        self.assertLess(error, 1e-2,
                        "FATAL ERROR, {} (relerr) >= 1e-2".format(dfstr))
        self.assertLess(error, 1e-4,
                        "ERROR, 1e-2 > {} (relerr) >= 1e-4".format(dfstr))
        self.assertLess(error, 1e-7,
                        "SUSPICIOUS, 1e-4 > {} (relerr) >= 1e-7".format(dfstr))
Example 3
def full_training(validate=True, dump_weights=False):
    fruits = CData(gyumpath, gyumindeps, feature=TAXLEVEL, cross_val=0.0)
    fruits.transformation = (TRANSFORMATION, TRANSFORMATION_PARAM)

    network = build_net(*fruits.neurons_required)

    X, y = fruits.table("learning")
    network.fit(X, y, batch_size=30, nb_epoch=500, verbose=0)

    if dump_weights:
        weights = network.layers[0].get_weights()

        # Dump the first layer's weight matrix as a tab-separated CSV,
        # with decimal commas for spreadsheet locales that expect them.
        with open("weights.csv", "w") as wghts:
            wghts.write("\n".join([
                "\t".join([str(float(cell)) for cell in line])
                for line in weights[0].T
            ]).replace(".", ","))

    if validate:
        zsind = CData(zsindpath, zsindeps, cross_val=0.0, feature=TAXLEVEL)
        vx, vy = zsind.learning, zsind.lindeps
        vx = fruits.transformation(vx)
        vy = fruits.embed(vy)
        vacc = network.evaluate(vx, vy, batch_size=len(vy), verbose=0)[-1]
        probs = network.predict_proba(vx, verbose=0)
        preds = network.predict_classes(vx, verbose=0)
        print("ANN validation accuracy:", vacc)
        return probs, preds, vy, fruits
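
The dump above swaps decimal points for commas, which some spreadsheet locales expect. Reading the file back just reverses the substitution; a sketch, assuming the weights.csv written by full_training() above:

import numpy as np

def load_weights_csv(path="weights.csv"):
    # Reverse the decimal-comma, tab-separated dump of full_training().
    with open(path) as f:
        rows = [line.replace(",", ".").split("\t")
                for line in f.read().splitlines() if line]
    return np.array(rows, dtype=float).T  # undo the transpose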
Example 4
class TestEmbedding(unittest.TestCase):
    """
    Dear Embedding Wrapper Classes,

    I would like you to:
    + create embeddings from categories
    ++ create OneHot embedding
    ++ create random embedding into n dimensions
    + transform any category label into the appropriate embedding
    - translate an embedding back to readable label or dummycode
    """
    def setUp(self):
        self.X_, self.y_, headers = parse_csv(etalonroot + "/input.csv")

        self.data = CData((self.X_, self.y_), cross_val=0)

    def test_embedding_then_reverting_to_onehot_doesnt_break_shapes(self):
        self.data.reset_data(shuff=False)
        self.data.crossval = 0
        self.data.embedding = 10
        self.assertEqual(
            self.data.embedding, "embedding",
            "<embedding> setter is faulty! (got {})".format(
                self.data.embedding))
        X, y = self.data.table()
        self.assertEqual(
            y.shape, (10, 10),
            "Embedding of independent variables went wrong! (got shape {})".
            format(y.shape))

        del self.data.embedding
        self.assertEqual(
            self.data.embedding, "onehot",
            "<embedding> deleter is faulty! (got {})".format(
                self.data.embedding))
        X, y = self.data.table()
        self.assertEqual(
            y.shape, (10, 3),
            "OneHot of independent variables went wrong! (got shape {})".
            format(y.shape))
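
Outside a test case, the same setter/deleter round trip reads like this (a minimal sketch reusing parse_csv and etalonroot from the test above; the shapes follow the test's assertions):

X_, y_, headers = parse_csv(etalonroot + "/input.csv")
data = CData((X_, y_), cross_val=0)

data.embedding = 10   # random embedding into 10 dimensions
X, y = data.table()   # y.shape == (N, 10)

del data.embedding    # revert to the default one-hot coding
X, y = data.table()   # y.shape == (N, n_categories), here (10, 3)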
Example 5
from csxdata import roots, CData

from brainforge import BackpropNetwork
from brainforge.layers import DenseLayer
from brainforge.optimization import SGD

mnist = CData(roots["misc"] + "mnist.pkl.gz", cross_val=10000, fold=False)
inshape, outshape = mnist.neurons_required

network = BackpropNetwork(input_shape=inshape,
                          layerstack=[
                              DenseLayer(30, activation="sigmoid"),
                              DenseLayer(outshape, activation="softmax")
                          ],
                          cost="xent",
                          optimizer=SGD(eta=3.))

network.fit(*mnist.table("learning"), validation=mnist.table("testing"))
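
After fitting, the held-out accuracy can be read back the same way as in the earlier examples (a sketch, assuming evaluate() follows the Keras-style (cost, accuracy) return convention used above):

cost, acc = network.evaluate(*mnist.table("testing"))
print("MNIST test accuracy:", acc)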
Example 6
from csxdata import CData, roots

from brainforge import BackpropNetwork
from brainforge.layers import ConvLayer, PoolLayer, Flatten, DenseLayer, Activation
from brainforge.optimization import RMSprop

data = CData(roots["misc"] + "mnist.pkl.gz", cross_val=10000, fold=True)
ins, ous = data.neurons_required
net = BackpropNetwork(input_shape=ins, layerstack=[
    ConvLayer(3, 8, 8, compiled=False),
    PoolLayer(3, compiled=False),
    Activation("tanh"),
    Flatten(),
    DenseLayer(60, activation="tanh"),
    DenseLayer(ous, activation="softmax")
], cost="xent", optimizer=RMSprop(eta=0.01))

net.fit_generator(data.batchgen(bsize=20, infinite=True), lessons_per_epoch=60000, epochs=30,
                  validation=data.table("testing"))
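
fit_generator only needs an iterator that endlessly yields (X, y) minibatches, which is what CData.batchgen(bsize=20, infinite=True) provides. A minimal stand-in, assuming plain NumPy arrays, could look like:

import numpy as np

def batch_stream(X, y, bsize=20):
    # Endless shuffled minibatch generator, a hypothetical stand-in
    # for CData.batchgen(bsize, infinite=True).
    n = len(X)
    while True:
        order = np.random.permutation(n)
        for start in range(0, n - bsize + 1, bsize):
            idx = order[start:start + bsize]
            yield X[idx], y[idx]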