Example #1
    def test_optimizer_override(self):

        ls = MS.GradientDescent(lr=0.5)  # unused here; each layer sets its own scenari
        cost = MC.NegativeLogLikelihood()

        inp = ML.Input(1, 'inp')
        # the hidden layer freezes its bias; its weights fall back to the
        # optimizer driven by the output layer
        h = ML.Hidden(5,
                      activation=MA.Tanh(),
                      learningScenari=[MS.Fixed("b")],
                      name="h")
        # the output layer uses its own GradientDescent but freezes its
        # weight matrix W
        o = ML.SoftmaxClassifier(
            2,
            learningScenari=[MS.GradientDescent(lr=0.5),
                             MS.Fixed("W")],
            cost=cost,
            name="out")
        net = inp > h > o
        net.init()

        # snapshot the parameter values before training
        ow = o.getP('W').getValue()
        ob = o.getP('b').getValue()
        hw = h.getP('W').getValue()
        hb = h.getP('b').getValue()
        for x in xrange(1, 10):
            net["out"].train({
                "inp.inputs": [[1]],
                "out.targets": [1]
            })["out.drive.train"]

        # frozen parameters must be unchanged; trained ones must have moved
        self.assertTrue(sum(ow[0]) == sum(o.getP('W').getValue()[0]))  # out W frozen
        self.assertTrue(sum(ob) != sum(o.getP('b').getValue()))  # out b trained
        self.assertTrue(sum(hb) == sum(h.getP('b').getValue()))  # hidden b frozen
        self.assertTrue(sum(hw[0]) != sum(h.getP('W').getValue()[0]))  # hidden W trained
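
The snippets on this page omit their import preamble. A minimal sketch of the aliases they rely on, assuming Mariana's conventional module names (adjust to your installation):

# assumed import preamble; module paths follow Mariana's documented conventions
import numpy
import Mariana.activations as MA
import Mariana.costs as MC
import Mariana.layers as ML
import Mariana.scenari as MS
import Mariana.settings as MSET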
Example #2
    def test_embedding(self):
        """the first 3 and the last 3 should be diametrically opposed"""
        data = [[0], [1], [2], [3], [4], [5]]
        targets = [0, 0, 0, 1, 1, 1]

        ls = MS.GradientDescent(lr=0.5)
        cost = MC.NegativeLogLikelihood()

        emb = ML.Embedding(1, 2, len(data), learningScenario=ls, name="emb")
        o = ML.SoftmaxClassifier(2,
                                 learningScenario=MS.Fixed(),
                                 costObject=cost,
                                 name="out")
        net = emb > o

        miniBatchSize = 2
        for epoch in xrange(2000):
            for i in xrange(0, len(data), miniBatchSize):
                net.train(o,
                          emb=data[i:i + miniBatchSize],
                          targets=targets[i:i + miniBatchSize])

        embeddings = emb.getEmbeddings()
        # opposed pairs should have a strongly negative dot product
        for i in xrange(0, len(data) / 2):
            v = numpy.dot(embeddings[i], embeddings[i + len(data) / 2])
            self.assertTrue(v < -1)
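
The final assertion encodes what "diametrically opposed" means here: for exactly opposite vectors the dot product is minus the product of their norms, so it must be strongly negative. A hypothetical stand-alone check of that arithmetic, independent of Mariana:

# exactly opposed vectors give a dot product of -(|a| * |b|)
import numpy

a = numpy.array([1.5, -0.5])
b = -a                    # diametrically opposed to a
print numpy.dot(a, b)     # -2.5, comfortably below the test's -1 threshold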
Example #3
    def test_embedding(self):
        """the first 3 and the last 3 should be diametrically opposed"""
        data = [[0], [1], [2], [3], [4], [5]]
        targets = [0, 0, 0, 1, 1, 1]

        ls = MS.GradientDescent(lr=0.5)
        cost = MC.NegativeLogLikelihood()

        inp = ML.Input(1, 'inp')
        emb = ML.Embedding(nbDimensions=2,
                           dictSize=len(data),
                           learningScenari=[ls],
                           name="emb")
        o = ML.SoftmaxClassifier(2,
                                 learningScenari=[MS.Fixed()],
                                 cost=cost,
                                 name="out")
        net = inp > emb > o
        net.init()

        miniBatchSize = 2
        for epoch in xrange(2000):
            for i in xrange(0, len(data), miniBatchSize):
                net["out"].train({
                    "inp.inputs": data[i:i + miniBatchSize],
                    "out.targets": targets[i:i + miniBatchSize]
                })["out.drive.train"]

        embeddings = emb.getP("embeddings").getValue()
        # opposed pairs should have a strongly negative dot product
        for i in xrange(0, len(data) / 2):
            v = numpy.dot(embeddings[i], embeddings[i + len(data) / 2])
            self.assertTrue(v < -1)
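
Examples #2 through #4 all repeat the same epoch-and-slice pattern. Factoring it into a small generator (hypothetical, not part of Mariana) makes the training loops read more clearly:

# hypothetical helper, not part of Mariana
def iter_minibatches(data, targets, size):
    """Yield aligned (inputs, targets) slices of the given size."""
    for start in xrange(0, len(data), size):
        yield data[start:start + size], targets[start:start + size]

# usage with the dict-based API from Example #3
for epoch in xrange(2000):
    for batch, batchTargets in iter_minibatches(data, targets, miniBatchSize):
        net["out"].train({
            "inp.inputs": batch,
            "out.targets": batchTargets
        })["out.drive.train"]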
Example #4
MSET.VERBOSE = False

# The first 3 and the last 3 should end up diametrically opposed
data = [[0, 0], [1, 0], [2, 0], [3, 0], [4, 0], [5, 0]]
targets = [0, 0, 0, 1, 1, 1]

ls = MS.GradientDescent(lr=0.5)
cost = MC.NegativeLogLikelihood()

emb = ML.Embedding(2,
                   nbDimentions=2,
                   dictSize=len(data),
                   learningScenario=ls,
                   name="emb")
o = ML.SoftmaxClassifier(2,
                         learningScenario=MS.Fixed(),
                         costObject=cost,
                         name="out")
net = emb > o

miniBatchSize = 2
net.init()

print "before:"
print emb.getEmbeddings()

for epoch in xrange(2000):
    for i in xrange(0, len(data), miniBatchSize):
        net.train(o,
                  emb=data[i:i + miniBatchSize],
                  targets=targets[i:i + miniBatchSize])
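
The snippet prints the embeddings only before training. A natural follow-up (assumed here, mirroring the "before:" print above) would show them after the loop, where the two groups should have drifted apart:

print "after:"
print emb.getEmbeddings()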