Example #1
 def test_set_lr(self):
     lin = Linear(indim=10, dim=15)
     lin.set_lr(0.123)
     o = lin(Val(0))
     # print(["{}: {}".format(x, x.lrmul) for x in o.allparams])
     for x in o.allparams:
         self.assertEqual(x.lrmul, 0.123)
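The test only asserts that set_lr stamps lrmul = 0.123 onto every parameter reachable from the output. As a rough illustration of what such a per-parameter multiplier typically means downstream, here is a minimal NumPy sketch of an SGD step whose effective step size is lr * lrmul; the Param class and sgd_step function are illustrative assumptions, not teafacto's actual update code.

import numpy as np

class Param(object):
    def __init__(self, value, lrmul=1.0):
        self.value = value    # parameter array
        self.lrmul = lrmul    # per-parameter learning-rate multiplier

def sgd_step(params, grads, lr=0.1):
    # each parameter moves by lr * lrmul * gradient
    for p, g in zip(params, grads):
        p.value -= lr * p.lrmul * g

w = Param(np.zeros((10, 15)), lrmul=0.123)
sgd_step([w], [np.ones((10, 15))])
assert np.allclose(w.value, -0.1 * 0.123)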
Example #2
 def __init__(self,
              layers,
              softmaxoutblock=None,
              innerdim=None,
              attention=None,
              inconcat=True,
              outconcat=False,
              dropout=False,
              **kw):
     super(SeqDecoder, self).__init__(**kw)
     self.embedder = layers[0]
     self.block = RecStack(*layers[1:])
     self.outdim = innerdim
     self.attention = attention
     self.inconcat = inconcat
     self.outconcat = outconcat
     self._mask = False
     self._attention = None
     assert isinstance(self.block, ReccableBlock)
     if softmaxoutblock is None:  # default softmax out block
         sm = Softmax()
         self.lin = Linear(indim=self.outdim,
                           dim=self.embedder.indim,
                           dropout=dropout)
         self.softmaxoutblock = asblock(lambda x: sm(self.lin(x)))
     elif softmaxoutblock is False:
         self.softmaxoutblock = asblock(lambda x: x)
     else:
         self.softmaxoutblock = softmaxoutblock
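The default softmaxoutblock built above is just a Linear projection to the embedder's input vocabulary size followed by a Softmax. A minimal NumPy sketch of that composition, with illustrative weights rather than the block's real parameters:

import numpy as np

def softmax(x):
    # row-wise softmax, numerically stabilised
    e = np.exp(x - x.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)

def softmax_out(h, W, b):
    # Linear projection followed by Softmax, like the default softmaxoutblock
    return softmax(np.dot(h, W) + b)

h = np.random.random((4, 20))                       # decoder states, innerdim=20
W, b = np.random.random((20, 100)), np.zeros(100)   # project to a vocabulary of 100
probs = softmax_out(h, W, b)
assert np.allclose(probs.sum(axis=-1), 1.0)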
Example #3
 def __init__(self, numin, *dims, **kw):
     super(Model, self).__init__(**kw)
     self.layers = []
     dims = list(dims)
     dims = [numin] + dims
     for i in range(1, len(dims)):
         self.layers.append(Linear(indim=dims[i - 1], dim=dims[i]))
         self.layers.append(Tanh())
     self.layers[-1] = Softmax()
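The constructor only builds the layer list (Linear/Tanh pairs, with the final Tanh swapped for a Softmax); a forward pass would simply thread the input through that list. A minimal sketch of such an apply loop, assuming each layer is callable on its input (the method name apply is an assumption, not necessarily the library's):

 def apply(self, x):
     # feed x through the Linear/Tanh pairs and the final Softmax
     for layer in self.layers:
         x = layer(x)
     return x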
Example #4
 def __init__(self,
              indim=None,
              innerdim=None,
              outvocsize=None,
              dropout=None,
              **kw):
     super(SoftMaxOut, self).__init__(**kw)
     self.indim, self.innerdim, self.outvocsize = indim, innerdim, outvocsize
     self.lin1 = Linear(indim=indim, dim=innerdim, dropout=dropout)
     self.lin2 = MatDot(indim=innerdim, dim=outvocsize)
Example #5
class TestLinear(TestCase):
    def setUp(self):
        self.linear = Linear(indim=10, dim=15)
        self.data = np.random.random((100, 10))
        self.out = self.linear.predict(self.data)

    def test_linear_shapes(self):
        self.assertEqual(self.out.shape, (100, 15))

    def test_linear_output(self):
        self.assertTrue(
            np.allclose(
                self.out,
                np.dot(self.data, self.linear.W.d.get_value()) +
                self.linear.b.d.get_value()))
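The output check compares against a plain affine map. The same reference computation in pure NumPy, with W and b standing in for the layer's weight and bias values:

import numpy as np

def linear_forward(data, W, b):
    # y = x . W + b, the expression the test compares against
    return np.dot(data, W) + b

data = np.random.random((100, 10))
W, b = np.random.random((10, 15)), np.zeros(15)
assert linear_forward(data, W, b).shape == (100, 15)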
Example #6
class TestLinear(TestCase):
    def setUp(self):
        self.linear = Linear(indim=10, dim=15)
        self.data = np.random.random((100, 10))
        self.out = self.linear.predict(self.data)

    def test_linear_shapes(self):
        self.assertEqual(self.out.shape, (100, 15))

    def test_linear_output(self):
        self.assertTrue(
            np.allclose(
                self.out,
                np.dot(self.data, self.linear.W.d.get_value()) +
                self.linear.b.d.get_value()))

    def test_set_lr(self):
        lin = Linear(indim=10, dim=15)
        lin.set_lr(0.123)
        o = lin(Val(0))
        # print(["{}: {}".format(x, x.lrmul) for x in o.allparams])
        for x in o.allparams:
            self.assertEqual(x.lrmul, 0.123)

    def test_get_params(self):
        lin = Linear(indim=10, dim=15)
        params = {lin.W, lin.b}
        self.assertEqual(params, lin.get_params())

    def test_multilevel_set_lr(self):
        l1 = Linear(10, 11)
        l2 = Linear(11, 12)
        l3 = Linear(12, 13)
        s = stack(l1, l2, l3)
        s[1].set_lr(0.5)
        s[2].set_lr(0.1)
        o = s(Val(0))
        l1o = s[0](Val(0))
        l2o = s[1](Val(0))
        l3o = s[2](Val(0))
        print(["{}: {}".format(x, x.lrmul) for x in o.allparams])
        for x in o.allparams:
            if x in l1o.allparams:
                self.assertEqual(x.lrmul, 1.0)
            elif x in l2o.allparams:
                self.assertEqual(x.lrmul, 0.5)
            elif x in l3o.allparams:
                self.assertEqual(x.lrmul, 0.1)
        s.set_lr(0.21)
        o = s(Val(0))
        print(["{}: {}".format(x, x.lrmul) for x in o.allparams])
        for x in o.allparams:
            self.assertEqual(x.lrmul, 0.21)
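test_multilevel_set_lr pins down the override semantics: set_lr on a sub-block only touches that block's parameters, while a later set_lr on the enclosing stack overwrites all of them. A minimal pure-Python sketch of that behaviour; Param and Block here are illustrative stand-ins, not teafacto's classes:

class Param(object):
    def __init__(self):
        self.lrmul = 1.0

class Block(object):
    def __init__(self, *children):
        self.children = list(children)
        self.params = [Param(), Param()]   # e.g. W and b

    def allparams(self):
        out = list(self.params)
        for c in self.children:
            out += c.allparams()
        return out

    def set_lr(self, lr):
        # stamp lr onto every parameter reachable from this block
        for p in self.allparams():
            p.lrmul = lr

l1, l2, l3 = Block(), Block(), Block()
s = Block(l1, l2, l3)
l2.set_lr(0.5)
l3.set_lr(0.1)
assert {p.lrmul for p in l1.params} == {1.0}
s.set_lr(0.21)                    # the later call overrides everything below
assert {p.lrmul for p in s.allparams()} == {0.21}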
Example #7
 def __init__(self,
              charenc=None,
              wordemb=None,
              maskid=-1,
              scalayers=1,
              scadim=100,
              encdim=100,
              outdim=None,
              scabidir=False,
              encbidir=False,
              enclayers=1,
              **kw):
     super(BinoEncoder, self).__init__(**kw)
     self.charenc = charenc
     self.wordemb = wordemb
     self.maskid = maskid
     self.bidir = encbidir  # TODO
     outdim = encdim if outdim is None else outdim
     self.outdim = outdim  # TODO
     self.outerpol = SimpleSeq2Sca(inpemb=False,
                                   inpembdim=charenc.outdim +
                                   wordemb.outdim,
                                   innerdim=[scadim] * scalayers,
                                   bidir=scabidir)
     self.leftenc = RNNSeqEncoder(inpemb=False,
                                  inpembdim=charenc.outdim + wordemb.outdim,
                                  innerdim=[encdim] * enclayers,
                                  bidir=encbidir,
                                  maskid=maskid)
     self.rightenc = RNNSeqEncoder(inpemb=False,
                                   inpembdim=charenc.outdim +
                                   wordemb.outdim,
                                   innerdim=[encdim] * enclayers,
                                   bidir=encbidir,
                                   maskid=maskid)
     self.leftlin = Linear(self.leftenc.outdim, outdim)
     self.rightlin = Linear(self.rightenc.outdim, outdim)
Example #8
class TestLinear(TestCase):
    def setUp(self):
        self.linear = Linear(indim=10, dim=15)
        self.data = np.random.random((100, 10))
        self.out = self.linear.predict(self.data)

    def test_linear_shapes(self):
        self.assertEqual(self.out.shape, (100, 15))

    def test_linear_output(self):
        self.assertTrue(
            np.allclose(
                self.out,
                np.dot(self.data, self.linear.W.d.get_value()) +
                self.linear.b.d.get_value()))
Example #9
 def test_multilevel_set_lr(self):
     l1 = Linear(10, 11)
     l2 = Linear(11, 12)
     l3 = Linear(12, 13)
     s = stack(l1, l2, l3)
     s[1].set_lr(0.5)
     s[2].set_lr(0.1)
     o = s(Val(0))
     l1o = s[0](Val(0))
     l2o = s[1](Val(0))
     l3o = s[2](Val(0))
     print(["{}: {}".format(x, x.lrmul) for x in o.allparams])
     for x in o.allparams:
         if x in l1o.allparams:
             self.assertEqual(x.lrmul, 1.0)
         elif x in l2o.allparams:
             self.assertEqual(x.lrmul, 0.5)
         elif x in l3o.allparams:
             self.assertEqual(x.lrmul, 0.1)
     s.set_lr(0.21)
     o = s(Val(0))
     print(["{}: {}".format(x, x.lrmul) for x in o.allparams])
     for x in o.allparams:
         self.assertEqual(x.lrmul, 0.21)
Example #10
 def test_subclasses(self):
     b = Linear(10, 5)
     p = b.get_probe()
     print(p)
Example #11
 def setUp(self):
     self.linear = Linear(indim=10, dim=15)
     self.data = np.random.random((100, 10))
     self.out = self.linear.predict(self.data)
Example #12
 def setUp(self):
     self.linear = Linear(indim=10, dim=15)
     self.data = np.random.random((100, 10))
     self.out = self.linear.predict(self.data)
Example #13
 def test_update_propagation_through_basic_blocks(self):
     x = Input(ndim=2, dtype="float32")
     x.push_updates({"a": "b"})
     y = Linear(5, 6)(x)
     self.assertEqual(y.allupdates, x.allupdates)
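The assertion says that updates pushed onto an input are visible unchanged on any output built from it, i.e. a block simply carries its inputs' update dictionaries along. A minimal sketch of that bookkeeping; the Var class and merge logic are illustrative assumptions, not teafacto's implementation:

class Var(object):
    def __init__(self, allupdates=None):
        self.allupdates = dict(allupdates or {})

    def push_updates(self, updates):
        self.allupdates.update(updates)

def apply_block(*inputs):
    # the output inherits the merged updates of all its inputs
    merged = {}
    for inp in inputs:
        merged.update(inp.allupdates)
    return Var(merged)

x = Var()
x.push_updates({"a": "b"})
y = apply_block(x)
assert y.allupdates == x.allupdates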
Example #14
 def test_get_params(self):
     lin = Linear(indim=10, dim=15)
     params = {lin.W, lin.b}
     self.assertEqual(params, lin.get_params())