def __init__(self, inpemb, encdim=100, scadim=100, maskid=0, bidir=False,
             scalayers=1, enclayers=1, outdim=100, **kw):
    super(CustomSeq2Pair, self).__init__(**kw)
    self.tosca = SimpleSeq2Sca(inpemb=inpemb, inpembdim=inpemb.outdim,
                               innerdim=scadim, maskid=maskid, bidir=bidir,
                               layers=scalayers)
    self.subjenc = SimpleSeq2Vec(inpemb=inpemb, inpembdim=inpemb.outdim,
                                 innerdim=encdim, maskid=maskid, bidir=bidir,
                                 layers=enclayers)
    self.predenc = SimpleSeq2Vec(inpemb=inpemb, inpembdim=inpemb.outdim,
                                 innerdim=encdim, maskid=maskid, bidir=bidir,
                                 layers=enclayers)
    self.subjmd = MatDot(self.subjenc.outdim, outdim)
    self.predmd = MatDot(self.predenc.outdim, outdim)
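# Illustrative sketch only (the apply method is not part of this snippet):
# the two MatDot layers project each encoder's final vector into a shared
# outdim-sized space, yielding a (subject, predicate) pair of embeddings.
# How self.tosca's attention scalars are combined with the encoders is not
# shown here, so this signature and body are assumptions.
def apply(self, seq):    # hypothetical
    subj = self.subjmd(self.subjenc(seq))    # (batch, outdim)
    pred = self.predmd(self.predenc(seq))    # (batch, outdim)
    return subj, pred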
def __init__(self, inpvocsize=400, inpembdim=None, outvocsize=100,
             outembdim=None, encdim=100, decdim=100, attdim=100, bidir=False,
             rnu=GRU, statetrans=None, vecout=False, inconcat=True,
             outconcat=False, **kw):
    encinnerdim = [encdim] if not issequence(encdim) else encdim
    decinnerdim = [decdim] if not issequence(decdim) else decdim
    self.enclayers, lastencinnerdim = \
        self.getenclayers(inpembdim, inpvocsize, encinnerdim, bidir, rnu)
    self.declayers = \
        self.getdeclayers(outembdim, outvocsize, lastencinnerdim,
                          decinnerdim, rnu, inconcat)
    # attention
    lastdecinnerdim = decinnerdim[-1]
    argdecinnerdim = lastdecinnerdim if outconcat is False \
        else lastencinnerdim + lastdecinnerdim
    attgen = LinearGateAttentionGenerator(indim=lastencinnerdim + lastdecinnerdim,
                                          attdim=attdim)
    attcon = WeightedSumAttCon()
    if statetrans is True:
        if lastencinnerdim != lastdecinnerdim:  # state shape mismatch
            statetrans = MatDot(lastencinnerdim, lastdecinnerdim)
    elif statetrans == "matdot":
        statetrans = MatDot(lastencinnerdim, lastdecinnerdim)
    super(SimpleSeqEncDecAtt, self).__init__(self.enclayers, self.declayers,
                                             attgen, attcon, argdecinnerdim,
                                             statetrans=statetrans,
                                             vecout=vecout, inconcat=inconcat,
                                             outconcat=outconcat, **kw)
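# Illustrative construction (argument values arbitrary but consistent with
# the signature above): a bidirectional GRU encoder feeding an attention
# decoder, with a MatDot transform bridging encoder and decoder states.
encdec = SimpleSeqEncDecAtt(inpvocsize=400, inpembdim=64,
                            outvocsize=100, outembdim=64,
                            encdim=200, decdim=200, attdim=100,
                            bidir=True, statetrans="matdot")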
def innerf(encstates, encspec, decspec):
    # flatten the nested per-layer specs so encoder and decoder entries
    # can be compared pairwise
    decspec = reduce(lambda x, y: list(x) + list(y), decspec, [])
    encspec = reduce(lambda x, y: list(x) + list(y), encspec, [])
    assert len(decspec) == len(encspec)
    ret = []
    for i in range(len(encspec)):
        if encspec[i][0] == "state" and decspec[i][0] == "state":
            # insert a trainable MatDot where state dimensions differ
            # (or when a "matdot" transform is forced), identity otherwise
            if decspec[i][1][0] != encspec[i][1][0] or statetrans == "matdot":
                t = MatDot(encspec[i][1][0], decspec[i][1][0])
            else:
                t = asblock(lambda x: x)
        elif encspec[i][0] == decspec[i][0]:
            t = None
        else:
            raise Exception("incompatible encoder/decoder state specs")
        ret.append(t)
    assert len(encstates) == len(ret)
    # apply the collected transforms to the encoder states
    out = []
    for encstate, rete in zip(encstates, ret):
        out.append(None if rete is None else rete(encstate))
    return out
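# Illustration of the spec flattening above, with an assumed spec layout:
# nested per-layer specs are flattened into one list before the pairwise
# comparison. (reduce is a builtin in the Python 2 style used throughout;
# under Python 3 it comes from functools.)
encspec = [(("state", (100,)),), (("output", (100,)),)]
flat = reduce(lambda x, y: list(x) + list(y), encspec, [])
# flat == [("state", (100,)), ("output", (100,))]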
def __init__(self, layers, softmaxoutblock=None, innerdim=None,
             attention=None, inconcat=True, outconcat=False, **kw):
    super(SeqDecoderOld, self).__init__(**kw)
    self.embedder = layers[0]
    self.block = RecStack(*layers[1:])
    self.outdim = innerdim
    self.attention = attention
    self.inconcat = inconcat
    self.outconcat = outconcat
    self._mask = False
    self._attention = None
    assert isinstance(self.block, ReccableBlock)
    if softmaxoutblock is None:  # default softmax out block
        sm = Softmax()
        self.lin = MatDot(indim=self.outdim, dim=self.embedder.indim)
        self.softmaxoutblock = asblock(lambda x: sm(self.lin(x)))
    elif softmaxoutblock is False:
        self.softmaxoutblock = asblock(lambda x: x)
    else:
        self.softmaxoutblock = softmaxoutblock
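# The three output modes above, summarized:
#   softmaxoutblock=None  -> softmax(MatDot(outdim, vocsize)(x)), the default
#   softmaxoutblock=False -> identity: the decoder emits raw vectors
#   anything else         -> used as the output block unchanged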
def __init__(self, indim=None, innerdim=None, outvocsize=None, dropout=None, **kw):
    super(SoftMaxOut, self).__init__(**kw)
    self.indim, self.innerdim, self.outvocsize = indim, innerdim, outvocsize
    self.lin1 = Linear(indim=indim, dim=innerdim, dropout=dropout)
    self.lin2 = MatDot(indim=innerdim, dim=outvocsize)
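# Hypothetical forward pass (SoftMaxOut's apply is not part of this
# snippet): project to innerdim, then to vocabulary size, then normalize.
# The intermediate Tanh nonlinearity is an assumption for illustration.
def apply(self, x):    # assumed signature
    h = Tanh()(self.lin1(x))          # (batch, innerdim)
    return Softmax()(self.lin2(h))    # (batch, outvocsize)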
class TestMatDot(TestCase):
    def setUp(self):
        self.matdot = MatDot(indim=10, dim=15)
        self.data = np.random.random((100, 10))
        self.matdotout = self.matdot.predict(self.data)

    def test_matdot_shapes(self):
        self.assertEqual(self.matdotout.shape, (100, 15))

    def test_matdot_output(self):
        self.assertTrue(np.allclose(self.matdotout,
                                    np.dot(self.data, self.matdot.W.d.get_value())))
def __init__(self, indim=100, outdim=100, **kw):
    outl = MatDot(indim=indim, dim=outdim)
    super(SimpleVec2Idx, self).__init__(outl, **kw)
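# Illustrative use (values arbitrary): the entire output layer of
# SimpleVec2Idx is a single MatDot, mapping an indim-sized vector to one
# score per output index.
v2i = SimpleVec2Idx(indim=50, outdim=200)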
def setUp(self):
    self.matdot = MatDot(indim=10, dim=15)
    self.data = np.random.random((100, 10))
    self.matdotout = self.matdot.predict(self.data)
def setlin2(self, v):
    # keep lin2's original (innerdim -> outvocsize) shape when injecting
    # an external weight value v; the flattened original declared
    # (indim, innerdim), which does not match the lin2 defined in __init__
    self.lin2 = MatDot(indim=self.innerdim, dim=self.outvocsize, value=v)
def __init__(self, inner, **kw):
    super(ConcatLeftBlock, self).__init__(**kw)
    # use a trainable MatDot transform for bidirectional inner encoders,
    # an identity function otherwise
    self.trans = MatDot(inner.outdim, inner.outdim, init="glorotuniform") \
        if inner.bidir else (lambda x: x)
    self.inner = inner
def test_set_lr(self):
    self.matdot = MatDot(indim=10, dim=15)