def init(self):
    """Build the memory, encoder and decoder sub-blocks.

    Lazy-init method (named ``init``, not ``__init__``) — presumably called
    after config attributes (numchars, numwords, wordencdim, wordembdim,
    glovepath, encinnerdim, outdim, entembdim, memdata, decinnerdim, memaddr,
    attdim) are set; confirm with callers.
    """
    #memory
    # word-level encoder (char encoder + Glove embeddings, Glove frozen via embtrainfrac=0.0)
    wencpg = WordEncoderPlusGlove(numchars=self.numchars, numwords=self.numwords,
                                  encdim=self.wordencdim, embdim=self.wordembdim,
                                  embtrainfrac=0.0, glovepath=self.glovepath)
    self.memenco = SeqEncoder(
        wencpg,
        GRU(dim=self.wordembdim + self.wordencdim, innerdim=self.encinnerdim)
    )
    entemb = VectorEmbed(indim=self.outdim, dim=self.entembdim)
    # memory payload concatenates entity embedding with the encoded surface form
    self.mempayload = ConcatBlock(entemb, self.memenco)
    self.memblock = MemoryBlock(self.mempayload, self.memdata, indim=self.outdim,
                                outdim=self.encinnerdim + self.entembdim)
    #encoder
    # a second, independently-parameterized word encoder with the same config
    wencpg2 = WordEncoderPlusGlove(numchars=self.numchars, numwords=self.numwords,
                                   encdim=self.wordencdim, embdim=self.wordembdim,
                                   embtrainfrac=0.0, glovepath=self.glovepath)
    self.enc = SeqEncoder(
        wencpg2,
        GRU(dim=self.wordembdim + self.wordencdim, innerdim=self.encinnerdim)
    )
    #decoder
    entemb2 = VectorEmbed(indim=self.outdim, dim=self.entembdim)
    # output layer addresses the memory block instead of a plain linear softmax
    self.softmaxoutblock = stack(
        self.memaddr(self.memblock, indim=self.decinnerdim,
                     memdim=self.memblock.outdim, attdim=self.attdim),
        Softmax())
    self.dec = SeqDecoder(
        [entemb2,  #self.memblock,
         # NOTE(review): GRU input dim uses ``entemb.outdim`` (the memory-side
         # embedder) while the decoder embeds with ``entemb2`` — both are built
         # with dim=self.entembdim so the sizes agree, but verify the intent.
         GRU(dim=entemb.outdim + self.encinnerdim, innerdim=self.decinnerdim),
         # GRU(dim=self.memblock.outdim + self.encinnerdim, innerdim=self.decinnerdim),
         ],
        inconcat=True,
        innerdim=self.decinnerdim,
        softmaxoutblock=self.softmaxoutblock
    )
def __init__(self, baseemb, *layersforencs, **kw):
    """Stack of sequence encoders; only the first one gets the base embedder.

    Each element of ``layersforencs`` (a layer or a sequence of layers)
    becomes one SeqEncoder with masking disabled.
    """
    super(SeqStar2Vec, self).__init__(**kw)
    self.maskid = kw.get("maskid")
    self.encoders = []
    for position, layerspec in enumerate(layersforencs):
        if not issequence(layerspec):
            layerspec = [layerspec]
        # the base embedder feeds only the first encoder in the stack
        embedder = baseemb if position == 0 else None
        encoder = SeqEncoder(embedder, *layerspec).maskoptions(MaskMode.NONE)
        self.encoders.append(encoder)
def __init__(self, wordembdim=50, wordencdim=100, innerdim=200, outdim=1e4,
             numwords=4e5, numchars=128, glovepath=None, **kw):
    """Glove+char word encoder feeding a GRU, topped with a linear output layer.

    ``outdim`` is the size of the output vocabulary; ``innerdim`` the GRU state.
    """
    super(FBBasicCompositeEncoder, self).__init__(**kw)
    self.wordembdim = wordembdim
    self.wordencdim = wordencdim
    self.indim = wordembdim + wordencdim
    self.innerdim = innerdim
    self.outdim = outdim
    # word-level encoder: character RNN plus frozen Glove embeddings
    wordenc = WordEncoderPlusGlove(numchars=numchars, numwords=numwords,
                                   encdim=self.wordencdim, embdim=self.wordembdim,
                                   embtrainfrac=0.0, glovepath=glovepath)
    recurrence = GRU(dim=self.wordembdim + self.wordencdim, innerdim=self.innerdim)
    self.enc = SeqEncoder(wordenc, recurrence)
    self.out = Lin(indim=self.innerdim, dim=self.outdim)
def __init__(self, enclayers, declayers, attgen, attcon, decinnerdim,
             statetrans=None, vecout=False, inconcat=True, outconcat=False, **kw):
    """Attention-based sequence encoder-decoder.

    ``vecout=True`` disables the softmax output block (softmaxoutblock=False);
    otherwise the decoder builds its default one (None).
    """
    encoder = SeqEncoder(*enclayers)
    encoder = encoder.with_outputs()
    encoder = encoder.with_mask()
    encoder = encoder.maskoptions(-1, MaskMode.AUTO, MaskSetMode.ZERO)
    if vecout:
        smo = False
    else:
        smo = None
    decoder = SeqDecoder(declayers,
                         attention=Attention(attgen, attcon),
                         innerdim=decinnerdim,
                         inconcat=inconcat,
                         softmaxoutblock=smo,
                         outconcat=outconcat)
    super(SeqEncDecAtt, self).__init__(encoder, decoder, statetrans=statetrans, **kw)
def __init__(self, indim=400, inpembdim=50, inpemb=None, mode="concat",
             innerdim=100, numouts=1, maskid=0, bidir=False,
             maskmode=MaskMode.NONE, **kw):
    """Sequence encoder producing ``numouts`` output vectors per sequence.

    inpemb=None builds an embedder (one-hot if inpembdim is None, else
    VectorEmbed); inpemb=False disables embedding; otherwise the given
    embedder is used and inpembdim is read from it.

    Fix: ``innerdim`` is copied before mutation, so a caller-supplied list
    is no longer modified in place (the last inner dim gets ``numouts``
    added to reserve output slots).
    """
    super(SimpleSeq2MultiVec, self).__init__(**kw)
    if inpemb is None:
        if inpembdim is None:
            inpemb = IdxToOneHot(indim)
            inpembdim = indim
        else:
            inpemb = VectorEmbed(indim=indim, dim=inpembdim)
    elif inpemb is False:
        inpemb = None
    else:
        inpembdim = inpemb.outdim
    if not issequence(innerdim):
        innerdim = [innerdim]
    else:
        # defensive copy: don't mutate the caller's list below
        innerdim = list(innerdim)
    innerdim[-1] += numouts
    rnn, lastdim = self.makernu(inpembdim, innerdim, bidir=bidir)
    self.outdim = lastdim * numouts
    self.maskid = maskid
    self.inpemb = inpemb
    self.numouts = numouts
    self.mode = mode
    if not issequence(rnn):
        rnn = [rnn]
    self.enc = SeqEncoder(inpemb, *rnn).maskoptions(maskid, maskmode)
    self.enc.all_outputs()
def __init__(self, invocsize=27, outvocsize=500, innerdim=300, **kw):
    """Character sequence to index classifier: embed, GRU-encode, linear out."""
    super(seq2idx, self).__init__(**kw)
    self.invocsize = invocsize
    self.outvocsize = outvocsize
    self.innerdim = innerdim
    # square embedding: one dense vector per input symbol, same width as vocab
    embedder = VectorEmbed(indim=self.invocsize, dim=self.invocsize)
    recurrence = GRU(dim=self.invocsize, innerdim=self.innerdim)
    self.enc = SeqEncoder(embedder, recurrence)
    self.outlin = Lin(indim=self.innerdim, dim=self.outvocsize)
def test_memory_block_with_seq_encoder_dynamic_fail(self):
    """A dynamic MemoryBlock built without indim must refuse to predict."""
    vocsize = 5
    hiddendim = 13
    encoder = SeqEncoder(IdxToOneHot(vocsize=vocsize),
                         GRU(dim=vocsize, innerdim=hiddendim))
    dynamic_mem = MemoryBlock(encoder, outdim=hiddendim)
    indices = [0, 2, 5]
    #dynamic_mem.predict(indices)
    self.assertRaises(AssertionError, lambda: dynamic_mem.predict(indices))
def test_output_shape_LSTM(self):
    """LSTM-based SeqEncoder maps (batch, seqlen) indices to (batch, dim)."""
    numsam, steps = 100, 5
    hdim, vocsize = 50, 13
    encoder = SeqEncoder(IdxToOneHot(13), LSTM(dim=vocsize, innerdim=hdim))
    inp = np.random.randint(0, vocsize, (numsam, steps)).astype("int32")
    out = encoder.predict(inp)
    self.assertEqual(out.shape, (numsam, hdim))
def __init__(self, indim=220, outdim=200, maskid=0, **kw):
    """Character-level word encoder; ``indim`` is the alphabet size."""
    super(WordEncoder, self).__init__(**kw)
    onehot = IdxToOneHot(indim)
    recurrence = GRU(dim=indim, innerdim=outdim)
    self.enc = SeqEncoder(onehot, recurrence).maskoptions(maskid, MaskMode.AUTO)
def setUp(self):
    """Single-GRU encoder over raw float sequences (no embedder)."""
    featdim = 50
    self.outdim = 100
    numex, steps = 1000, 19
    encoder = SeqEncoder(None, GRU(dim=featdim, innerdim=self.outdim))
    self.enc = self.doswitches(encoder)
    self.data = np.random.random((numex, steps, featdim)).astype("float32")
    self.p = self.enc.predict
    self.out = self.p(self.data)
def __init__(self, inpemb, enclayers, maskid=0, pool=None, **kw):
    """Sequence-to-vector encoder with optional pooling over all states."""
    super(Seq2Vec, self).__init__(**kw)
    self.maskid = maskid
    self.inpemb = inpemb
    if not issequence(enclayers):
        enclayers = [enclayers]
    self.pool = pool
    self.enc = SeqEncoder(inpemb, *enclayers).maskoptions(maskid, MaskMode.AUTO)
    if self.pool is not None:
        # NOTE(review): no call parentheses here — elsewhere ``all_outputs()``
        # is invoked as a method; if it is a method this assigns a bound
        # method rather than the encoder. Verify against SeqEncoder.
        self.enc = self.enc.all_outputs
def test_memory_block_with_seq_encoder_static_fail(self):
    """A static MemoryBlock must reject an extra data argument at predict."""
    vocsize, slots, steps, hdim = 5, 10, 3, 13
    mem_data = np.random.randint(0, vocsize, (slots, steps))
    recurrence = GRU(dim=vocsize, innerdim=hdim)
    payload = SeqEncoder(IdxToOneHot(vocsize=vocsize), recurrence)
    block = MemoryBlock(payload, mem_data, outdim=hdim)
    indices = [0, 2, 5]
    self.assertRaises(AssertionError,
                      lambda: block.predict(indices, mem_data))
def __init__(
        self,
        entembdim=50,
        wordembdim=50,
        wordencdim=100,
        memdata=None,
        attdim=100,
        numchars=128,  # number of different chars
        numwords=4e5,  # number of different words
        glovepath=None,
        innerdim=100,  # dim of memory payload encoder output
        outdim=1e4,  # number of entities
        memaddr=DotMemAddr,
        **kw):
    """Memory-matching model: encodes phrases and addresses an entity memory.

    ``memdata`` is indexed as a pair below: memdata[0] is fed through the
    entity embedder, memdata[1] fills the memory block — presumably
    (entity ids, entity surface forms); confirm with callers.
    """
    super(FBMemMatch, self).__init__(**kw)
    self.wordembdim = wordembdim
    self.wordencdim = wordencdim
    self.entembdim = entembdim
    self.attdim = attdim
    self.encinnerdim = innerdim
    self.outdim = outdim
    # NOTE(review): this unconditionally overrides the ``memaddr`` parameter;
    # any addressing class passed by the caller is silently ignored — looks
    # like a leftover experiment, confirm before relying on the parameter.
    memaddr = TransDotMemAddr
    # memory encoder per word
    #wencpg = WordEmbed(indim=numwords, outdim=self.wordembdim, trainfrac=1.0)
    wordencoder = WordEncoderPlusGlove(numchars=numchars, numwords=numwords,
                                       encdim=self.wordencdim,
                                       embdim=self.wordembdim,
                                       embtrainfrac=0.0, glovepath=glovepath)
    # memory encoder for one cell
    self.phraseencoder = SeqEncoder(
        wordencoder,
        GRU(dim=self.wordembdim + self.wordencdim, innerdim=self.encinnerdim))
    # entity embedder
    entemb = VectorEmbed(indim=self.outdim, dim=self.entembdim)
    self.entembs = entemb(
        memdata[0])  #Val(np.arange(0, self.outdim, dtype="int32")))
    # memory block
    self.mempayload = self.phraseencoder  #ConcatBlock(entemb, self.phraseencoder)
    self.memblock = MemoryBlock(
        self.mempayload, memdata[1], indim=self.outdim,
        outdim=self.encinnerdim)  # + self.entembdim)
    # memory addressing
    self.mema = memaddr(self.memblock, memdim=self.memblock.outdim,
                        attdim=attdim, indim=self.encinnerdim)
def test_output_shape_w_mask(self):
    """With all_outputs the encoder yields per-step states (batch, seqlen, dim)."""
    numsam, steps = 2, 5
    hdim, vocsize = 3, 7
    encoder = SeqEncoder(IdxToOneHot(vocsize),
                         GRU(dim=vocsize, innerdim=hdim)).all_outputs
    inp = np.random.randint(0, vocsize, (numsam, steps)).astype("int32")
    msk = np.zeros_like(inp).astype("float32")
    msk[:, 0:2] = 1
    wgt = np.ones_like(inp).astype("float32")
    out = encoder.predict(inp, wgt, msk)
    self.assertEqual(out.shape, (numsam, steps, hdim))
def init(self):
    """Build memory, shared encoder, attention and decoder sub-blocks.

    Lazy-init method (not ``__init__``); expects config attributes
    (numchars, numwords, wordencdim, wordembdim, glovepath, encinnerdim,
    outdim, entembdim, memdata, decinnerdim, attdim, memaddr) to be set
    beforehand — confirm with callers.
    """
    #MEMORY: encodes how entity is written + custom entity embeddings
    wencpg = WordEncoderPlusGlove(numchars=self.numchars, numwords=self.numwords,
                                  encdim=self.wordencdim, embdim=self.wordembdim,
                                  embtrainfrac=0.0, glovepath=self.glovepath)
    self.memenco = SeqEncoder(
        wencpg,
        GRU(dim=self.wordembdim + self.wordencdim, innerdim=self.encinnerdim))
    entemb = VectorEmbed(indim=self.outdim, dim=self.entembdim)
    self.mempayload = ConcatBlock(entemb, self.memenco)
    self.memblock = MemoryBlock(self.mempayload, self.memdata, indim=self.outdim,
                                outdim=self.encinnerdim + self.entembdim)
    #ENCODER: uses the same language encoder as memory
    #wencpg2 = WordEncoderPlusGlove(numchars=self.numchars, numwords=self.numwords, encdim=self.wordencdim, embdim=self.wordembdim, embtrainfrac=0.0, glovepath=glovepath)
    # ``wencpg`` is reused here, so memory and encoder share word-encoder weights
    self.enc = RecStack(
        wencpg,
        GRU(dim=self.wordembdim + self.wordencdim, innerdim=self.encinnerdim))
    #ATTENTION
    attgen = LinearGateAttentionGenerator(indim=self.encinnerdim + self.decinnerdim,
                                          innerdim=self.attdim)
    attcon = WeightedSumAttCon()
    #DECODER
    #entemb2 = VectorEmbed(indim=self.outdim, dim=self.entembdim)
    # output layer addresses the memory block (then softmax) instead of a plain
    # linear projection; it sees decoder state concatenated with encoder state
    self.softmaxoutblock = stack(
        self.memaddr(self.memblock, indim=self.decinnerdim + self.encinnerdim,
                     memdim=self.memblock.outdim, attdim=self.attdim),
        Softmax())
    self.dec = SeqDecoder([
        self.memblock,
        GRU(dim=self.entembdim + self.encinnerdim, innerdim=self.decinnerdim)
    ],
        outconcat=True,
        inconcat=False,
        attention=Attention(attgen, attcon),
        innerdim=self.decinnerdim + self.encinnerdim,
        softmaxoutblock=self.softmaxoutblock)
def test_memory_block_with_seq_encoder(self):
    """Static memory lookup returns encoded cells; GRU params cover all params."""
    vocsize, slots, steps, hdim = 5, 10, 3, 13
    mem_data = np.random.randint(0, vocsize, (slots, steps))
    recurrence = GRU(dim=vocsize, innerdim=hdim)
    payload = SeqEncoder(IdxToOneHot(vocsize=vocsize), recurrence)
    block = MemoryBlock(payload, mem_data, indim=vocsize, outdim=hdim)
    picked = [0, 2, 5]
    elem = block.predict(picked)
    self.assertEqual(elem.shape, (len(picked), hdim))
    rnnparams = set(getattr(recurrence, pname)
                    for pname in recurrence.paramnames)
    allparams = set(block.output.allparams)
    self.assertEqual(rnnparams.intersection(allparams), allparams)
def setUp(self):
    """Three stacked GRUs over raw float input (no embedder)."""
    numex, steps = 1000, 19
    featdim, h1, h2 = 71, 51, 61
    self.outdim = 47
    stacked = SeqEncoder(None,
                         GRU(dim=featdim, innerdim=h1),
                         GRU(dim=h1, innerdim=h2),
                         GRU(dim=h2, innerdim=self.outdim))
    self.enc = self.doswitches(stacked)
    self.data = np.random.random((numex, steps, featdim)).astype("float32")
    self.p = self.enc.predict
    self.out = self.p(self.data)
def test_mask_dynamic_pad(self): batsize = 10 seqlen = 5 dim = 6 indim = 5 m = SeqEncoder(IdxToOneHot(indim), GRU(dim=indim, innerdim=dim)).maskoption(-1).all_outputs() data = np.random.randint(0, indim, (batsize, seqlen)).astype("int32") rmasker = np.random.randint(2, seqlen, (batsize, )).astype("int32") print rmasker for i in range(data.shape[0]): data[i, rmasker[i]:] = -1 print data pred = m.predict(data) print pred
def test_mask_no_state_updates(self): batsize = 10 seqlen = 3 dim = 7 indim = 5 m = SeqEncoder(IdxToOneHot(indim), GRU(dim=indim, innerdim=dim)).maskoption(-1).all_outputs data = np.random.randint(0, indim, (batsize, seqlen)).astype("int32") data[:, 1] = 0 ndata = np.ones_like(data) * -1 data = np.concatenate([data, ndata], axis=1) pred = m.predict(data) for i in range(1, pred.shape[1]): print np.linalg.norm(pred[:, i - 1, :] - pred[:, i, :]) if i < seqlen: self.assertTrue(not np.allclose(pred[:, i - 1, :], pred[:, i, :])) else: self.assertTrue(np.allclose(pred[:, i - 1, :], pred[:, i, :]))
def test_mask_zero_mask_with_custom_maskid(self): batsize = 10 seqlen = 3 dim = 7 indim = 5 m = SeqEncoder(IdxToOneHot(indim), GRU(dim=indim, innerdim=dim)).maskoptions(-1, MaskSetMode.ZERO).all_outputs data = np.random.randint(0, indim, (batsize, seqlen)).astype("int32") data[:, 1] = 0 ndata = np.ones_like(data) * -1 data = np.concatenate([data, ndata], axis=1) pred = m.predict(data) for i in range(pred.shape[1]): print np.linalg.norm(pred[:, i - 1, :] - pred[:, i, :]) if i < seqlen: for j in range(pred.shape[0]): self.assertTrue(np.linalg.norm(pred[j, i, :]) > 0.0) else: for j in range(pred.shape[0]): self.assertTrue(np.linalg.norm(pred[j, i, :]) == 0.0)
def __init__(self, wordembdim=50, wordencdim=100, entembdim=200, innerdim=200,
             outdim=1e4, numwords=4e5, numchars=128, glovepath=None, **kw):
    """Composite word encoder feeding a GRU decoder over entity embeddings.

    Encoder and decoder share the same inner dimension (``innerdim``).
    """
    super(FBSeqCompositeEncDec, self).__init__(**kw)
    self.wordembdim = wordembdim
    self.wordencdim = wordencdim
    self.indim = wordembdim + wordencdim
    self.outdim = outdim
    self.entembdim = entembdim
    self.encinnerdim = innerdim
    self.decinnerdim = innerdim
    wordenc = WordEncoderPlusGlove(numchars=numchars, numwords=numwords,
                                   encdim=self.wordencdim, embdim=self.wordembdim,
                                   embtrainfrac=0.0, glovepath=glovepath)
    self.enc = SeqEncoder(
        wordenc,
        GRU(dim=self.wordembdim + self.wordencdim, innerdim=self.encinnerdim))
    entembedder = VectorEmbed(indim=self.outdim, dim=self.entembdim)
    # decoder GRU consumes entity embedding concatenated with encoder context
    decrnn = GRU(dim=self.entembdim + self.encinnerdim, innerdim=self.decinnerdim)
    self.dec = SeqDecoder([entembedder, decrnn],
                          inconcat=True,
                          innerdim=self.decinnerdim,
                          )
def test_mask_propagation_all_states(self): m = SeqEncoder(VectorEmbed(maskid=0, indim=100, dim=7), GRU(dim=7, innerdim=30)).all_outputs()\ .maskoptions(MaskSetMode.ZERO) data = np.random.randint(1, 100, (5, 3), dtype="int32") ndata = np.zeros_like(data) data = np.concatenate([data, ndata], axis=1) dataval = Val(data) embvar = m.embedder(dataval) embpred = embvar.eval() embmaskpred = embvar.mask.eval() encvar = m(dataval) encpred = encvar.eval() encmaskpred = encvar.mask.eval() print encpred.shape print encmaskpred.shape print encmaskpred self.assertTrue(np.sum(encmaskpred - embmaskpred) == 0)
def test_memory_block_with_seq_encoder_dynamic(self):
    """Dynamic memory (data at predict time) matches the static equivalent."""
    vocsize, slots, steps, hdim = 5, 10, 3, 13
    mem_data = np.random.randint(0, vocsize, (slots, steps))
    recurrence = GRU(dim=vocsize, innerdim=hdim)
    payload = SeqEncoder(IdxToOneHot(vocsize=vocsize), recurrence)
    dyn = MemoryBlock(payload, outdim=hdim)
    picked = [0, 2, 5]
    pred = dyn.predict
    elem = pred(picked, mem_data)
    self.assertEqual(elem.shape, (len(picked), hdim))
    rnnparams = set(getattr(recurrence, pname)
                    for pname in "u w b uhf whf bhf um wm bm".split())
    allparams = set(pred.outs[0].allparams)
    self.assertEqual(rnnparams.intersection(allparams), allparams)
    # same data baked in statically must give identical predictions
    stat = MemoryBlock(payload, mem_data, outdim=hdim)
    statout = stat.predict(picked)
    self.assertTrue(np.allclose(statout, elem))
def __init__(self, embedder, *layers, **kw):
    """Per-step sequence transducer: embed, run layers, keep all step outputs."""
    super(SeqTrans, self).__init__(**kw)
    encoder = SeqEncoder(embedder, *layers)
    # transduction needs every time step; masking is handled upstream
    encoder.all_outputs().maskoption(MaskMode.NONE)
    self.enc = encoder