Example #1
 def __init__(self,
              layers,
              softmaxoutblock=None,
              innerdim=None,
              attention=None,
              inconcat=True,
              outconcat=False,
              dropout=False,
              **kw):
     super(SeqDecoder, self).__init__(**kw)
     self.embedder = layers[0]            # first layer embeds the input symbols
     self.block = RecStack(*layers[1:])   # remaining layers form the recurrent stack
     self.outdim = innerdim
     self.attention = attention
     self.inconcat = inconcat
     self.outconcat = outconcat
     self._mask = False
     self._attention = None
     assert (isinstance(self.block, ReccableBlock))
     if softmaxoutblock is None:  # default softmax out block
         sm = Softmax()
         self.lin = Linear(indim=self.outdim,
                           dim=self.embedder.indim,
                           dropout=dropout)
         self.softmaxoutblock = asblock(lambda x: sm(self.lin(x)))
     elif softmaxoutblock is False:
         # False disables the output block: pass decoder states through unchanged
         self.softmaxoutblock = asblock(lambda x: x)
     else:
         self.softmaxoutblock = softmaxoutblock
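The asblock(lambda x: sm(self.lin(x))) idiom above wraps a plain function into a block, so the default linear-plus-softmax output layer can be composed and swapped like any other block. A minimal sketch of that wrapping idea, assuming only that a block is a callable object (FnBlock and asblock_sketch are hypothetical names, not the library's implementation):

 class FnBlock(object):
     # hypothetical stand-in for a Block: just calls the stored function
     def __init__(self, fn):
         self.fn = fn

     def __call__(self, *args):
         return self.fn(*args)

 def asblock_sketch(fn):
     # wrap a plain callable so it composes like any other block
     return FnBlock(fn)

 double = asblock_sketch(lambda x: x * 2)
 print(double(21))   # prints 42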
Example #2
 def __init__(self, enc, dec, statetrans=None, **kw):
     super(SeqEncDec, self).__init__(**kw)
     self.enc = enc
     self.dec = dec
     if isinstance(statetrans, Block):
         # apply the given block to the first argument and ignore the second
         self.statetrans = asblock(lambda x, y: statetrans(x))
     elif statetrans is True:
         # pass the first argument through unchanged
         self.statetrans = asblock(lambda x, y: x)
     else:
         self.statetrans = statetrans
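The statetrans argument is normalized here into a two-argument callable: a Block is applied to the first argument only, True becomes an identity on the first argument, and anything else (e.g. None) is stored as-is. The same dispatch can be mimicked with plain functions; the names below are illustrative only, not the library's API:

 def normalize_statetrans(statetrans):
     # mirror of the branching above, using plain callables instead of blocks
     if callable(statetrans):
         return lambda x, y: statetrans(x)
     elif statetrans is True:
         return lambda x, y: x
     else:
         return statetrans

 f = normalize_statetrans(lambda s: s + 1)
 print(f(41, "ignored"))                      # 42
 print(normalize_statetrans(True)(7, None))   # 7
 print(normalize_statetrans(None))            # None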
Example #3
 def __init__(self,
              lenc,
              renc,
              aggregator=asblock(lambda x: T.sum(x, axis=1)),
              **kw):
     self.agg = aggregator
     super(SeqMatchScore, self).__init__(lenc, renc, **kw)
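The default aggregator, asblock(lambda x: T.sum(x, axis=1)), pools a sequence of encodings by summing over the second axis. A small numpy sketch of the same reduction, assuming encodings shaped (batch, seqlen, dim):

 import numpy as np

 enc = np.ones((2, 3, 4), dtype="float32")   # (batch, seqlen, dim)
 pooled = enc.sum(axis=1)                     # sum out the sequence axis
 print(pooled.shape)                          # (2, 4)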
Example #4
 def innerf(encstates, encspec, decspec):
     # flatten the nested (kind, dims) spec groups into flat, aligned lists
     decspec = reduce(lambda x, y: list(x) + list(y), decspec, [])
     encspec = reduce(lambda x, y: list(x) + list(y), encspec, [])
     assert (len(decspec) == len(encspec))
     ret = []
     for i in range(len(encspec)):
         if encspec[i][0] == "state" and decspec[i][0] == "state":
             if (decspec[i][1][0] != encspec[i][1][0]
                     or statetrans == "matdot"):
                 t = MatDot(encspec[i][1][0], decspec[i][1][0])
             else:
                 t = asblock(lambda x: x)
         elif encspec[i][0] == decspec[i][0]:
             t = None
         else:
             raise Exception()
         ret.append(t)
     assert (len(encstates) == len(ret))
     out = []
     for encstate, rete in zip(encstates, ret):
         if rete is None:
             out.append(None)
         else:
             out.append(rete(encstate))
     return out
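innerf first flattens the nested encoder/decoder spec groups into flat, aligned lists and then pairs them index by index: matching "state" entries get a MatDot projection when their dimensions differ (or when statetrans == "matdot") and an identity block otherwise, while other matching entries pass through as None. The flattening step can be reproduced with plain lists; the spec values below are made up for illustration:

 from functools import reduce

 encspec = [[("state", (5,)), ("output", (5,))], [("state", (3,))]]
 flat = reduce(lambda x, y: list(x) + list(y), encspec, [])
 print(flat)   # [('state', (5,)), ('output', (5,)), ('state', (3,))]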
Example #5
 def setUp(self):
     dim = 50
     self.vocabsize = 2000
     data = np.arange(0, self.vocabsize).astype("int32")
     self.O = param((dim, self.vocabsize)).uniform()
     self.W = VectorEmbed(indim=self.vocabsize, dim=50)
     self.out = stack(self.W, asblock(lambda x: T.dot(self.O, x)),
                      Softmax())(Input(ndim=1, dtype="int32"))
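The stack above composes an embedding lookup, a projection wrapped with asblock, and a Softmax into one graph applied to an int32 index input. A rough numpy sketch of that forward pass; the shapes and the orientation of the projection are assumptions for illustration, not the library's exact semantics:

 import numpy as np

 vocabsize, dim = 2000, 50
 E = np.random.randn(vocabsize, dim).astype("float32")   # embedding table
 O = np.random.randn(dim, vocabsize).astype("float32")   # output projection

 emb = E[7]                          # embed one int32 token index -> (dim,)
 scores = emb.dot(O)                 # project into vocabulary space -> (vocabsize,)
 probs = np.exp(scores - scores.max())
 probs /= probs.sum()                # softmax over the vocabulary
 print(probs.shape, probs.sum())     # (2000,) ~1.0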
Example #6
 def __init__(self,
              layers,
              softmaxoutblock=None,
              innerdim=None,
              attention=None,
              inconcat=False,
              outconcat=False,
              **kw):  # limit says at most how many is produced
     self.embedder = layers[0]
     self.outdim = innerdim
     self.inconcat = inconcat
     self.outconcat = outconcat
     self.attention = attention
     super(SeqDecoder, self).__init__(*layers[1:], **kw)     # puts layers into a ReccableBlock
     self._mask = False
     self._attention = None
     assert(isinstance(self.block, ReccableBlock))
     if softmaxoutblock is None: # default softmax out block
         sm = Softmax()
         self.lin = MatDot(indim=self.outdim, dim=self.embedder.indim)
         self.softmaxoutblock = asblock(lambda x: sm(self.lin(x)))
     else:
         self.softmaxoutblock = softmaxoutblock
     self.init_states = None