Example #1
 def __init__(self,
              indim=400,
              inpembdim=50,
              inpemb=None,
              mode="concat",
              innerdim=100,
              numouts=1,
              maskid=0,
              bidir=False,
              maskmode=MaskMode.NONE,
              **kw):
     super(SimpleSeq2MultiVec, self).__init__(**kw)
     if inpemb is None:
         if inpembdim is None:
             inpemb = IdxToOneHot(indim)
             inpembdim = indim
         else:
             inpemb = VectorEmbed(indim=indim, dim=inpembdim)
     elif inpemb is False:
         inpemb = None
     else:
         inpembdim = inpemb.outdim
     if not issequence(innerdim):
         innerdim = [innerdim]
     innerdim[-1] += numouts
     rnn, lastdim = self.makernu(inpembdim, innerdim, bidir=bidir)
     self.outdim = lastdim * numouts
     self.maskid = maskid
     self.inpemb = inpemb
     self.numouts = numouts
     self.mode = mode
     if not issequence(rnn):
         rnn = [rnn]
     self.enc = SeqEncoder(inpemb, *rnn).maskoptions(maskid, maskmode)
     self.enc.all_outputs()
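The guard `if not issequence(x): x = [x]` recurs throughout these examples. A minimal standalone sketch of the idiom (the `issequence` helper below is an assumption about its behaviour, not the library's actual implementation):

def issequence(x):
    # assumed behaviour: lists and tuples count as sequences, everything else does not
    return isinstance(x, (list, tuple))

def normalize_dims(innerdim):
    # wrap a scalar so downstream code can always iterate over per-layer dims
    if not issequence(innerdim):
        innerdim = [innerdim]
    return innerdim

assert normalize_dims(100) == [100]
assert normalize_dims([100, 200]) == [100, 200]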
Example #2
 def __init__(self,
              indim=100,
              innerdim=200,
              window=5,
              poolmode="max",
              activation=Tanh,
              stride=1,
              **kw):
     super(CNNEnc, self).__init__(**kw)
     self.layers = []
     if not issequence(innerdim):
         innerdim = [innerdim]
     if not issequence(window):
         window = [window] * len(innerdim)
     if not issequence(activation):
         activation = [activation()] * len(innerdim)
     else:
         activation = [act() for act in activation]
     if not issequence(stride):
         stride = [stride] * len(innerdim)
     assert (len(window) == len(innerdim))
     innerdim = [indim] + innerdim
     for i in range(1, len(innerdim)):
         layer = Conv1D(indim=innerdim[i - 1],
                        outdim=innerdim[i],
                        window=window[i - 1],
                        stride=stride[i - 1])
         self.layers.append(layer)
         self.layers.append(activation[i - 1])
     self.layers.append(GlobalPool1D(mode=poolmode))
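The CNNEnc constructor above broadcasts scalar hyperparameters (window, activation, stride) to one value per layer. A self-contained sketch of that broadcasting in plain Python, with no library dependencies:

def broadcast_layer_params(innerdim, window=5, stride=1):
    # wrap scalars into per-layer lists, mirroring the checks in CNNEnc.__init__
    innerdim = list(innerdim) if isinstance(innerdim, (list, tuple)) else [innerdim]
    window = list(window) if isinstance(window, (list, tuple)) else [window] * len(innerdim)
    stride = list(stride) if isinstance(stride, (list, tuple)) else [stride] * len(innerdim)
    assert len(window) == len(innerdim) == len(stride)
    return innerdim, window, stride

print(broadcast_layer_params([150, 200, 250], window=3))
# -> ([150, 200, 250], [3, 3, 3], [1, 1, 1])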
Example #3
 def __init__(self,
              indim=400,
              inpembdim=50,
              inpemb=None,
              innerdim=100,
              maskid=None,
              bidir=False,
              **kw):
     if inpemb is None:
         if inpembdim is None:
             inpemb = IdxToOneHot(indim)
             inpembdim = indim
         else:
             inpemb = VectorEmbed(indim=indim, dim=inpembdim)
     else:
         inpembdim = inpemb.outdim
     lastdim = inpembdim
     if not issequence(innerdim):  # single encoder
         innerdim = [innerdim]
     rnns = []
     for innerdimi in innerdim:
         if not issequence(innerdimi):  # one layer in encoder
             innerdimi = [innerdimi]
         rnn, lastdim = MakeRNU.make(lastdim, innerdimi, bidir=bidir)
         rnns.append(rnn)
     self.outdim = lastdim
     super(SimpleSeqStar2Vec, self).__init__(inpemb,
                                             *rnns,
                                             maskid=maskid,
                                             **kw)
Example #4
    def __init__(self,
                 inpvocsize=400,
                 inpembdim=50,
                 outvocsize=100,
                 outembdim=50,
                 encdim=100,
                 decdim=100,
                 attdim=100,
                 bidir=False,
                 rnu=GRU,
                 outconcat=True,
                 inconcat=True, **kw):
        super(SeqEncDecAtt, self).__init__(**kw)
        self.inpvocsize = inpvocsize
        self.outvocsize = outvocsize
        self.inpembdim = inpembdim
        self.outembdim = outembdim
        self.encinnerdim = [encdim] if not issequence(encdim) else encdim
        self.decinnerdim = [decdim] if not issequence(decdim) else decdim
        self.attdim = attdim
        self.rnu = rnu
        self.bidir = bidir

        # encoder stack
        self.inpemb = VectorEmbed(indim=self.inpvocsize, dim=self.inpembdim)
        self.encrnus = []
        dims = [self.inpembdim] + self.encinnerdim
        i = 1
        while i < len(dims):
            if self.bidir:
                rnu = BiRNU.fromrnu(self.rnu, dim=dims[i-1], innerdim=dims[i])
            else:
                rnu = self.rnu(dim=dims[i-1], innerdim=dims[i])
            self.encrnus.append(rnu)
            i += 1
        self.encoder = RecurrentStack(*([self.inpemb] + self.encrnus))

        # attention
        self.attgen = LinearGateAttentionGenerator(indim=self.encinnerdim[-1] + self.decinnerdim[-1], attdim=self.attdim)
        self.attcon = WeightedSumAttCon()

        # decoder
        self.outemb = VectorEmbed(indim=self.outvocsize, dim=self.outembdim)
        self.decrnus = []
        dims = [self.outembdim + self.encinnerdim[-1]] + self.decinnerdim
        i = 1
        while i < len(dims):
            self.decrnus.append(self.rnu(dim=dims[i-1], innerdim=dims[i]))
            i += 1
        self.decoder = SeqDecoder(
            [self.outemb] + self.decrnus,
            attention=Attention(self.attgen, self.attcon),
            innerdim=self.encinnerdim[-1] + self.decinnerdim[-1],
            outconcat=outconcat,
            inconcat=inconcat,
        )
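Both the encoder and decoder stacks above chain layer dimensions by pairing consecutive entries of a dims list. A small illustrative sketch of that chaining (the values are examples, not taken from the source):

inpembdim, encinnerdim = 50, [100, 100]
dims = [inpembdim] + encinnerdim
# each RNN layer maps dims[i-1] -> dims[i]
layer_shapes = [(dims[i - 1], dims[i]) for i in range(1, len(dims))]
print(layer_shapes)  # [(50, 100), (100, 100)]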
Example #5
 def accumulate(self, gold, pred):
     if issequence(pred):
         pred = pred[0][0]
     else:
         pred = [pred]
     if issequence(gold):
         assert(len(gold) == 1)
         gold = gold[0]
     else:
         gold = [gold]
     if np.array_equal(gold, pred):
         self.acc += 1
     self.div += 1
Example #6
 def accumulate(self, gold, pred):
     if issequence(pred):
         pred = pred[0][0]
     else:
         pred = [pred]
     if issequence(gold):
         assert (len(gold) == 1)
         gold = gold[0]
     else:
         gold = [gold]
     if np.array_equal(gold, pred):
         self.acc += 1
     self.div += 1
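Examples #5 and #6 implement the same accumulate-and-divide accuracy bookkeeping. A minimal standalone sketch of that pattern (the class name and the value() helper are assumptions added for illustration):

import numpy as np

class AccuracyMeter(object):
    def __init__(self):
        self.acc, self.div = 0, 0

    def accumulate(self, gold, pred):
        # count a hit when the whole predicted sequence matches the gold sequence
        if np.array_equal(gold, pred):
            self.acc += 1
        self.div += 1

    def value(self):
        return float(self.acc) / max(self.div, 1)

m = AccuracyMeter()
m.accumulate([1, 2, 3], [1, 2, 3])
m.accumulate([1, 2, 3], [1, 2, 4])
print(m.value())  # 0.5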
Example #7
 def __init__(self,
              numchars=256,
              charembdim=50,
              charemb=None,
              charinnerdim=100,
              numwords=1000,
              wordembdim=100,
              wordemb=None,
              wordinnerdim=200,
              maskid=None,
              bidir=False,
              returnall=False,
              **kw):
     # char level inits
     if charemb is None:
         charemb = VectorEmbed(indim=numchars, dim=charembdim)
     else:
         charemb = charemb
         charembdim = charemb.outdim
     if not issequence(charinnerdim):
         charinnerdim = [charinnerdim]
     charlayers, lastchardim = MakeRNU.make(charembdim,
                                            charinnerdim,
                                            bidir=bidir)
     charenc = SeqEncoder(charemb,
                          *charlayers).maskoptions(maskid, MaskMode.AUTO)
     # word level inits
     if wordemb is None:
         wordemb = VectorEmbed(indim=numwords, dim=wordembdim)
     elif wordemb is False:
         wordemb = None
         wordembdim = 0
     else:
         wordemb = wordemb
         wordembdim = wordemb.outdim
     if not issequence(wordinnerdim):
         wordinnerdim = [wordinnerdim]
     wordlayers, outdim = MakeRNU.make(wordembdim + lastchardim,
                                       wordinnerdim,
                                       bidir=bidir)
     wordenc = SeqEncoder(None, *wordlayers).maskoptions(MaskMode.NONE)
     if returnall:
         wordenc.all_outputs()
     self.outdim = outdim
     super(WordCharSentEnc, self).__init__(l1enc=charenc,
                                           l2emb=wordemb,
                                           l2enc=wordenc,
                                           maskid=maskid)
Example #8
 def make(initdim,
          specs,
          rnu=GRU,
          bidir=False,
          zoneout=False,
          dropout_in=False,
          dropout_h=False):
     if not issequence(specs):
         specs = [specs]
     rnns = []
     prevdim = initdim
     for spec in specs:
         fspec = {"dim": None, "bidir": bidir, "rnu": rnu}
         if isinstance(spec, int):
             fspec["dim"] = spec
         elif isinstance(spec, dict):
              # a dict spec must provide "dim" and may only use keys that fspec knows
              assert ("dim" in spec and
                      set(spec.keys()).union(set(fspec.keys())) == set(fspec.keys()))
             fspec.update(spec)
         if fspec["bidir"] == True:
             rnn = BiRNU.fromrnu(fspec["rnu"],
                                 dim=prevdim,
                                 innerdim=fspec["dim"])
             prevdim = fspec["dim"] * 2
         else:
             rnn = fspec["rnu"](dim=prevdim,
                                innerdim=fspec["dim"],
                                zoneout=zoneout,
                                dropout_h=dropout_h,
                                dropout_in=dropout_in)
             prevdim = fspec["dim"]
         rnns.append(rnn)
     return rnns, prevdim
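MakeRNU.make accepts either a plain int per layer or a dict that overrides defaults such as bidir and rnu. A standalone sketch of that spec normalization (plain Python; the RNN unit is represented by a string for illustration):

def normalize_spec(spec, bidir=False, rnu="GRU"):
    fspec = {"dim": None, "bidir": bidir, "rnu": rnu}
    if isinstance(spec, int):
        fspec["dim"] = spec
    elif isinstance(spec, dict):
        # a dict spec must provide "dim" and may only use keys that fspec knows
        assert "dim" in spec and set(spec.keys()) <= set(fspec.keys())
        fspec.update(spec)
    return fspec

print(normalize_spec(200))                          # dim=200, bidir=False, rnu='GRU'
print(normalize_spec({"dim": 300, "bidir": True}))  # dim=300, bidir=True, rnu='GRU'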
Example #9
 def get_init_info(self, initstates):
     recurrentlayers = list(
         filter(lambda x: isinstance(x, ReccableBlock), self.layers))
      # no non-reccable recurrent blocks allowed
      assert (len(filter(
          lambda x: isinstance(x, RecurrentBlock) and not isinstance(x, ReccableBlock),
          self.layers)) == 0)
      if issequence(initstates):
          # initstates is a sequence of init state values (not a batch size);
          # fill it up so that layers without a specified init state get a
          # default argument and build their own default init state
          if len(initstates) < self.numstates:
              # top layers get the given init states, bottom layers make their own defaults
              initstates = [None] * (self.numstates - len(initstates)) + initstates
         batsize = 0
         for initstate in initstates:
             if initstate is not None:
                 batsize = initstate.shape[0]
         initstates = [
             batsize if initstate is None else initstate
             for initstate in initstates
         ]
     else:  # expecting a batsize as initstate arg
         initstates = [initstates] * self.numstates
     init_infos = []
     for recurrentlayer in recurrentlayers:  # from bottom layers to top
         arg = initstates[:recurrentlayer.numstates]
         initstates = initstates[recurrentlayer.numstates:]
         initinfo = recurrentlayer.get_init_info(arg)
         init_infos.extend(initinfo)
     return init_infos
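The init-state handling above pads a too-short list with None placeholders and then replaces them with the batch size inferred from the states that were given. A small standalone sketch of that padding, with plain lists standing in for state tensors:

def pad_init_states(initstates, numstates):
    # pad with None so bottom layers fall back to their default init state
    if len(initstates) < numstates:
        initstates = [None] * (numstates - len(initstates)) + initstates
    # a None entry is replaced by the batch size taken from any concrete state
    batsize = 0
    for initstate in initstates:
        if initstate is not None:
            batsize = len(initstate)   # stand-in for initstate.shape[0]
    return [batsize if s is None else s for s in initstates]

print(pad_init_states([[0.0] * 4, [0.0] * 4], numstates=3))
# -> [4, [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]]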
Example #10
 def apply(self, fn, **kwargs):
     trueargs = recurmap(lambda x: x.d if hasattr(x, "d") else x, kwargs)
     o, updates = theano.scan(self.fnwrap(fn), **trueargs)
     ret = [Var(oe) for oe in o] if issequence(o) else Var(o)
     for var in recurfilter(lambda x: isinstance(x, Var), ret):
         var.push_params(self._recparams)
     return ret, updates
Example #11
 def __init__(self,
              indim=500,
              inpembdim=100,
              inpemb=None,
              innerdim=200,
              bidir=False,
              maskid=None,
              dropout_in=False,
              dropout_h=False,
              rnu=GRU,
              **kw):
     self.bidir = bidir
     inpemb, inpembdim = SeqEncoder.getemb(inpemb,
                                           inpembdim,
                                           indim,
                                           maskid=maskid)
     if not issequence(innerdim):
         innerdim = [innerdim]
     #self.outdim = innerdim[-1] if not bidir else innerdim[-1] * 2
     layers, lastdim = MakeRNU.make(inpembdim,
                                    innerdim,
                                    bidir=bidir,
                                    rnu=rnu,
                                    dropout_in=dropout_in,
                                    dropout_h=dropout_h)
     self._lastdim = lastdim
     super(RNNSeqEncoder, self).__init__(inpemb, *layers, **kw)
Example #12
 def _build(self, *inps):
     res = self.wrapply(*inps)
     if issequence(res):
         output = res[0]
     else:
         output = res
     return output
Example #13
 def apply(self, fn, **kwargs):
     trueargs = recurmap(lambda x: x.d if hasattr(x, "d") else x, kwargs)
     oldupdates = _get_updates_from(kwargs)
     numouts, numberofextraargs = self.getnumberofextraargs(fn, **kwargs)
     #numouts, numberofextraargs = None, None    # TODO switch this line to go back
     if ("outputs_info" not in trueargs or trueargs["outputs_info"] is None)\
             and numouts is not None:
         trueargs["outputs_info"] = [None] * numouts
     if numberofextraargs is not None:
         trueargs["outputs_info"] += [None] * numberofextraargs
     o, newupdates = theano.scan(self.fnwrap(fn), **trueargs)
     o = [o] if not issequence(o) else o
     ret = [Var(oe) for oe in o]
     extra_out = None
     if numouts is not None and numberofextraargs is not None:
         extra_out = ret[numouts:]
         ret = ret[:numouts]
     for var in recurfilter(lambda x: isinstance(x, Var), ret):
         var.push_params(self._recparams)
         var.push_updates(oldupdates)
         var.push_updates(newupdates)
         if extra_out is not None:
             var.push_extra_outs(dict(zip(self._rec_extra_outs, extra_out)))
     #print updates
     if len(ret) == 1:
         ret = ret[0]
     return ret
Example #14
 def recappl(self, inps, states):
     numrecargs = getnumargs(self.rec) - 2  # how much to pop from states
     mystates = states[:numrecargs]
     tail = states[numrecargs:]
     inps = [inps] if not issequence(inps) else inps
     outs = self.rec(*(inps + mystates))
     return outs[0], outs[1:], tail
Example #15
 def build(self, inps):  # data: (batsize, ...)
     batsize = inps[0].shape[0]
     inits = self.model.get_init_info(*(list(self.buildargs) + [batsize]))
     nonseqs = []
     if isinstance(inits, tuple):
         nonseqs = inits[1]
         inits = inits[0]
     inpvars = [Input(ndim=inp.ndim, dtype=inp.dtype) for inp in inps]
     if self.transf is not None:
         tinpvars = self.transf(*inpvars)
         if not issequence(tinpvars):
             tinpvars = (tinpvars, )
         tinpvars = list(tinpvars)
     else:
         tinpvars = inpvars
     statevars = [self.wrapininput(x) for x in inits]
     nonseqvars = [self.wrapininput(x) for x in nonseqs]
     out = self.model.rec(*(tinpvars + statevars + nonseqvars))
     alloutvars = out
     self.f = theano.function(
         inputs=[x.d for x in inpvars + statevars + nonseqvars],
         outputs=[x.d for x in alloutvars],
         on_unused_input="warn")
     self.statevals = [self.evalstate(x) for x in inits]
     self.nonseqvals = [self.evalstate(x) for x in nonseqs]
Example #16
 def recappl(self, inps, states):
     numrecargs = getnumargs(self.rec) - 2       # how much to pop from states
     mystates = states[:numrecargs]
     tail = states[numrecargs:]
     inps = [inps] if not issequence(inps) else inps
     outs = self.rec(*(inps + mystates))
     return outs[0], outs[1:], tail
Example #17
 def __init__(self,
              indim=500,
              inpembdim=100,
              inpemb=None,
              innerdim=200,
              bidir=False,
              maskid=None,
              zoneout=False,
              dropout_in=False,
              dropout_h=False,
              **kw):
     if inpemb is None:
         inpemb = VectorEmbed(indim=indim, dim=inpembdim, maskid=maskid)
     elif inpemb is False:
         inpemb = None
     else:
         inpembdim = inpemb.outdim
     if not issequence(innerdim):
         innerdim = [innerdim]
     layers, _ = MakeRNU.make(inpembdim,
                              innerdim,
                              bidir=bidir,
                              zoneout=zoneout,
                              dropout_in=dropout_in,
                              dropout_h=dropout_h)
     super(RNNSeqEncoder, self).__init__(inpemb, *layers, **kw)
Example #18
 def __init__(self, indim=400, embdim=50, innerdim=100, outdim=50, **kw):
     self.emb = VectorEmbed(indim=indim, dim=embdim)
     if not issequence(innerdim):
         innerdim = [innerdim]
     innerdim = [embdim] + innerdim
     self.rnn = self.getrnnfrominnerdim(innerdim)
     super(SimpleSeqTransducer, self).__init__(self.emb, *self.rnn, smodim=innerdim[-1], outdim=outdim, **kw)
Example #19
 def apply(self, fn, **kwargs):
     trueargs = recurmap(lambda x: x.d if hasattr(x, "d") else x, kwargs)
     o, updates = theano.scan(self.fnwrap(fn), **trueargs)
     ret = [Var(oe) for oe in o] if issequence(o) else Var(o)
     for var in recurfilter(lambda x: isinstance(x, Var), ret):
         var.push_params(self._recparams)
     return ret, updates
Example #20
 def test_all_output_parameters(self):
     outputs = self.enc.wrapply(*self.p.inps)
     if issequence(outputs) and len(outputs) > 1:
         outputparamsets = [x.allparams for x in outputs if isinstance(x, (Var, Val))]
         for i in range(len(outputparamsets)):
             for j in range(i, len(outputparamsets)):
                 self.assertSetEqual(outputparamsets[i], outputparamsets[j])
     if issequence(outputs):
         outputs = outputs[0]
     outputparamcounts = {}
     for paramname in [x.name for x in outputs.allparams]:
         if paramname not in outputparamcounts:
             outputparamcounts[paramname] = 0
         outputparamcounts[paramname] += 1
     for (_, y) in outputparamcounts.items():
         self.assertEqual(y, self.expectednumberparams)
     self.assertSetEqual(set(outputparamcounts.keys()), set(self.expectedparams))
Example #21
 def test_all_output_parameters(self):
     outputs = self.enc.wrapply(*self.enc.inputs)
     if issequence(outputs) and len(outputs) > 1:
         outputparamsets = [x.allparams for x in outputs]
         for i in range(len(outputparamsets)):
             for j in range(i, len(outputparamsets)):
                 self.assertSetEqual(outputparamsets[i], outputparamsets[j])
     if issequence(outputs):
         outputs = outputs[0]
     outputparamcounts = {}
     for paramname in [x.name for x in outputs.allparams]:
         if paramname not in outputparamcounts:
             outputparamcounts[paramname] = 0
         outputparamcounts[paramname] += 1
     for (_, y) in outputparamcounts.items():
         self.assertEqual(y, self.expectednumberparams)
     self.assertSetEqual(set(outputparamcounts.keys()), set(self.expectedparams))
Example #22
 def __init__(self, inpemb, enclayers, maskid=0, pool=None, **kw):
     super(Seq2Vec, self).__init__(**kw)
     self.maskid = maskid
     self.inpemb = inpemb
     if not issequence(enclayers):
         enclayers = [enclayers]
     self.pool = pool
     self.enc = SeqEncoder(inpemb, *enclayers).maskoptions(maskid, MaskMode.AUTO)
     if self.pool is not None:
          self.enc = self.enc.all_outputs()  # fluent call: all_outputs() returns the encoder itself
Example #23
        def fwrapper(*args):  # theano vars
            trueargs = [Var(x, name="innerrecwrapvarwrap") for x in args]
            res = fn(*trueargs)  # has the params from inner rec
            ret = recurmap(lambda x: x.d if hasattr(x, "d") else x, res)
            if not issequence(ret):
                ret = (ret, )
            if issequence(ret):
                ret = tuple(ret)
            outvars = recurfilter(lambda x: isinstance(x, Var), res)
            for var in outvars:
                scanblock._recparams.update(var._params)
                for k, extra_out in sorted(var._extra_outs.items(),
                                           key=lambda (a, b): a):
                    ret += (extra_out.d, )
                    scanblock._rec_extra_outs.append(k)
                    # scanblock._rec_extra_outs.update(var._extra_outs)
            return ret
Example #24
 def fwrapper(*args):  # theano vars
     trueargs = [Var(x, name="innerrecwrapvarwrap") for x in args]
     res = fn(*trueargs)  # has the params from inner rec
     ret = recurmap(lambda x: x.d if hasattr(x, "d") else x, res)
     if issequence(ret):
         ret = tuple(ret)
     outvars = recurfilter(lambda x: isinstance(x, Var), res)
     for var in outvars:
         scanblock._recparams.update(var._params)
     return ret
Example #25
 def fwrapper(*args): # theano vars
     trueargs = [Var(x, name="innerrecwrapvarwrap") for x in args]
     res = fn(*trueargs) # has the params from inner rec
     ret = recurmap(lambda x: x.d if hasattr(x, "d") else x, res)
     if issequence(ret):
         ret = tuple(ret)
     outvars = recurfilter(lambda x: isinstance(x, Var), res)
     for var in outvars:
         scanblock._recparams.update(var._params)
     return ret
Example #26
 def do_get_init_info(self, initstates):
     if issequence(initstates):
         c_t0 = initstates[0]
         red = initstates[1:]
         y_t0 = T.zeros((c_t0.shape[0], self.innerdim))
     else:
         c_t0 = T.zeros((initstates, self.innerdim))
         red = initstates
         y_t0 = T.zeros((initstates, self.innerdim))
     return [y_t0, c_t0], red
Example #27
 def get_init_info(self, initstates):    # either a list of init states or the batsize
     if not issequence(initstates):
         initstates = [initstates] * self.numstates
     acc = []
     for initstate in initstates:
         if isinstance(initstate, int) or initstate.ndim == 0:
             acc.append(T.zeros((initstate, self.innerdim)))
         else:
             acc.append(initstate)
     return acc
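get_init_info above turns an integer batch size into zero-filled init states while passing through states that were supplied directly. A sketch of the same branching with numpy standing in for the Theano tensor ops:

import numpy as np

def get_init_info(initstates, numstates, innerdim):
    if not isinstance(initstates, (list, tuple)):
        initstates = [initstates] * numstates
    acc = []
    for initstate in initstates:
        if isinstance(initstate, int):
            # an integer is interpreted as the batch size -> zero init state
            acc.append(np.zeros((initstate, innerdim)))
        else:
            acc.append(initstate)
    return acc

states = get_init_info(8, numstates=2, innerdim=3)
print([s.shape for s in states])  # [(8, 3), (8, 3)]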
Example #28
 def validfun(*sampleinps):
     ret = symbolic_validfun(*sampleinps)
     for ev in extravalid:
         a = ev(*sampleinps)
         if not issequence(a):
             a = [a]
         else:
             if isinstance(a, tuple):
                 a = list(a)
         ret += a
     return ret
Example #29
 def getnumberofextraargs(self, fn, **kwargs):
     seqs = kwargs["sequences"]
     seqs = [] if seqs is None else seqs
     seqs = [seqs] if not issequence(seqs) else seqs
     seqs = [seq[0] for seq in seqs]
     nonseqs = kwargs["non_sequences"] if "non_sequences" in kwargs else None
     nonseqs = [] if nonseqs is None else nonseqs
     nonseqs = [nonseqs] if not issequence(nonseqs) else nonseqs
     initinfos = kwargs["outputs_info"] if "outputs_info" in kwargs else None
     initinfos = [None] * len(seqs) if initinfos is None else initinfos
     initinfos = [initinfos] if not issequence(initinfos) else initinfos
     fnargs = seqs + filter(lambda x: x is not None, initinfos)
     fnappl = fn(*(fnargs + nonseqs))
     if not issequence(fnappl):
         fnappl = [fnappl]
     numouts = len(fnappl)
     numextraouts = 0
     for realout in fnappl:
         numextraouts += len(realout.all_extra_outs)
     return numouts, numextraouts
Example #30
 def __init__(self, indim=400, embdim=50, innerdim=100, outdim=50, **kw):
     self.emb = VectorEmbed(indim=indim, dim=embdim)
     if not issequence(innerdim):
         innerdim = [innerdim]
     innerdim = [embdim] + innerdim
     self.rnn = self.getrnnfrominnerdim(innerdim)
     super(SimpleSeqTransducer, self).__init__(self.emb,
                                               *self.rnn,
                                               smodim=innerdim[-1],
                                               outdim=outdim,
                                               **kw)
Example #31
 def get_init_info(self, initstates):  # either a list of init states or the batsize
     if not issequence(initstates):
         initstates = [initstates] * self.numstates
     acc = []
     for initstate in initstates:
         if isinstance(initstate, int) or initstate.ndim == 0:
             acc.append(T.zeros((initstate, self.innerdim)))
         else:
             acc.append(initstate)
     return acc
Example #32
 def __init__(self, outlayers, **kw):
     super(Vec2Idx, self).__init__(**kw)
     if isinstance(outlayers, MemoryStack):
         out = outlayers
     else:
         if not issequence(outlayers):
             outlayers = [outlayers]
         if type(outlayers[-1]) is not Softmax:
             outlayers.append(Softmax())
         out = stack(*outlayers)
     self.out = out
Example #33
 def __init__(self, outlayers, **kw):
     super(Vec2Idx, self).__init__(**kw)
     if isinstance(outlayers, MemoryStack):
         out = outlayers
     else:
         if not issequence(outlayers):
             outlayers = [outlayers]
         if type(outlayers[-1]) is not Softmax:
             outlayers.append(Softmax())
         out = stack(*outlayers)
     self.out = out
Example #34
 def __init__(self, **kw):
     super(SimpleSeq2Sca, self).__init__(**kw)
     self.enc.all_outputs().with_mask()
     if "innerdim" in kw:
         kwindim = kw["innerdim"]
         if issequence(kwindim):
             summdim = kwindim[-1]
         else:
             summdim = kwindim
     else:
         summdim = 100
     self.summ = param((summdim, ), name="summarize").uniform()
Example #35
 def do_get_init_info(self, initstates):
     if issequence(initstates):
         h_t0 = initstates[0]
         mem_t0 = initstates[1]
         red = initstates[2:]
         m_t0 = T.zeros((h_t0.shape[0], self.innerdim))
     else:       # initstates is batchsize scalar
         h_t0 = T.zeros((initstates, self.innerdim))
         mem_t0 = T.zeros((initstates, self.memsize, self.innerdim))
         red = initstates
         m_t0 = T.zeros((initstates, self.innerdim))
     return [m_t0, mem_t0, h_t0], red
Example #36
 def do_get_init_info(self, initstates):
     if issequence(initstates):
         h_t0 = initstates[0]
         mem_t0 = initstates[1]
         red = initstates[2:]
         m_t0 = T.zeros((h_t0.shape[0], self.innerdim))
     else:  # initstates is batchsize scalar
         h_t0 = T.zeros((initstates, self.innerdim))
         mem_t0 = T.zeros((initstates, self.memsize, self.innerdim))
         red = initstates
         m_t0 = T.zeros((initstates, self.innerdim))
     return [m_t0, mem_t0, h_t0], red
Example #37
    def autobuild(self, *inputdata, **kwinputdata):
        transform = None
        trainmode = False
        batsize = None
        if "transform" in kwinputdata:
            transform = kwinputdata.pop("transform")
        if "_trainmode" in kwinputdata:
            trainmode = kwinputdata.pop("_trainmode")
        if "_batsize" in kwinputdata:
            batsize = kwinputdata.pop("_batsize")
        inputdata = map(
            lambda x: x if isinstance(x, (np.ndarray, DataFeed)) else
            (np.asarray(x) if x is not None else None), inputdata)
        for k in kwinputdata:
            x = kwinputdata[k]
            kwinputdata[k] = x if isinstance(x, (np.ndarray, DataFeed)) else (
                np.asarray(x) if x is not None else x)
        inputs = []
        kwinputs = {}
        inpnum = 1
        for td in inputdata:
            tdshape = list(td.shape)
            tdshape[0] = batsize if batsize is not None else tdshape[0]
            inputs.append(None if td is None else Input(ndim=td.ndim,
                                                        dtype=td.dtype,
                                                        shape=tdshape,
                                                        name="inp:%d" %
                                                        inpnum))
            inpnum += 1
        for k in kwinputdata:
            td = kwinputdata[k]
            tdshape = list(td.shape)
            tdshape[0] = batsize if batsize is not None else tdshape[0]
            kwinputs[k] = None if td is None else Input(ndim=td.ndim,
                                                        dtype=td.dtype,
                                                        shape=tdshape,
                                                        name="kwinp:%s" % k)

        kwinputl = kwinputs.items()
        if transform is not None:
            kwinputl.append(("transform", transform))
        kwinputl.append(("_trainmode", trainmode))
        kwinputl = dict(kwinputl)
        output = self.wrapply(*inputs, **kwinputl)

        kwn = []
        for k in sorted(kwinputs.keys()):
            kwn.append(kwinputs[k])

        outinputs = tuple(inputs) + tuple(kwn)
        outinputs = filter(lambda x: x is not None, outinputs)
        output = (output, ) if not issequence(output) else output
        return outinputs, output
Example #38
 def innerapply(self, x, mask=None, initstates=None):
     assert (x.ndim == 3 and (mask is None or mask.ndim == 2))
     if initstates is None:
         infoarg = x.shape[0]  # batsize
     else:
         infoarg = initstates
         assert (issequence(infoarg))
     inputs = x.dimswap(1, 0)  # inputs is (seq_len, batsize, dim)
     init_info = self.get_init_info(infoarg)
     if mask is None:
         outputs = T.scan(fn=self.rec,
                          sequences=inputs,
                          outputs_info=[None] + init_info,
                          go_backwards=self._reverse)
     else:
         outputs = T.scan(fn=self.recwmask,
                          sequences=[inputs, mask.dimswap(1, 0)],
                          outputs_info=[None] + init_info,
                          go_backwards=self._reverse)
     if not issequence(outputs):
         outputs = [outputs]
     outputs = [x.dimswap(1, 0) for x in outputs]
     return outputs[0][:, -1, :], outputs[0], outputs[1:]
Example #39
 def validfun(*sampleinps):
     ret = []
     if symbolic_validfun is not None:
         for x in symbolic_validfun(*sampleinps):
             ret.append(x)
     for ev in extravalid:
         a = ev(*sampleinps)
         if not issequence(a):
             a = [a]
         else:
             if isinstance(a, tuple):
                 a = list(a)
         ret += a
     return ret
Example #40
        def fwrapper(*args): # theano vars
            trueargs = [Var(x, name="innerrecwrapvarwrap") for x in args]
            res = fn(*trueargs)
            ret = recurmap(lambda x: x.d if hasattr(x, "d") else x, res)
            if issequence(ret):
                ret = tuple(ret)
            newparents = recurfilter(lambda x: isinstance(x, (Var, Val, until)), res)

            for npa in newparents:
                scanblock.add_parent(npa)
            #self.add_params(reduce(lambda x, y: set(x).union(set(y)),
            #                       map(lambda x: x.allparams, recurfilter(lambda x: isinstance(x, Var), res)), set()))
            #self.add_params(recurfilter(lambda x: isinstance(x, Parameter), res))
            return ret
Example #41
 def __init__(self, baseemb, *layersforencs, **kw):
     super(SeqStar2Vec, self).__init__(**kw)
     self.maskid = None if "maskid" not in kw else kw["maskid"]
     self.encoders = []
     atbase = True
     for layers in layersforencs:
         if not issequence(layers):
             layers = [layers]
         if atbase:
             enc = SeqEncoder(baseemb, *layers).maskoptions(MaskMode.NONE)
             atbase = False
         else:
             enc = SeqEncoder(None, *layers).maskoptions(MaskMode.NONE)
         self.encoders.append(enc)
Example #42
 def apply(self, x, initstates=None):
     if initstates is None:
         infoarg = x.shape[0]    # batsize
     else:
         infoarg = initstates
         assert(issequence(infoarg))
     inputs = x.dimswap(1, 0) # inputs is (seq_len, batsize, dim)
     init_info = self.get_init_info(infoarg)
     outputs, _ = T.scan(fn=self.rec,
                         sequences=inputs,
                         outputs_info=[None]+init_info,
                         go_backwards=self._reverse)
     output = outputs[0]
     return output.dimswap(1, 0) # return is (batsize, seqlen, dim)
Example #43
 def __init__(self, block=None, data=None, indim=200, outdim=50, **kw):
     assert(block is not None)
     ourdata = []
     if not issequence(data):
         data = [data]
     for datae in data:
         if not isinstance(datae, (Var, Val)) and datae is not None:
             ourdata.append(Val(datae))
         else:
             ourdata.append(datae)
     assert(isinstance(block, Block))
     self.data = ourdata
     super(MemoryBlock, self).__init__(indim, outdim, **kw)      # outdim = outdim of the contained block
     self.payload = block
     self.innervar = self.payload(*self.data) if None not in data else None    # innervar: (indim, outdim)
Example #44
    def get_inits(self, initstates=None, batsize=None, ctx=None, ctxmask=None):
        if initstates is None:
            initstates = batsize
        elif issequence(initstates):
            if len(initstates) < self.numstates:
                # fill up with batsizes for the lower layers
                initstates = [batsize] * (self.numstates - len(initstates)) + initstates

        ctxmask = ctx.mask if ctxmask is None else ctxmask
        ctxmask = T.ones(ctx.shape[:2],
                         dtype="float32") if ctxmask is None else ctxmask
        nonseqs = [ctxmask, ctx]
        return self.get_init_info(initstates), nonseqs
Example #45
 def __init__(self, block=None, data=None, indim=200, outdim=50, **kw):
     assert(block is not None)
     ourdata = []
     if not issequence(data):
         data = [data]
     for datae in data:
         if not isinstance(datae, (Var, Val)) and datae is not None:
             ourdata.append(Val(datae))
         else:
             ourdata.append(datae)
     assert(isinstance(block, Block))
     self.data = ourdata
     super(MemoryBlock, self).__init__(indim, outdim, **kw)      # outdim = outdim of the contained block
     self.payload = block
     self.innervar = self.payload(*self.data) if None not in data else None    # innervar: (indim, outdim)
Example #46
 def apply(self, *args):  # args is a tuple of tuples of *args and **kwargs for each of the blocks in the concatenation
     res = []
     for block, arg in zip(self.blocks, args):
         if self.argfun is not None:
             arglist, argdic = self.argfun(arg)
         elif issequence(arg):
             assert(len(arg) < 3 and len(arg) > 0)
             arglist = arg[0]
             argdic = arg[1] if len(arg) > 1 else {}
         elif isinstance(arg, (Var, Val)):
             arglist = [arg]
             argdic = {}
         else:
             raise Exception("something wrong with concat's arguments: " + str(args))
         res.append(block(*arglist, **argdic))
     return T.concatenate(res, axis=self.axis)
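The per-block argument dispatch above accepts either an (args,) / (args, kwargs) pair or a single variable per block. A standalone sketch of that normalization (plain Python; Var/Val are replaced by ordinary values for illustration):

def normalize_block_arg(arg):
    if isinstance(arg, (list, tuple)):
        assert 0 < len(arg) < 3
        arglist = arg[0]
        argdic = arg[1] if len(arg) > 1 else {}
    else:
        # a bare value is treated as a single positional argument
        arglist, argdic = [arg], {}
    return arglist, argdic

print(normalize_block_arg(([1, 2], {"axis": 1})))   # ([1, 2], {'axis': 1})
print(normalize_block_arg(3.0))                     # ([3.0], {})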
Example #47
 def innerapply(self, x, mask=None, initstates=None):
     assert(x.ndim == 3 and (mask is None or mask.ndim == 2))
     if initstates is None:
         infoarg = x.shape[0]    # batsize
     else:
         infoarg = initstates
         assert(issequence(infoarg))
     inputs = x.dimswap(1, 0) # inputs is (seq_len, batsize, dim)
     init_info = self.get_init_info(infoarg)
     if mask is None:
         outputs, _ = T.scan(fn=self.rec,
                             sequences=inputs,
                             outputs_info=[None]+init_info,
                             go_backwards=self._reverse)
     else:
         outputs, _ = T.scan(fn=self.recwmask,
                             sequences=[inputs, mask.dimswap(1, 0)],
                             outputs_info=[None] + init_info,
                             go_backwards=self._reverse)
     outputs = [x.dimswap(1, 0) for x in outputs]
     return outputs[0][:, -1, :], outputs[0], outputs[1:]
Example #48
    def autobuild(self, *inputdata, **kwinputdata):
        transform = None
        if "transform" in kwinputdata:
            transform = kwinputdata.pop("transform")
        inputdata = map(lambda x:
                        x if isinstance(x, (np.ndarray, DataFeed)) else (np.asarray(x) if x is not None else None),
                        inputdata)
        for k in kwinputdata:
            x = kwinputdata[k]
            kwinputdata[k] = x if isinstance(x, (np.ndarray, DataFeed)) else (np.asarray(x)
                                                  if x is not None else x)
        inputs = []
        kwinputs = {}
        inpnum = 1
        for td in inputdata:
            inputs.append(None if td is None else Input(ndim=td.ndim, dtype=td.dtype, name="inp:%d" % inpnum))
            inpnum += 1
        for k in kwinputdata:
            td = kwinputdata[k]
            kwinputs[k] = None if td is None else Input(ndim=td.ndim, dtype=td.dtype, name="kwinp:%s" % k)

        kwinputl = kwinputs.items()
        if transform is not None:
            kwinputl.append(("transform", transform))
        output = self._build(*inputs, **dict(kwinputl))

        kwn = []
        for k in sorted(kwinputs.keys()):
            kwn.append(kwinputs[k])

        outinputs = tuple(inputs) + tuple(kwn)
        outinputs = filter(lambda x: x is not None, outinputs)
        output = (output,) if not issequence(output) else output
        self.inputs = outinputs
        self.outputs = output
        return outinputs, output
Example #49
 def build(self, inps):  # data: (batsize, ...)
     batsize = inps[0].shape[0]
     inits = self.model.get_init_info(*(list(self.buildargs)+[batsize]))
     nonseqs = []
     if isinstance(inits, tuple):
         nonseqs = inits[1]
         inits = inits[0]
     inpvars = [Input(ndim=inp.ndim, dtype=inp.dtype) for inp in inps]
     if self.transf is not None:
         tinpvars = self.transf(*inpvars)
         if not issequence(tinpvars):
             tinpvars = (tinpvars,)
         tinpvars = list(tinpvars)
     else:
         tinpvars = inpvars
     statevars = [self.wrapininput(x) for x in inits]
     nonseqvars = [self.wrapininput(x) for x in nonseqs]
     out = self.model.rec(*(tinpvars + statevars + nonseqvars))
     alloutvars = out
     self.f = theano.function(inputs=[x.d for x in inpvars + statevars + nonseqvars],
                              outputs=[x.d for x in alloutvars],
                              on_unused_input="warn")
     self.statevals = [self.evalstate(x) for x in inits]
     self.nonseqvals = [self.evalstate(x) for x in nonseqs]
Example #50
 def apply(self, fn, **kwargs):
     self.params.extend(recurfilter(lambda x: isinstance(x, Parameter), kwargs))
     trueargs = recurmap(lambda x: x.d if hasattr(x, "d") else x, kwargs)
     o, updates = theano.scan(self.fnwrap(fn), **trueargs)
     ret = [Var(oe) for oe in o] if issequence(o) else Var(o)
     return ret, updates
Example #51
 def get_init_info(self, initstates):
     info, red = self.do_get_init_info(initstates)
     assert((issequence(red) and len(red) == 0) or (not issequence(red)))
     return info
Example #52
 def do_get_init_info(self, initstates):    # either a list of init states or the batsize
     if issequence(initstates):
         return [initstates[0]], initstates[1:]
     else:
         return [T.zeros((initstates, self.innerdim))], initstates
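Example #51 asserts the contract that do_get_init_info must consume all supplied init states: the remainder it returns is either an empty sequence or the bare batch size. A tiny standalone illustration of that contract, with nested lists standing in for tensors:

def do_get_init_info(initstates, innerdim):
    if isinstance(initstates, (list, tuple)):
        # a supplied list: consume the first state, return the rest as the remainder
        return [initstates[0]], initstates[1:]
    # a plain batch size: build a zero state of shape (batsize, innerdim)
    return [[[0.0] * innerdim for _ in range(initstates)]], initstates

info, red = do_get_init_info(4, innerdim=3)
# the contract checked in Example #51: the remainder is empty or not a sequence
assert (isinstance(red, (list, tuple)) and len(red) == 0) or not isinstance(red, (list, tuple))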