Example #1
def step(z, f, o, c):
    '''
    Runs fo-pooling at each time step.

    z, f, o: candidate input, forget gate and output gate at this step.
    c:       cell memory carried over from the previous step.
    H, a 3-D tensor of hidden states that is attended over (batch, seqlen, hidden),
    and the option object opt come from the enclosing scope.
    '''
    c = f * c + (1 - f) * z  # fo-pooling cell update

    if opt.att:  # attention
        a = tf.nn.softmax(tf.einsum("ijk,ik->ij", H, c))  # alpha. (16, 150)
        k = (a.sg_expand_dims() * H).sg_sum(axis=1)  # attentional sum. (16, 150)
        h = o * (k.sg_dense(act="linear") + c.sg_dense(act="linear"))
    else:
        h = o * c

    return h, c  # hidden states, (new) cell memories
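For reference, the recurrence in this example (c_t = f_t * c_{t-1} + (1 - f_t) * z_t, followed by h_t = o_t * c_t when attention is off) is applied once per time step, so a whole sequence is processed by unrolling it over time. Below is a minimal NumPy sketch of that unrolling without the attention branch; the fo_pool name, the (seqlen, batch, hidden) layout and the toy shapes are illustrative assumptions, not taken from the original code.

import numpy as np

def fo_pool(Z, F, O, c0):
    '''
    Minimal fo-pooling sketch (no attention branch).

    Z, F, O: candidate inputs, forget gates and output gates, each of
             shape (seqlen, batch, hidden).
    c0:      initial cell memory, shape (batch, hidden).
    Returns the stacked hidden states, shape (seqlen, batch, hidden).
    '''
    c = c0
    hs = []
    for z, f, o in zip(Z, F, O):      # iterate over the time axis
        c = f * c + (1.0 - f) * z     # same cell update as step() above
        hs.append(o * c)              # h = o * c (non-attention branch)
    return np.stack(hs)

# Toy usage: seqlen=5, batch=2, hidden=3.
Z = np.random.rand(5, 2, 3)
F = np.random.rand(5, 2, 3)           # forget gates in (0, 1)
O = np.random.rand(5, 2, 3)
print(fo_pool(Z, F, O, np.zeros((2, 3))).shape)  # (5, 2, 3)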
def step(z, f, o, c):
    '''
    Runs fo-pooling at each time step.

    Variant of the example above: the two linear projections are built with
    sg_dense_gpus, apparently a project-specific dense-layer wrapper that
    additionally takes a variable name, a target device and a reuse flag.
    H, opt and the time-step index t come from the enclosing scope.
    '''
    c = f * c + (1 - f) * z  # fo-pooling cell update

    if opt.att:  # attention
        a = tf.nn.softmax(tf.einsum("ijk,ik->ij", H, c))  # alpha. (b, seqlen)
        k = (a.sg_expand_dims() * H).sg_sum(axis=1)  # attentional sum. (b, hidden)
        h = o * (k.sg_dense_gpus(act="linear", name="k%d_%s" % (t, opt.name),
                                 dev=opt.dev, reuse=opt.reuse_vars)
                 + c.sg_dense_gpus(act="linear", name="c%d_%s" % (t, opt.name),
                                   dev=opt.dev, reuse=opt.reuse_vars))
    else:
        h = o * c

    return h, c  # hidden states, (new) cell memories
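The attention branch shared by both examples scores every row H[i, j, :] against the current cell state c[i, :] with tf.einsum("ijk,ik->ij", H, c), softmaxes the scores over the sequence axis, and uses them as weights for a sum over H, giving one context vector per batch element. The NumPy sketch below reproduces just that context computation without sugartensor or the dense projections; the attention_context name and the toy shapes are assumptions made for illustration.

import numpy as np

def attention_context(H, c):
    '''
    Illustrative NumPy version of the attention branch above.

    H: hidden states attended over, shape (batch, seqlen, hidden).
    c: current cell memory, shape (batch, hidden).
    Returns the context vector k, shape (batch, hidden).
    '''
    scores = np.einsum("ijk,ik->ij", H, c)         # (batch, seqlen)
    scores -= scores.max(axis=1, keepdims=True)    # for numerical stability
    a = np.exp(scores)
    a /= a.sum(axis=1, keepdims=True)              # softmax over seqlen
    return np.einsum("ij,ijk->ik", a, H)           # weighted sum. (batch, hidden)

# Toy usage: batch=2, seqlen=4, hidden=3.
H = np.random.rand(2, 4, 3)
c = np.random.rand(2, 3)
print(attention_context(H, c).shape)               # (2, 3)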