Example #1
 def innerapply(self, x, _trainmode=False):  # x: assumed to hold probabilities; log(x) below treats them as logits
     if _trainmode or self._debug:
         rng = RVal(self.seed)
         shap = self._shape if self._shape is not None else x.shape
         g = rng.gumbel(shap)              # sample standard Gumbel noise
         y = (T.log(x) + g) / self.temp    # perturbed, temperature-scaled logits
         ret = T.softmax(y, x.mask)        # masked softmax over the noisy logits
         ret.mask = x.mask
         return ret
     else:
         return T.softmax(x, x.mask, temperature=self._det_sm_temp)
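Example #1 above implements Gumbel-softmax sampling: Gumbel noise is added to the log-probabilities and the result goes through a temperature-scaled (masked) softmax. Below is a minimal NumPy sketch of the same trick, assuming x holds probabilities; it is an illustration only, not code from the library, and the function name is made up.

import numpy as np

def gumbel_softmax_sample(probs, temperature=1.0, seed=0):
    rng = np.random.default_rng(seed)
    u = rng.uniform(low=1e-9, high=1.0, size=probs.shape)  # U ~ Uniform(0, 1)
    g = -np.log(-np.log(u))                                 # standard Gumbel noise
    y = (np.log(probs) + g) / temperature                   # perturbed, scaled logits
    e = np.exp(y - y.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)                # softmax

sample = gumbel_softmax_sample(np.array([0.1, 0.2, 0.7]), temperature=0.5)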
Example #2
 def __init__(self,
              p=0.3,
              seed=None,
              rescale=True,
              _alwaysrandom=False,
              **kw):
     super(Dropout, self).__init__(**kw)
     if seed is None:
         seed = np.random.randint(0, 1e6)
     self.p = 0.0 if (p is False or p is None) else 0.3 if p is True else p  # False/None disables dropout, True uses the default rate
     self.rescale = rescale
     self.seed = seed
     self._debug = _alwaysrandom
     self.rval = RVal(self.seed)
Example #3
 def apply(self, x, _trainmode=False):
     if (_trainmode or self._debug) and self.p > 0:
         xmask = x.mask
         if self.rescale:
             one = T.constant(1)
             x /= one - self.p  # inverted dropout: rescale by 1/(1 - p) at train time
         rng = RVal(self.seed)
         rv = rng.binomial(x.shape, p=1 - self.p, dtype=x.dtype)  # keep-mask: 1 with probability 1 - p
         x = x * rv
         #print "done dropout"
         x.mask = xmask
         # x.push_extra_outs({"dropout{}".format(np.random.randint(100, 199)): rv})
         return x
     else:
         return x
Example #4
class Dropout(Block):
    def __init__(self,
                 p=0.3,
                 seed=None,
                 rescale=True,
                 _alwaysrandom=False,
                 **kw):
        super(Dropout, self).__init__(**kw)
        if seed is None:
            seed = np.random.randint(0, 1e6)
        self.p = 0.0 if (p is False or p is None) else 0.3 if p is True else p
        self.rescale = rescale
        self.seed = seed
        self._debug = _alwaysrandom
        self.rval = RVal(self.seed)

    def apply(self, x, _trainmode=False):
        if (_trainmode or self._debug) and self.p > 0:
            #print "Dropout: YES"
            xmask = x.mask
            if self.rescale:
                one = T.constant(1)
                x /= one - self.p  # inverted dropout: rescale by 1/(1 - p) at train time
            # reuse the RVal created in __init__ rather than building a fresh one every call
            rv = self.rval.binomial(x.shape, p=1 - self.p, dtype=x.dtype)  # keep-mask: 1 with probability 1 - p
            x = x * rv
            #print "done dropout"
            x.mask = xmask
            # x.push_extra_outs({"dropout{}".format(np.random.randint(100, 199)): rv})
            return x
        else:
            #print "Dropout: NO"
            return x
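Examples #3 and #4 implement "inverted" dropout: at training time (or when randomness is forced) the input is rescaled by 1 / (1 - p) and multiplied elementwise by a binomial keep-mask, so no rescaling is needed at test time. A minimal NumPy sketch of that scheme, with a made-up function name, purely as an illustration:

import numpy as np

def inverted_dropout(x, p=0.3, train=True, seed=None):
    if not train or p <= 0:
        return x                                  # test time: pass through unchanged
    rng = np.random.default_rng(seed)
    keep = rng.binomial(n=1, p=1 - p, size=x.shape).astype(x.dtype)
    return (x / (1 - p)) * keep                   # rescale, then drop units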
Example #5
 def __init__(self, **kw):
     super(RandomSequence, self).__init__(**kw)
     self.randval = RVal().normal((5, ))
Example #6
 def rec(self, x_t):
     return RVal().normal(x_t.shape)
Example #7
 def apply(self, x):     # (batsize,)
     m = self.W_m[x]
     s = self.W_s[x]
     z = RVal(seed=self.seed).normal(m.shape) * s + m  # reparameterization: z ~ N(m, s**2)
     o = T.dot(z, self.O)
     return Softmax()(o)
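Example #7 draws a latent sample with the reparameterization trick: z = m + s * eps with eps ~ N(0, 1), where m and s are embedding lookups for the per-dimension mean and standard deviation, and z is then projected and softmaxed. The sampling step alone, sketched in NumPy for illustration (the values are made up):

import numpy as np

rng = np.random.default_rng(42)
m = np.array([0.0, 1.0, -0.5])           # mean looked up for one input symbol
s = np.array([1.0, 0.5, 2.0])            # standard deviation looked up for the same symbol
z = m + s * rng.normal(size=m.shape)     # z ~ N(m, s**2) via reparameterization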
Example #8
 def test_value_shape(self):
     rv = RVal().binomial((25, ), p=0.5)
     self.assertEqual(rv.d.shape.eval(), (25, ))