Example #1
0
def dropout(X, p=0.):
    """
    Inverted dropout: zero each activation with probability ``p`` and
    scale the survivors by ``1 / (1 - p)``, so no weight rescaling is
    required at test time.

    Parameters
    ----------
    X : symbolic tensor
        Input activations.
    p : float, optional
        Drop probability; ``p <= 0`` returns ``X`` unchanged.
    """
    if not p > 0:
        return X
    keep_prob = 1 - p
    # Bernoulli mask of 0/1 values cast to the configured float dtype.
    mask = t_rng.binomial(X.shape, p=keep_prob, dtype=theano.config.floatX)
    return X * mask / keep_prob
Example #2
0
def dropout(X, p=0.):
    """
    Apply dropout with activation scaling ("inverted dropout").

    Units are kept with probability ``1 - p``; kept units are divided by
    the retain probability, which avoids rescaling weights at test time.
    A non-positive ``p`` is a no-op.
    """
    if p > 0:
        retain = 1 - p
        # Sample a 0/1 keep mask matching X's shape, then rescale.
        keep_mask = t_rng.binomial(X.shape, p=retain, dtype=theano.config.floatX)
        X = (X * keep_mask) / retain
    return X
Example #3
0
File: ops.py  Project: drmingle/Foxhound
 def op(self, state):
     """
     Build this layer's output expression, applying inverted dropout to
     the input layer's output when ``state['dropout']`` is truthy.

     Kept units are divided by the retain probability ``1 - self.p_drop``
     so no rescaling is needed when dropout is disabled (e.g. at test
     time).
     """
     out = self.l_in.op(state=state)
     keep_prob = 1 - self.p_drop
     if state['dropout']:
         # 0/1 Bernoulli keep mask in the configured float dtype.
         mask = t_rng.binomial(out.shape, p=keep_prob, dtype=theano.config.floatX)
         out = out / keep_prob * mask
     return out
Example #4
0
 def op(self, state):
     """
     Return the layer's symbolic output with optional inverted dropout.

     When ``state['dropout']`` is truthy, the upstream activations are
     masked by a Bernoulli sample and scaled by ``1 / (1 - self.p_drop)``;
     otherwise they pass through unchanged.
     """
     activations = self.l_in.op(state=state)
     retain = 1 - self.p_drop
     if state['dropout']:
         activations = (activations / retain) * t_rng.binomial(
             activations.shape, p=retain, dtype=theano.config.floatX
         )
     return activations