import theano
from theano.sandbox.rng_mrg import MRG_RandomStreams

# Shared random stream (an assumption here; the original defines t_rng elsewhere).
t_rng = MRG_RandomStreams()

def dropout(X, p=0.):
    """Dropout using activation scaling ("inverted" dropout) to avoid
    rescaling the weights at test time."""
    if p > 0:
        retain_prob = 1 - p
        # Zero each unit with probability p, then scale the survivors up by
        # 1/retain_prob so expected activations match the test-time graph.
        X *= t_rng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX)
        X /= retain_prob
    return X
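# A minimal usage sketch (the data and p=0.5 below are illustrative, not from
# the source): because survivors are scaled up by 1/retain_prob during
# training, the p=0 (test) graph needs no weight rescaling.
import numpy as np
import theano.tensor as T

X_sym = T.matrix('X')
f_train = theano.function([X_sym], dropout(X_sym, p=0.5))
f_test = theano.function([X_sym], dropout(X_sym, p=0.))

x = np.ones((2, 4), dtype=theano.config.floatX)
print(f_train(x))  # roughly half the entries zeroed, the rest scaled to 2.0
print(f_test(x))   # all ones, passed through untouched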
def op(self, state):
    # Layer op: self.l_in is the preceding layer, self.p_drop this layer's drop rate.
    X = self.l_in.op(state=state)
    retain_prob = 1 - self.p_drop
    if state['dropout']:
        # Training graph: mask and rescale in one step; the test graph
        # (state['dropout'] == False) passes X through untouched.
        X = X / retain_prob * t_rng.binomial(X.shape, p=retain_prob,
                                             dtype=theano.config.floatX)
    return X
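# Hypothetical sketch of how the state flag might select between the two
# graphs. The Input and Dropout classes below are stand-ins invented for
# illustration; only the masking logic mirrors the op() method above.
class Input(object):
    def __init__(self):
        self.X = T.matrix('X')
    def op(self, state):
        return self.X

class Dropout(object):
    def __init__(self, l_in, p_drop):
        self.l_in, self.p_drop = l_in, p_drop
    def op(self, state):
        X = self.l_in.op(state=state)
        retain_prob = 1 - self.p_drop
        if state['dropout']:
            X = X / retain_prob * t_rng.binomial(
                X.shape, p=retain_prob, dtype=theano.config.floatX)
        return X

inp = Input()
layer = Dropout(inp, p_drop=0.5)
# Two graphs from the same layer: dropout active for training, off for test.
f_train = theano.function([inp.X], layer.op(state={'dropout': True}))
f_test = theano.function([inp.X], layer.op(state={'dropout': False}))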