def __init__(self, shapes, queue, noise=None):
    """Build the symbolic graph for a stacked-RBM auto-encoder.

    Parameters
    ----------
    shapes : sequence of (n_units, unit_type, v_unit) triples; shapes[0]
        describes the visible layer, each following entry one hidden RBM.
    queue : communication queue used for logging.
    noise : None, 'MASK' or 'GAUSS' — corruption applied to the code
        before the fine-tuning decode pass.
    """
    # Communication queue for the log
    self.queue = queue

    # Symbolic variables for the input, corruption level
    # and learning rate
    X = T.matrix('X')
    self.inputs = X
    cl = T.scalar(dtype=theano.config.floatX, name='corruption level')
    self.cl = cl
    lr = T.scalar(dtype=theano.config.floatX, name='learning rate')
    self.lr = lr

    # Random number generators used for the noise
    np_rng = np.random.RandomState()
    theano_rng = RandomStreams(np_rng.randint(2**30))

    # Layers initialisation, cast the shape
    # and fill the layers list.
    self.layers = []
    self.mask = []
    self.shapes = shapes
    (nv, _, _) = shapes[0]
    self.params = []
    self.params_ft = []
    output = X
    sample_up = X

    # Compute the dropout training function
    p_do = 0.5
    self.p_do = p_do
    dropout_out = X
    rec_do = X

    # Build the layers, linking each one to the next.
    # Fill the param list.
    for i, s in enumerate(shapes[1:]):
        lay = RBM(nv, s[0], output, v_unit=s[2], unit_type=s[1])
        self.layers.append(lay)
        self.params += lay.params
        self.params_ft += lay.params_ft
        nv = s[0]
        output = lay.up(output)
        sample_up = lay.sample_h_given_v(sample_up)
        # Dropout mask is applied between layers only (not on the input);
        # rec_do carries the expected-value rescaling by p_do.
        if i != 0:
            mask = theano_rng.binomial(size=dropout_out.shape,
                                       n=1, p=p_do)
            dropout_out *= mask
            rec_do *= p_do
        dropout_out = lay.up(dropout_out)
        rec_do = lay.up(rec_do)

    # Define the up functions
    self.code = output
    self.sample_up = sample_up

    # Prepare the variables to decode
    self.N = len(self.layers)
    recstr = output
    decode = X
    sample_down = X
    sample = sample_up

    # Add noise to the output for the fine tuning part
    self.noise = noise
    if self.noise == 'MASK':
        fine_tune = T.clip(output *
                           theano_rng.binomial(size=output.shape,
                                               n=1, p=1 - cl), 0., 1.)
    elif self.noise == 'GAUSS':
        fine_tune = T.clip(output +
                           theano_rng.normal(size=output.shape,
                                             std=cl), 0., 1.)
    else:
        fine_tune = output

    # Down sample every variable
    for i in range(1, self.N + 1):
        lay = self.layers[self.N - i]
        recstr = lay.down(recstr)
        decode = lay.down(decode)
        fine_tune = lay.down(fine_tune)
        # BUGFIX: was `sample_dowm = ...` (dead variable); sample_down
        # never descended through the layers and stayed equal to X.
        sample_down = lay.sample_v_given_h(sample_down)
        sample = lay.sample_v_given_h(sample)
        # Mirror the up pass: mask between layers only, not on the output.
        if i != self.N:
            rec_do *= p_do
            mask = theano_rng.binomial(size=dropout_out.shape,
                                       n=1, p=p_do)
            dropout_out *= mask
        dropout_out = lay.down(dropout_out)
        rec_do = lay.down(rec_do)

    # Define the sampling and decoding functions
    self.recstr = recstr
    self.decode = decode
    self.ft = fine_tune
    self.do = dropout_out
    self.sample_down = sample_down
    self.sample = sample
    self.compile()
def __init__(self, shapes, queue, noise=None):
    """Build the symbolic graph for a stacked-RBM auto-encoder.

    Parameters
    ----------
    shapes : sequence of (n_units, unit_type, v_unit) triples; shapes[0]
        describes the visible layer, each following entry one hidden RBM.
    queue : communication queue used for logging.
    noise : None, 'MASK' or 'GAUSS' — corruption applied to the code
        before the fine-tuning decode pass.
    """
    # Communication queue for the log
    self.queue = queue

    # Symbolic variables for the input, corruption level
    # and learning rate
    X = T.matrix('X')
    self.inputs = X
    cl = T.scalar(dtype=theano.config.floatX, name='corruption level')
    self.cl = cl
    lr = T.scalar(dtype=theano.config.floatX, name='learning rate')
    self.lr = lr

    # Random number generators used for the noise
    np_rng = np.random.RandomState()
    theano_rng = RandomStreams(np_rng.randint(2**30))

    # Layers initialisation, cast the shape
    # and fill the layers list.
    self.layers = []
    self.mask = []
    self.shapes = shapes
    (nv, _, _) = shapes[0]
    self.params = []
    self.params_ft = []
    output = X
    sample_up = X

    # Compute the dropout training function
    p_do = 0.5
    self.p_do = p_do
    dropout_out = X
    rec_do = X

    # Build the layers, linking each one to the next.
    # Fill the param list.
    for i, s in enumerate(shapes[1:]):
        lay = RBM(nv, s[0], output, v_unit=s[2], unit_type=s[1])
        self.layers.append(lay)
        self.params += lay.params
        self.params_ft += lay.params_ft
        nv = s[0]
        output = lay.up(output)
        sample_up = lay.sample_h_given_v(sample_up)
        # Dropout mask is applied between layers only (not on the input);
        # rec_do carries the expected-value rescaling by p_do.
        if i != 0:
            mask = theano_rng.binomial(size=dropout_out.shape,
                                       n=1, p=p_do)
            dropout_out *= mask
            rec_do *= p_do
        dropout_out = lay.up(dropout_out)
        rec_do = lay.up(rec_do)

    # Define the up functions
    self.code = output
    self.sample_up = sample_up

    # Prepare the variables to decode
    self.N = len(self.layers)
    recstr = output
    decode = X
    sample_down = X
    sample = sample_up

    # Add noise to the output for the fine tuning part
    self.noise = noise
    if self.noise == 'MASK':
        fine_tune = T.clip(output *
                           theano_rng.binomial(size=output.shape,
                                               n=1, p=1 - cl), 0., 1.)
    elif self.noise == 'GAUSS':
        fine_tune = T.clip(output +
                           theano_rng.normal(size=output.shape,
                                             std=cl), 0., 1.)
    else:
        fine_tune = output

    # Down sample every variable
    for i in range(1, self.N + 1):
        lay = self.layers[self.N - i]
        recstr = lay.down(recstr)
        decode = lay.down(decode)
        fine_tune = lay.down(fine_tune)
        # BUGFIX: was `sample_dowm = ...` (dead variable); sample_down
        # never descended through the layers and stayed equal to X.
        sample_down = lay.sample_v_given_h(sample_down)
        sample = lay.sample_v_given_h(sample)
        # Mirror the up pass: mask between layers only, not on the output.
        if i != self.N:
            rec_do *= p_do
            mask = theano_rng.binomial(size=dropout_out.shape,
                                       n=1, p=p_do)
            dropout_out *= mask
        dropout_out = lay.down(dropout_out)
        rec_do = lay.down(rec_do)

    # Define the sampling and decoding functions
    self.recstr = recstr
    self.decode = decode
    self.ft = fine_tune
    self.do = dropout_out
    self.sample_down = sample_down
    self.sample = sample
    self.compile()