def __init__(self, activation_fcn, rng=None, shape=None):
    """
    Build a stochastic layer for the given activation type.

    :param activation_fcn: A string identifying the type of activation function.
        {'bernoulli', 'gaussian', 'adaptive_gaussian', 'rect-lin'}
    :param rng: Numpy random number generator for the stochastic component
    :param shape: Optionally, reshape the output to this shape.
    """
    # Derive a Theano random stream from the numpy generator; an unseeded
    # stream is used when no generator was supplied.
    seed = rng.randint(1e9) if rng is not None else None
    stream = RandomStreams(seed)
    self.activation_fcn = activation_fcn
    # Resolve the activation name to its (smooth, stochastic, free-energy,
    # params) quadruple and store each piece on the instance.
    fcns = self._stochastic_layer_name_to_functions(activation_fcn, stream)
    (self._smooth_activation_fcn,
     self._stochastic_activation_fcn,
     self._free_energy_fcn,
     self._params) = fcns
    self._shape = shape
def __init__(self, activation_fcn, rng = None, shape = None):
    """
    :param activation_fcn: A string identifying the type of activation function.
        {'bernoulli', 'gaussian', 'adaptive_gaussian', 'rect-lin'}
    :param rng: Numpy random number generator for the stochastic component
    :param shape: Optionally, reshape the output to this shape.
    """
    # NOTE(review): this is a byte-for-byte duplicate of an __init__ defined
    # earlier in this file — confirm which copy is actually live and remove
    # the other.
    # Wrap the numpy generator's seed in a Theano RandomStreams; the stream
    # is unseeded when rng is None.
    rng = RandomStreams(rng.randint(1e9) if rng is not None else None)
    self.activation_fcn = activation_fcn
    # Resolve the activation name to its (smooth, stochastic, free-energy,
    # params) quadruple.
    self._smooth_activation_fcn, self._stochastic_activation_fcn, self._free_energy_fcn, self._params = \
        self._stochastic_layer_name_to_functions(activation_fcn, rng)
    self._shape = shape
def test_indeterministic_reconstruct_scan_vs_theano(self):
    """Run k Gibbs steps through theano.scan and again via an explicit
    numpy/python loop, with identically seeded RandomStreams, so the two
    random draw sequences can be compared.  The exact order (and count) of
    binomial draws per step must match between the two passes, otherwise
    the streams desynchronize — do not reorder statements here."""
    self.setUpRBM()
    self.assertTrue(self.rbm.h_n == 10)
    rbm = self.rbm
    # Pull weights/biases out as plain arrays for the numpy re-implementation.
    W = rbm.W.get_value(borrow=True)
    U = rbm.U.get_value(borrow=True)
    vb1 = rbm.v_bias.eval()
    vb2 = rbm.v_bias2.eval()
    hb = rbm.h_bias.eval()
    k = 100  # number of Gibbs steps

    # Initial values: x1 is clamped throughout; x2 starts as random binary noise.
    rand = np.random.RandomState(123)
    rand = RandomStreams(rand.randint(2 ** 30))
    x1 = self.rbmx1
    x2 = rand.binomial(size=self.rbmx2.shape, n=1, p=0.5, dtype=t_float_x).eval()

    def gibbs(ux, u2):
        # One Gibbs step: propagate up from both visible layers, sample the
        # hiddens, then propagate down to each visible layer and sample.
        h, hp = rbm.prop_up(ux, u2)
        hs = rbm.rand.binomial(size=hp.shape, n=1, p=hp, dtype=t_float_x)
        v, vp = rbm.prop_down(hs)
        vs = rbm.rand.binomial(size=vp.shape, n=1, p=vp, dtype=t_float_x)
        v2, v2p = rbm.prop_down_assoc(hs)
        v2s = rbm.rand.binomial(size=v2p.shape, n=1, p=v2p, dtype=t_float_x)
        # ux is returned unchanged in slot 5 so scan keeps the first visible
        # layer clamped; v2s in the last slot feeds back as next step's u2.
        # NOTE(review): slot 4 returns v2p (not vp) and vs is sampled but
        # never returned — presumably deliberate (the vs draw keeps the
        # stream in sync), but worth confirming.
        return [h, hp, hs, v, v2p, ux, v2, v2p, v2s]

    # ---- THEANO: k Gibbs steps via scan ----
    x = T.dmatrix("x")
    y = T.dmatrix("y")
    x_start = x
    y_start = y
    # Only slots 5 (x, clamped) and 8 (y, reconstructed) are recurrent;
    # the None entries are plain per-step outputs.
    ( res, updates ) = theano.scan( gibbs, outputs_info=[None, None, None, None, None, x_start, None, None, y_start], n_steps=k )
    f = theano.function([x, y], res, updates=updates)
    # Re-seed so the theano pass and the numpy pass below draw the same sequence.
    rand = np.random.RandomState(1234)
    rand = RandomStreams(rand.randint(2 ** 30))
    rbm.rand = rand
    [h, hp, hs, v, vp, vs, v2, v2p, v2s] = f(self.rbmx1, x2)
    # (debug prints removed: they dumped h, hp, hs, v, vp, vs, v2, v2p, v2s)

    # =============== NUMPY ================
    # Same seed as the theano pass so the binomial draws line up one-to-one.
    rand = np.random.RandomState(1234)
    rand = RandomStreams(rand.randint(2 ** 30))
    for i in xrange(0, k):
        # Sample h
        h, ph = np_prop_up(x1, W, hb, x2, U)
        # sample using same seed
        hs = rand.binomial(size=ph.shape, n=1, p=ph, dtype=t_float_x).eval()
        # Sample x, x2
        u, pu = np_prop_down(hs, W, vb1)
        # dummy call, just to adjust seed — mirrors the unused vs draw in gibbs
        us = rand.binomial(size=pu.shape, n=1, p=pu, dtype=t_float_x).eval()
        u2, pu2 = np_prop_down(hs, U, vb2)
        x2 = pu2  # NOTE(review): immediately overwritten below — dead store?
        x2 = \
            rand.binomial(size=pu2.shape, n=1, p=pu2, dtype=t_float_x).eval()
def test_indeterministic_reconstruct_scan_vs_theano(self):
    """Compare k Gibbs-sampling steps computed by theano.scan against an
    explicit numpy loop driven by an identically seeded RandomStreams.
    Statement order is load-bearing: every binomial draw advances the
    shared stream, so the draw sequence must match between both passes.

    NOTE(review): this method is a byte-for-byte duplicate of an identically
    named test defined earlier in this file — only one copy can be live on
    the class; confirm which and delete the other."""
    self.setUpRBM()
    self.assertTrue(self.rbm.h_n == 10)
    rbm = self.rbm
    # Extract parameters as plain arrays for the numpy pass.
    W = rbm.W.get_value(borrow=True)
    U = rbm.U.get_value(borrow=True)
    vb1 = rbm.v_bias.eval()
    vb2 = rbm.v_bias2.eval()
    hb = rbm.h_bias.eval()
    k = 100  # Gibbs steps

    # Initial values: x1 clamped, x2 initialized to random binary noise.
    rand = np.random.RandomState(123)
    rand = RandomStreams(rand.randint(2**30))
    x1 = self.rbmx1
    x2 = rand.binomial(size=self.rbmx2.shape, n=1, p=0.5, dtype=t_float_x).eval()

    def gibbs(ux, u2):
        # Single Gibbs step: up from (ux, u2), sample hiddens, down to both
        # visible layers, sample each.
        h, hp = rbm.prop_up(ux, u2)
        hs = rbm.rand.binomial(size=hp.shape, n=1, p=hp, dtype=t_float_x)
        v, vp = rbm.prop_down(hs)
        vs = rbm.rand.binomial(size=vp.shape, n=1, p=vp, dtype=t_float_x)
        v2, v2p = rbm.prop_down_assoc(hs)
        v2s = rbm.rand.binomial(size=v2p.shape, n=1, p=v2p, dtype=t_float_x)
        # Slot 5 passes ux through unchanged (first visible layer stays
        # clamped across scan iterations); slot 8's v2s recurs as next u2.
        # NOTE(review): v2p occupies slot 4 where vp might be expected, and
        # vs is drawn but dropped (likely only to advance the RNG) — confirm.
        return [h, hp, hs, v, v2p, ux, v2, v2p, v2s]

    # ---- THEANO pass ----
    x = T.dmatrix("x")
    y = T.dmatrix("y")
    x_start = x
    y_start = y
    # Recurrent slots: 5 (x_start, clamped) and 8 (y_start, evolving).
    (res, updates) = theano.scan(gibbs, outputs_info=[ None, None, None, None, None, x_start, None, None, y_start ], n_steps=k)
    f = theano.function([x, y], res, updates=updates)
    # Fresh, known seed shared with the numpy pass below.
    rand = np.random.RandomState(1234)
    rand = RandomStreams(rand.randint(2**30))
    rbm.rand = rand
    [h, hp, hs, v, vp, vs, v2, v2p, v2s] = f(self.rbmx1, x2)
    # (debug prints removed: they dumped h, hp, hs, v, vp, vs, v2, v2p, v2s)

    # =============== NUMPY ================
    # Identical seed so each .binomial() here pairs with a draw in gibbs.
    rand = np.random.RandomState(1234)
    rand = RandomStreams(rand.randint(2**30))
    for i in xrange(0, k):
        # Sample h
        h, ph = np_prop_up(x1, W, hb, x2, U)
        # sample using same seed
        hs = rand.binomial(size=ph.shape, n=1, p=ph, dtype=t_float_x).eval()
        # Sample x, x2
        u, pu = np_prop_down(hs, W, vb1)
        # dummy call, just to adjust seed — counterpart of the discarded vs draw
        us = rand.binomial(size=pu.shape, n=1, p=pu, dtype=t_float_x).eval()
        u2, pu2 = np_prop_down(hs, U, vb2)
        x2 = pu2  # NOTE(review): dead store — overwritten by the sample below
        x2 = \
            rand.binomial(size=pu2.shape, n=1, p=pu2, dtype=t_float_x).eval()