def transform(self, X, y=None):
    test_set_x, _ = dataset.shared_dataset(global_theano, global_T, X,
                                            borrow=True)
    # pick random test examples with which to initialize the persistent chain
    persistent_vis_chain = global_theano.shared(
        np.asarray(test_set_x.get_value(borrow=True),
                   dtype=global_theano.config.floatX))

    [presig_hids, hid_mfs, hid_samples,
     presig_vis, vis_mfs, vis_samples], updates = \
        global_theano.scan(
            self.rbm.gibbs_vhv,
            outputs_info=[None, None, None, None, None, persistent_vis_chain],
            n_steps=1)

    # add to updates the shared variable that takes care of our persistent
    # chain
    #updates.update({persistent_vis_chain: vis_samples[-1]})

    # construct the function that implements our persistent chain.
    # we generate the "mean field" activations for plotting and the actual
    # samples for reinitializing the state of our persistent chain
    sample_fn = global_theano.function(
        [],
        [hid_mfs[-1], hid_samples[-1], vis_mfs[-1], vis_samples[-1]],
        name='sample_fn')

    ident = random.randint(0, 500)
    all_hid_mfs = []
    all_vis_sample = []
    all_hid_sample = []
    for i in range(self.n_resamples):
        hid_mfs, hid_sample, vis_mfs, vis_sample = sample_fn()
        all_hid_mfs.append(hid_mfs)
        all_hid_sample.append(hid_sample)
        all_vis_sample.append(vis_sample)

    hidden_mean_field = np.mean(all_hid_mfs, axis=0)
    visible_mean_field = np.mean(all_vis_sample, axis=0)

    print "all_hid_mfs shape", np.shape(all_hid_mfs)
    print "Hidden mean field", np.shape(hidden_mean_field)
    print "Shapes", np.shape(hidden_mean_field), np.shape(all_hid_mfs)

    #self.sample_all(X, all_hid_sample, all_vis_sample, ident)
    #return hidden_mean_field
    return visible_mean_field
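
# The scan above drives self.rbm.gibbs_vhv symbolically; the sketch below is
# only an illustration, in plain NumPy and not used by this class, of the
# single v -> h -> v Gibbs step such an RBM performs. It assumes the standard
# binary RBM formulation in which gibbs_vhv returns the pre-sigmoid
# activations, mean-field probabilities and binary samples for the hidden and
# visible layers. W (n_visible x n_hidden), hbias and vbias are stand-in
# arguments here, not attributes read from self.rbm.
def _gibbs_vhv_sketch(v0, W, hbias, vbias, rng):
    sigmoid = lambda x: 1.0 / (1.0 + np.exp(-x))
    # up-pass: visible -> hidden
    presig_h = np.dot(v0, W) + hbias           # pre-sigmoid hidden activation
    h_mf = sigmoid(presig_h)                   # hidden mean-field probabilities
    h_sample = (rng.uniform(size=h_mf.shape) < h_mf).astype(v0.dtype)
    # down-pass: hidden -> visible
    presig_v = np.dot(h_sample, W.T) + vbias   # pre-sigmoid visible activation
    v_mf = sigmoid(presig_v)                   # visible mean-field probabilities
    v_sample = (rng.uniform(size=v_mf.shape) < v_mf).astype(v0.dtype)
    # same six outputs, in the same order, as the symbolic gibbs_vhv above
    return presig_h, h_mf, h_sample, presig_v, v_mf, v_sample
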
def fit(self, X, y=None):
    global global_theano
    global global_T
    global global_RandomStreams

    log.debug(u"RBM Fitting with lr={0} epochs={1} n_hidden={2}".format(
        self.learning_rate, self.training_epochs, self.n_hidden))

    ## This guard prevents us from importing theano more than once, which
    ## matters because the import performs global initialization,
    ## especially for cuda
    if not global_theano:
        log.debug(u"Importing Theano")
        import theano
        import theano.tensor as T
        from theano.tensor.shared_randomstreams import RandomStreams
        theano.config.warn.subtensor_merge_bug = False
        global_theano = theano
        global_T = T
        global_RandomStreams = RandomStreams

    self.rng = np.random.RandomState(123456)
    self.theano_rng = global_RandomStreams(self.rng.randint(2 ** 30))
    self.n_visible = np.shape(X)[1]
    #log.debug(u"RBM Featureset has {0} visible nodes".format(
    #    self.n_visible))

    train_x, train_y = dataset.shared_dataset(global_theano, global_T,
                                              X, y, borrow=True)
    self.init_objects(train_x)
    self.train(train_x)
    return self
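
# Usage sketch: the class name RBMTransformer below is hypothetical (the class
# definition is not shown in this excerpt). It assumes the scikit-learn-style
# fit/transform convention that the methods above follow, and that the
# constructor accepts the hyperparameters they reference.
#
#   X = np.random.rand(1000, 784).astype('float32')   # toy data in [0, 1]
#   rbm = RBMTransformer(learning_rate=0.1, training_epochs=15,
#                        n_hidden=500, n_resamples=10)
#   rbm.fit(X)                    # trains the RBM on the visible data
#   features = rbm.transform(X)   # averaged visible mean-field activations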