def fit(self, x, batch_size=100, n_epoch=10, callbacks=None,
        validation_data=None, shuffle=True, initial_epoch=0):
    """Trains the model for a fixed number of epochs (iterations on a dataset).

    # Arguments
        x: Theano shared array of training data.
        batch_size: integer. Number of samples per gradient update.
        n_epoch: integer, the number of times to iterate
            over the training data arrays.
        callbacks: list of callbacks to be called during training.
        validation_data: Theano shared array of data on which to evaluate
            the loss and any model metrics at the end of each epoch.
            The model will not be trained on this data.
        shuffle: boolean, whether to shuffle the training data
            before each epoch.
        initial_epoch: epoch at which to start training
            (useful for resuming a previous training run).

    # Returns
        A `History` instance. Its `history` attribute contains
        all information collected during training.
    """
    self.train_data = x
    self.n_train_sample = B.eval(x.shape[0])
    self.validation_data = validation_data

    # makes the generic indices to access data
    self.train_index = B.placeholder(shape=(batch_size,), dtype=B.intx(),
                                     name='train_index')

    # makes the training function
    self._make_train_function()
    f = self.train_function

    # preps for validation
    out_labels = ['cost']
    if validation_data:
        self.valid_index = B.placeholder(shape=(batch_size,), dtype=B.intx(),
                                         name='valid_index')
        callback_metrics = copy.copy(out_labels) + ['val_' + n for n in out_labels]
        self._make_validation_function()
        val_f = self.validation_function
    else:
        callback_metrics = copy.copy(out_labels)
        val_f = None

    # delegates the logic to _fit_loop
    return self._fit_loop(f, out_labels=out_labels, batch_size=batch_size,
                          n_epoch=n_epoch, callbacks=callbacks, val_f=val_f,
                          shuffle=shuffle, callback_metrics=callback_metrics,
                          initial_epoch=initial_epoch)
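# A minimal usage sketch for `fit` (illustrative only): `model` stands in for
# an already-built instance of this class, and the data is wrapped as a Theano
# shared array via `B.variable`, matching the `B.eval(x.shape[0])` access
# above. The `history.history` keys follow from `out_labels = ['cost']` and
# the `'val_' + n` metric names built in the function body.
#
#     x_train = B.variable(np.random.rand(1000, 784), name='x_train')
#     x_valid = B.variable(np.random.rand(100, 784), name='x_valid')
#     history = model.fit(x_train, batch_size=100, n_epoch=5,
#                         validation_data=x_valid, shuffle=True)
#     print(history.history['cost'])      # per-epoch training cost
#     print(history.history['val_cost'])  # per-epoch validation cost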
def predict_on_batch(self, x):
    """Computes the model outputs for a single batch of data.

    # Arguments
        x: Theano shared array of data.

    # Returns
        List of arrays of the model's test-function outputs
        for the batch.
    """
    # makes the generic indices to access data
    batch_size = B.eval(x.shape[0])
    self.test_index = B.placeholder(shape=(batch_size,), dtype=B.intx(),
                                    name='test_index')
    self.test_data = x
    index = np.arange(batch_size)

    # makes the test function and runs it over the full batch
    self._make_test_function()
    outputs = self.test_function(index)

    return outputs
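# Usage sketch (illustrative): as in `fit`, the batch is passed as a Theano
# shared array, and the compiled test function is evaluated over every row.
#
#     x_batch = B.variable(np.random.rand(32, 784), name='x_batch')
#     outputs = model.predict_on_batch(x_batch)  # list of output arrays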
def run_logZ(self):
    """Performs the calculations of the AIS runs.

    Must be called before computing any estimates.
    """
    # initial sample
    sample_ls = self.init_sample_ls

    # this is the initial beta=0 case
    log_ais_w = B.eval(self.dbm_a.free_energy_sumover_even(sample_ls, 1.0))
    log_ais_w = B.variable(log_ais_w, name='log_ais_w')
    index = B.variable(1, name='index', dtype=B.intx())

    # loops over the intermediate betas
    scan_out, updates = B.scan(self._update,
                               outputs_info=[log_ais_w, index] + sample_ls,
                               n_steps=self.n_betas - 2,
                               name='scan_ais')
    log_ais_w = scan_out[0][-1]
    sample_ls = [s[-1] for s in scan_out[2:]]

    # this is the final beta=1 case
    log_ais_w -= self.dbm_b.free_energy_sumover_even(sample_ls, 1.0)

    logZ_fn = B.function([], [log_ais_w], updates=updates)
    self.logZ = self.logZa + logZ_fn()
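# Sketch of the surrounding AIS workflow (the constructor name and its
# arguments are assumptions for illustration; only `run_logZ`, `logZ`, and
# `logZa` appear in the code above). The run anneals from beta=0 (base model
# `dbm_a`) to beta=1 (target model `dbm_b`) across `n_betas` temperatures.
#
#     ais = AIS(dbm_a=base_model, dbm_b=target_model, n_betas=10000)
#     ais.run_logZ()   # must run before reading any estimates
#     print(ais.logZ)  # log Z estimate: logZa plus accumulated AIS weights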
def get_index_examples(self, num_each):
    """Returns an index array with `num_each` examples of each label."""
    index = []
    for i in range(self.num_cat):
        # first `num_each` positions where the label equals category i
        index += list(np.where(i == self.lbl)[0][0:num_each])

    return np.array(index, dtype=B.intx())
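# Usage sketch (illustrative): with `self.lbl` holding one integer label per
# sample and `self.num_cat` categories, this gathers a balanced index set.
#
#     index = data.get_index_examples(num_each=10)  # 10 samples per category
#     x_subset = x_data[index]                      # fancy-index the dataset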