def _train_model(self, train_set, train_labels,
                 validation_set, validation_labels):
    """Train the model.

    :param train_set: training set
    :param train_labels: training labels
    :param validation_set: validation set
    :param validation_labels: validation labels
    :return: self
    """
    # Pair samples with labels once; re-shuffle the pairs every epoch.
    shuff = list(zip(train_set, train_labels))

    for i in range(self.num_epochs):
        np.random.shuffle(shuff)
        batches = list(utilities.gen_batches(shuff, self.batch_size))

        for batch in batches:
            x_batch, y_batch = zip(*batch)
            self.tf_session.run(
                self.train_step,
                feed_dict={self.input_data: x_batch,
                           self.input_labels: y_batch})

        if validation_set is not None:
            feed = {self.input_data: validation_set,
                    self.input_labels: validation_labels}
            self._run_supervised_validation_error_and_summaries(i, feed)
def _run_train_step(self, train_X):
    """Run a training step.

    A training step consists of randomly corrupting the training set,
    shuffling it, dividing it into batches and running the optimizer
    for each batch.

    Parameters
    ----------
    train_X : array_like
        Training data, shape (num_samples, num_features).

    Returns
    -------
    self
    """
    x_corrupted = utilities.corrupt_input(
        train_X, self.tf_session, self.corr_type, self.corr_frac)

    # Keep each clean sample aligned with its corrupted version while shuffling.
    shuff = list(zip(train_X, x_corrupted))
    np.random.shuffle(shuff)

    batches = list(utilities.gen_batches(shuff, self.batch_size))

    for batch in batches:
        x_batch, x_corr_batch = zip(*batch)
        tr_feed = {self.input_data_orig: x_batch,
                   self.input_data: x_corr_batch}
        self.tf_session.run(self.train_step, feed_dict=tr_feed)
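# Every training routine in this listing batches its (shuffled) data with
# utilities.gen_batches, which is not part of this excerpt. A minimal sketch of
# what it presumably does -- yield consecutive slices of length `batch_size` --
# is given below; treat it as an illustrative assumption, not the library's
# actual implementation.
def gen_batches(data, batch_size):
    """Yield successive `batch_size`-sized slices of `data`."""
    data = list(data)
    for i in range(0, len(data), batch_size):
        yield data[i:i + batch_size]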
def _train_model(self, train_set, train_ref,
                 validation_set, validation_ref):
    """Train the model.

    :param train_set: training set
    :param train_ref: training reference data
    :param validation_set: validation set
    :param validation_ref: validation reference data
    :return: self
    """
    shuff = list(zip(train_set, train_ref))

    for i in range(self.num_epochs):
        np.random.shuffle(shuff)
        batches = list(utilities.gen_batches(shuff, self.batch_size))

        for batch in batches:
            x_batch, y_batch = zip(*batch)
            self.tf_session.run(
                self.train_step,
                feed_dict={self.input_data: x_batch,
                           self.input_labels: y_batch,
                           self.keep_prob: self.dropout})

        if validation_set is not None:
            feed = {self.input_data: validation_set,
                    self.input_labels: validation_ref,
                    self.keep_prob: 1}
            self._run_validation_error_and_summaries(i, feed)
def _train_model(self, train_set, train_ref,
                 validation_set, validation_ref):
    """Train the model.

    :param train_set: training set
    :param train_ref: training reference data
    :param validation_set: validation set
    :param validation_ref: validation reference data
    :return: self
    """
    shuff = list(zip(train_set, train_ref))

    for i in range(self.num_epochs):
        np.random.shuffle(shuff)
        batches = list(utilities.gen_batches(shuff, self.batch_size))

        for batch in batches:
            x_batch, y_batch = zip(*batch)
            self.tf_session.run(
                self.train_step,
                feed_dict={self.input_data: x_batch,
                           self.input_labels: y_batch,
                           self.keep_prob: self.dropout})

        if validation_set is not None:
            feed = {self.input_data: validation_set,
                    self.input_labels: validation_ref,
                    self.keep_prob: 1}
            self._run_validation_error_and_summaries(i, feed)
def _train_model(self, train_set, train_labels,
                 validation_set, validation_labels):
    """Train the model.

    :param train_set: training set
    :param train_labels: training labels
    :param validation_set: validation set
    :param validation_labels: validation labels
    :return: self
    """
    shuff = list(zip(train_set, train_labels))

    pbar = tqdm(range(self.num_epochs))
    for i in pbar:
        np.random.shuffle(shuff)
        batches = list(utilities.gen_batches(shuff, self.batch_size))

        for batch in batches:
            x_batch, y_batch = zip(*batch)
            self.tf_session.run(
                self.train_step,
                feed_dict={self.input_data: x_batch,
                           self.input_labels: y_batch,
                           self.keep_prob: self.dropout})

        if validation_set is not None:
            feed = {self.input_data: validation_set,
                    self.input_labels: validation_labels,
                    self.keep_prob: 1}
            acc = tf_utils.run_summaries(
                self.tf_session, self.tf_merged_summaries,
                self.tf_summary_writer, i, feed, self.accuracy)
            pbar.set_description("Accuracy: %s" % (acc))
def _train_model(self, train_set, train_ref,
                 validation_set, validation_ref):
    """Train the model.

    :param train_set: training set
    :param train_ref: training reference data
    :param validation_set: validation set
    :param validation_ref: validation reference data
    :return: self
    """
    shuff = list(zip(train_set, train_ref))

    pbar = tqdm(range(self.num_epochs))
    for i in pbar:
        np.random.shuffle(shuff)
        batches = list(utilities.gen_batches(shuff, self.batch_size))

        for batch in batches:
            x_batch, y_batch = zip(*batch)
            self.tf_session.run(
                self.train_step,
                feed_dict={self.input_data: x_batch,
                           self.input_labels: y_batch,
                           self.keep_prob: self.dropout})

        if validation_set is not None:
            feed = {self.input_data: validation_set,
                    self.input_labels: validation_ref,
                    self.keep_prob: 1}
            err = tf_utils.run_summaries(
                self.tf_session, self.tf_merged_summaries,
                self.tf_summary_writer, i, feed, self.cost)
            pbar.set_description("Reconstruction loss: %s" % (err))
def _train_model(self, train_set, train_labels,
                 validation_set, validation_labels):
    """Train the model.

    :param train_set: training set
    :param train_labels: training labels
    :param validation_set: validation set
    :param validation_labels: validation labels
    :return: self
    """
    pbar = tqdm(range(self.num_epochs))
    for i in pbar:
        shuff = list(zip(train_set, train_labels))
        np.random.shuffle(shuff)
        batches = list(utilities.gen_batches(shuff, self.batch_size))

        for batch in batches:
            x_batch, y_batch = zip(*batch)
            self.tf_session.run(
                self.train_step,
                feed_dict={self.input_data: x_batch,
                           self.input_labels: y_batch})

        if validation_set is not None:
            feed = {self.input_data: validation_set,
                    self.input_labels: validation_labels}
            acc = tf_utils.run_summaries(
                self.tf_session, self.tf_merged_summaries,
                self.tf_summary_writer, i, feed, self.accuracy)
            pbar.set_description("Accuracy: %s" % (acc))
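# Several of the variants report validation metrics through
# tf_utils.run_summaries, which is not shown in this excerpt. A plausible
# sketch, under the assumption that it evaluates the merged summaries together
# with one metric tensor, logs the summary at the given epoch, and returns the
# metric value (TF1-style summary API):
def run_summaries(session, merged_summaries, summary_writer, epoch,
                  feed_dict, metric_tensor):
    """Evaluate summaries plus a metric, log the summary, return the metric."""
    summary_str, metric = session.run([merged_summaries, metric_tensor],
                                      feed_dict=feed_dict)
    summary_writer.add_summary(summary_str, epoch)
    return metric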
def _train_model(self, train_set, train_labels,
                 validation_set, validation_labels):
    """Train the model.

    :param train_set: training set
    :param train_labels: training labels
    :param validation_set: validation set
    :param validation_labels: validation labels
    :return: self
    """
    # Batch over the shuffled pairs so the per-epoch shuffle actually matters.
    shuff = list(zip(train_set, train_labels))

    for i in range(self.num_epochs):
        np.random.shuffle(shuff)
        batches = list(utilities.gen_batches(shuff, self.batch_size))

        for batch in batches:
            x_batch, y_batch = zip(*batch)
            self.tf_session.run(
                self.train_step,
                feed_dict={self.input_data: x_batch,
                           self.input_labels: y_batch})

        if validation_set is not None:
            feed = {self.input_data: validation_set,
                    self.input_labels: validation_labels}
            self._run_validation_error_and_summaries(i, feed)
def _run_train_step(self, train_set):
    """Run a training step.

    A training step consists of randomly shuffling the training set,
    dividing it into batches and running the variable update nodes
    for each batch.

    :param train_set: training set
    :return: self
    """
    np.random.shuffle(train_set)

    batches = list(utilities.gen_batches(train_set, self.batch_size))
    updates = [self.w_upd8, self.bh_upd8, self.bv_upd8]

    for batch in batches:
        self.tf_session.run(updates,
                            feed_dict=self._create_feed_dict(batch))
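# The RBM-style variant above feeds each batch through self._create_feed_dict,
# which is not part of this excerpt. At a minimum it must bind the batch to the
# visible-units placeholder; any extra sampling placeholders the real model
# uses would be added to the same dictionary. A minimal, assumed version:
def _create_feed_dict(self, data):
    """Bind a batch of visible-unit data to the model's input placeholder."""
    return {self.input_data: data}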
def _run_train_step(self, train_set):
    """Run a training step.

    A training step consists of randomly corrupting the training set,
    shuffling it, dividing it into batches and running the optimizer
    for each batch.

    :param train_set: training set
    :return: self
    """
    x_corrupted = self._corrupt_input(train_set)

    shuff = list(zip(train_set, x_corrupted))
    np.random.shuffle(shuff)

    batches = list(utilities.gen_batches(shuff, self.batch_size))

    for batch in batches:
        x_batch, x_corr_batch = zip(*batch)
        tr_feed = {self.input_data: x_batch,
                   self.input_data_corr: x_corr_batch}
        self.tf_session.run(self.train_step, feed_dict=tr_feed)
def _run_train_step(self, train_set):
    """Run a training step.

    A training step consists of randomly corrupting the training set,
    shuffling it, dividing it into batches and running the optimizer
    for each batch.

    :param train_set: training set
    :return: self
    """
    x_corrupted = self._corrupt_input(train_set)

    shuff = list(zip(train_set, x_corrupted))
    np.random.shuffle(shuff)

    batches = list(utilities.gen_batches(shuff, self.batch_size))

    for batch in batches:
        x_batch, x_corr_batch = zip(*batch)
        tr_feed = {self.input_data_orig: x_batch,
                   self.input_data: x_corr_batch}
        self.tf_session.run(self.train_step, feed_dict=tr_feed)
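# The denoising variants rely on self._corrupt_input / utilities.corrupt_input,
# driven by corr_type and corr_frac; neither helper appears in this excerpt.
# As an illustration only, masking noise (a common corruption for denoising
# autoencoders) zeroes a corr_frac fraction of the features of each sample:
import numpy as np

def masking_noise(data, corr_frac):
    """Return a copy of `data` with a `corr_frac` fraction of each row zeroed."""
    corrupted = np.array(data, dtype=float)  # work on a copy
    n_corrupt = int(round(corr_frac * corrupted.shape[1]))
    for row in corrupted:
        idx = np.random.choice(corrupted.shape[1], n_corrupt, replace=False)
        row[idx] = 0.0
    return corrupted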
def _train_model(self, train_set, train_ref,
                 validation_set, validation_ref):
    """Train the model.

    :param train_set: training set
    :param train_ref: training reference data
    :param validation_set: validation set
    :param validation_ref: validation reference data
    :return: self
    """
    shuff = list(zip(train_set, train_ref))

    pbar = tqdm(list(range(self.num_epochs)))
    for i in pbar:
        np.random.shuffle(shuff)
        batches = list(utilities.gen_batches(shuff, self.batch_size))

        for batch in batches:
            x_batch, y_batch = list(zip(*batch))
            self.tf_session.run(
                self.train_step,
                feed_dict={self.input_data: x_batch,
                           self.input_labels: y_batch,
                           self.keep_prob: self.dropout})

        if validation_set is not None:
            feed = {self.input_data: validation_set,
                    self.input_labels: validation_ref,
                    self.keep_prob: 1}
            err = tf_utils.run_summaries(
                self.tf_session, self.tf_merged_summaries,
                self.tf_summary_writer, i, feed, self.cost)
            pbar.set_description("Reconstruction loss: %s" % (err))
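# None of the _train_model / _run_train_step variants open a TensorFlow session
# or initialize variables themselves; a public fit() method presumably does
# that before delegating to them. A minimal TF1-style driver, given purely as
# an assumption about how these pieces are wired together (self.logdir is a
# hypothetical attribute, not taken from this excerpt):
import tensorflow as tf

def fit(self, train_set, train_ref, validation_set=None, validation_ref=None):
    """Open a session, set up summaries and delegate training to _train_model."""
    with tf.Session() as self.tf_session:
        self.tf_merged_summaries = tf.summary.merge_all()
        self.tf_summary_writer = tf.summary.FileWriter(
            self.logdir, self.tf_session.graph)
        self.tf_session.run(tf.global_variables_initializer())
        self._train_model(train_set, train_ref,
                          validation_set, validation_ref)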