def __init__(self, file):
    # Load configuration values from a JSON file; every top-level key in the
    # file becomes an attribute of this object.
    if os.path.isfile(file):
        with open(file, encoding='utf-8', errors='ignore') as f:
            self.__dict__ = json.load(f)
        log('Loaded config from file: \'' + file + '\'.')
    else:
        log('Error loading config from file: \'' + file + '\'.')
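# Usage sketch (assumption: the enclosing class is a config wrapper, here
# called `Config`; the class name and the JSON key below are hypothetical,
# not taken from this snippet):
#
#   config = Config('config.json')   # file containing e.g. {"batch_size": 128}
#   print(config.batch_size)         # JSON keys become attributes via __dict__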
def evaluate(self, data_x, data_y, eval_type='training'):
    # Compute accuracy over the whole data set in mini-batches, weighting each
    # batch by its size so a smaller final batch is handled correctly.
    x, y, keep_prob = (self.placeholders['x'],
                       self.placeholders['y'],
                       self.placeholders['keep_prob'])
    num_examples = data_x.shape[0]
    total_accuracy = 0
    batch_size = self.get_property('batch_size')
    sess = tf.get_default_session()
    for offset in range(0, num_examples, batch_size):
        self.module_log(
            'Evaluating epoch - {0}: {1}/{2} - {3:.2f}% complete '.format(
                eval_type, offset, num_examples, offset / num_examples * 100),
            end='\r', log_to_file=False)
        batch_x = data_x[offset:offset + batch_size]
        batch_y = data_y[offset:offset + batch_size]
        accuracy = sess.run(
            self.training_pipeline['accuracy_operation'],
            feed_dict={
                x: batch_x,
                y: batch_y,
                keep_prob: self.get_property('keep_prob_eval')
            })
        total_accuracy += accuracy * len(batch_x)
    util.log()  # terminate the '\r' progress line with a newline
    return total_accuracy / num_examples
def module_log(self, msg, end='\n', log_to_file=True):
    # Prefix every log message with the module name for easier tracing.
    util.log('[' + self.module_name + ']: ' + msg, end=end,
             log_to_file=log_to_file)
def train(self, train: tuple, val: tuple):
    epochs = self.get_property('epochs')
    batches_per_epoch = self.get_property('batches_per_epoch')
    batch_size = self.get_property('batch_size')
    x_train, y_train = train[0], train[1]
    x_val, y_val = val[0], val[1]
    x, y, keep_prob = (self.placeholders['x'],
                       self.placeholders['y'],
                       self.placeholders['keep_prob'])

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        log_path = path.join(
            construct_weights_path(self.module_name, self.config,
                                   return_root=True),
            self.config.epochs_log)
        with open(log_path, 'w') as f:
            for epoch in range(epochs):
                self.module_log(
                    "\033[94m---- Starting epoch {0}/{1} ----\033[0m".format(
                        epoch + 1, epochs))
                batch_counter = 0
                # The data generator loops indefinitely, so stop manually
                # after `batches_per_epoch` batches.
                for batch_x, batch_y in self.image_datagen.flow(
                        x_train, y_train, batch_size=batch_size):
                    batch_counter += 1
                    sess.run(self.training_pipeline['train_step'],
                             feed_dict={
                                 x: batch_x,
                                 y: batch_y,
                                 keep_prob: self.get_property('keep_prob_train')
                             })
                    self.module_log(
                        'Processing batch {0}/{1} - {2:.2f}% complete '.format(
                            batch_counter, batches_per_epoch,
                            batch_counter / batches_per_epoch * 100),
                        end='\r', log_to_file=False)
                    if batch_counter == batches_per_epoch:
                        break
                util.log()  # terminate the '\r' progress line

                train_accuracy = self.evaluate(x_train, y_train,
                                               eval_type='training')
                val_accuracy = self.evaluate(x_val, y_val,
                                             eval_type='validation')
                result = ('Epoch {} -- Train accuracy: {:.3f} | '
                          'Validation accuracy: {:.3f}').format(
                              epoch + 1, train_accuracy, val_accuracy)
                self.module_log(result)
                f.write(result + '\n')

                # Checkpoint the weights after every epoch.
                weights_path = construct_weights_path(self.module_name,
                                                      self.config)
                self.saver.save(sess, save_path=weights_path,
                                global_step=epoch)
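# Usage sketch (assumptions: `model` is an instance of the enclosing trainer
# class with its placeholders, training_pipeline, image_datagen and saver
# already built; the variable names are hypothetical, not taken from this
# snippet):
#
#   model.train((x_train, y_train), (x_val, y_val))
#
# Each epoch draws `batches_per_epoch` augmented batches from
# `image_datagen.flow(...)`, evaluates both splits, appends the result to the
# epoch log file and checkpoints the weights.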
def save(data, output):
    # Serialise `data` to a pickle file at `output`.
    with open(output, 'wb') as handle:
        pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL)
    util.log('Created a pickle file \'' + output + '\'.')
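# Usage sketch (the dictionary layout and file name below are illustrative
# only, not taken from this snippet):
#
#   save({'features': x_train, 'labels': y_train}, 'train.p')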