def create_kth_fold(k):
    """Write train/test CSV files for the k-th cross-validation fold.

    The training split is the k-th fold of dermquest (optionally augmented
    with the whole dermis database when ``use_dermis`` is set in the image
    config); the test split is the held-out part of the same fold.

    Args:
        k: zero-based index of the fold to generate.
    """
    logger.info('Generating %d-th data for training...' % k)
    config = my_utils.load_config()
    ic = my_utils.load_config('image_config.json')
    dermis = inputs.load_raw_data('dermis', config)
    dermquest = inputs.load_raw_data('dermquest', config)

    train_data = inputs.get_kth_fold(dermquest, k, ic['n_folds'],
                                     seed=ic['split_seed'])
    if ic['use_dermis']:
        train_data = train_data + dermis
    train_df = to_csv(train_data)
    # index=False is the documented way to omit the index column
    # (index=None only works by accident of truthiness).
    train_df.to_csv(ic['train_csv_file'], index=False)
    logger.info('Successfully convert %d examples to %s'
                % (len(train_df), ic['train_csv_file']))

    test_data = inputs.get_kth_fold(dermquest, k, ic['n_folds'],
                                    seed=ic['split_seed'], type_='test')
    test_df = to_csv(test_data)
    test_df.to_csv(ic['test_csv_file'], index=False)
    logger.info('Successfully convert %d examples to %s'
                % (len(test_df), ic['test_csv_file']))
def main():
    """Create train/test CSV files for the fold configured in image_config.

    Same pipeline as ``create_kth_fold`` but the fold index ``k`` and all
    other parameters are read from ``image_config.json`` instead of being
    passed in.
    """
    logger.info('Creating csv file...')
    config = my_utils.load_config()
    # Use the same relative path as the sibling functions for consistency.
    image_config = my_utils.load_config('image_config.json')
    k = image_config['k']
    n_folds = image_config['n_folds']
    split_seed = image_config['split_seed']
    use_dermis = image_config['use_dermis']
    train_csv_file = image_config['train_csv_file']
    test_csv_file = image_config['test_csv_file']

    dermquest = inputs.load_raw_data('dermquest', config)
    dermis = inputs.load_raw_data('dermis', config)

    train_data = inputs.get_kth_fold(dermquest, k, n_folds, seed=split_seed)
    if use_dermis:
        train_data = train_data + dermis
    train_df = to_csv(train_data)
    # index=False is the documented way to omit the index column.
    train_df.to_csv(train_csv_file, index=False)
    # Log the CSV row count (len of the DataFrame), matching create_kth_fold;
    # len(train_data) counted dataset objects, not exported rows.
    logger.info('Successfully convert %d examples to %s'
                % (len(train_df), train_csv_file))

    test_data = inputs.get_kth_fold(dermquest, k, n_folds,
                                    seed=split_seed, type_='test')
    test_df = to_csv(test_data)
    test_df.to_csv(test_csv_file, index=False)
    logger.info('Successfully convert %d examples to %s'
                % (len(test_df), test_csv_file))
    logger.info('CSV file created!')
def create_dermis_for_train_dermquest_for_test():
    """Export all of dermis as the training CSV and all of dermquest as the test CSV."""
    logger.info('Creating dermis for training and dermquest for testing...')
    config = my_utils.load_config()
    image_config = my_utils.load_config('image_config.json')

    # Each database maps to the config key naming its output CSV file.
    for db_name, csv_key in (('dermis', 'train_csv_file'),
                             ('dermquest', 'test_csv_file')):
        raw_data = inputs.load_raw_data(db_name, config)
        frame = to_csv(raw_data)
        frame.to_csv(image_config[csv_key], index=None)
        logger.info('Successfully convert %d examples to %s'
                    % (len(frame), image_config[csv_key]))
def eval_seg_model(eval_one_func):
    """Restore the latest segmentation checkpoint and evaluate it on FLAGS.db.

    Args:
        eval_one_func: per-example evaluation callback forwarded to
            ``test_one_model``.

    Returns:
        Whatever ``test_one_model`` returns for the restored model.
    """
    logger.info('K-fold evaluation process...')
    config = my_utils.load_config()
    dataset = inputs.load_raw_data(FLAGS.db, config)
    checkpoint = tf.train.latest_checkpoint(config['train_dir'])
    model = evaluation.SegRestoredModel(checkpoint)
    outcome = test_one_model(model, dataset.listing, config, eval_one_func)
    logger.info('Done evaluation')
    return outcome
def kfold_training():
    """Train one model per cross-validation fold.

    For every fold, the training set is all of dermis plus the fold's
    dermquest training split; each fold gets its own sub-directory under
    ``train_dir``.
    """
    logger.info('K-fold training...')
    config = my_utils.load_config()
    dermis = inputs.load_raw_data('dermis', config)
    dermquest = inputs.load_raw_data('dermquest', config)
    total_folds = config['n_folds']

    for fold in range(total_folds):
        fold_subset = inputs.get_kth_fold(dermquest, fold, total_folds,
                                          seed=config['split_seed'])
        combined = dermis + fold_subset
        # Per-fold config: separate checkpoint dir and example count.
        fold_config = my_utils.get_config_for_kfold(
            config,
            train_dir=os.path.join(config['train_dir'], str(fold)),
            n_examples_for_train=len(combined))
        logger.info('Training for %i-th fold data...' % fold)
        train_one_fold(combined, fold_config)
    logger.info('Done training')
def eval_one_fold(fold, ckpt_path, out_path, ignore_iou=None):
    """Evaluate one fold's checkpoint on its dermquest test split.

    Accumulates TP/TN/FP/FN pixel counters over the fold's test images,
    derives summary metrics from the totals, and dumps the result dict to
    ``out_path``.

    Args:
        fold: zero-based fold index selecting the dermquest test split.
        ckpt_path: checkpoint path for ``RestoredModel``.
        out_path: where the result dict is persisted via ``my_utils.dump_obj``.
        ignore_iou: if truthy, images whose per-image IoU falls below this
            threshold are excluded from the totals.
    """
    if ignore_iou:
        logger.warning('Will ignore images with IoU small than %.3f' % ignore_iou)
    config = my_utils.load_config()
    net = RestoredModel(ckpt_path)
    dermquest = inputs.load_raw_data('dermquest', config)
    # train_data = inputs.get_kth_fold(dermquest, fold, config['n_folds'], seed=config['split_seed'])
    test_data = inputs.get_kth_fold(dermquest, fold, config['n_folds'],
                                    seed=config['split_seed'],
                                    type_='test')
    with net.graph.as_default() as g:
        # Running pixel-level confusion counters, summed over all kept images.
        result = {
            'TP': 0,
            'TN': 0,
            'FP': 0,
            'FN': 0
        }

        def update_dict(target, to_update):
            # In-place add every counter in `to_update` into `target`.
            for key in to_update:
                target[key] += to_update[key]

        # device_count={'GPU': 0} forces CPU-only evaluation.
        with tf.Session(graph=g, config=tf.ConfigProto(device_count={'GPU': 0})):
            counter = 0  # number of images skipped by the IoU threshold
            for i, base in enumerate(test_data.listing):
                image, label, bbox_gt = inputs.load_one_example(base, highest_to=800)
                # `_` holds per-image metrics; presumably includes 'IoU' — only
                # that key is read here.
                result_i, _ = evaluation.inference_with_restored_model(net, image, label,
                                                                       bbox_gt=bbox_gt,
                                                                       verbose=False,
                                                                       times=3,
                                                                       gt_prob=0.51)
                if ignore_iou and _['IoU'] < ignore_iou:
                    counter += 1
                    print(i, base, '---->')
                    continue
                update_dict(result, result_i)
                # NOTE(review): result_i is enriched with derived metrics but not
                # used again afterwards — looks like leftover debugging; confirm.
                result_i.update(my_utils.metric_many_from_counter(result_i))
        # Derive aggregate metrics (e.g. sensitivity/specificity) from totals.
        result.update(my_utils.metric_many_from_counter(result))
    logger.warning('%d of the images are ignored' % counter)
    logger.info(result)
    my_utils.dump_obj(out_path, result)
    logger.info('Result saved at %s' % out_path)
def train_from_scratch():
    """Train the bbox model from scratch on dermis + fold-0 of dermquest.

    Builds a fresh TF graph with the model, loss and summary ops, then runs
    the augmented-batch training loop, periodically logging losses, saving
    checkpoints and writing summaries under ``config['train_dir']``.
    """
    logger.info('Training from scratch...')
    config = my_utils.load_config()
    # data = inputs.load_raw_data(config['database'], config)
    # dermis_data = inputs.load_raw_data('dermis', config)
    # data = data + dermis_data
    dermis = inputs.load_raw_data('dermis', config)
    dermquest = inputs.load_raw_data('dermquest', config)
    # Hard-coded to fold 0 — only the first fold's training split is used here.
    kfold_train_data = inputs.get_kth_fold(dermquest, 0, config['n_folds'],
                                           seed=config['split_seed'])
    data = dermis + kfold_train_data
    n_examples_for_train = len(data)
    n_steps_for_train = my_utils.calc_training_steps(
        config['n_epochs_for_train'],
        config['batch_size'],
        n_examples_for_train)
    config['n_examples_for_train'] = n_examples_for_train
    with tf.Graph().as_default() as g:
        image_ph, label_ph, bbox_ph = bbox_model.model_placeholder(config)

        def build_feed_dict(image_, label_, bbox_):
            # Map one augmented batch onto the graph's input placeholders.
            return {image_ph: image_, label_ph: label_, bbox_ph: bbox_}

        global_step = tf.train.get_or_create_global_step()
        mm = bbox_model.Model(image_ph, config['input_size'])
        train_op, summary_op, debug = build_train(mm, label_ph, bbox_ph, config)
        logger.info('Done loading data set `%s`, %i examples in total'
                    % (config['database'], len(data)))
        # Wipes any previous run's checkpoints/summaries in train_dir.
        my_utils.create_and_delete_if_exists(config['train_dir'])

        saver = tf.train.Saver()
        writer = tf.summary.FileWriter(config['train_dir'], graph=g)
        with tf.Session() as sess:
            tf.global_variables_initializer().run()
            for i, (images, labels, bboxes) in enumerate(data.aug_train_batch(config)):
                # image, label, bbox = data[0]
                feed_dict = build_feed_dict(images, labels, bboxes)
                ops = [debug['bbox_loss'], debug['total_loss'], train_op]
                bbox_loss_val, total_loss_val, _ = sess.run(
                    ops, feed_dict=feed_dict)
                if i % config['log_every'] == 0:
                    fmt = 'step {:>5}/{} bbox_loss {:.5f}, total_loss {:.5f}'
                    logger.info(
                        fmt.format(i, n_steps_for_train, bbox_loss_val, total_loss_val))
                if i % config['checkpoint_every'] == 0:
                    my_utils.save_model(saver, config)
                    logger.info('Model saved at step-%i' % sess.run(global_step))
                # Summaries are optional: a falsy 'save_summary_every' disables them.
                if config['save_summary_every'] and i % config[
                        'save_summary_every'] == 0:
                    my_utils.add_summary(writer, summary_op, feed_dict)
                    logger.info('Summary saved at step-%i' % sess.run(global_step))
            # Final checkpoint after the batch iterator is exhausted.
            save_path = my_utils.save_model(saver, config)
            logger.info('Done training, model saved at %s' % (save_path, ))