def main(model, training_cnf, data_dir, start_epoch, resume_lr, weights_from, clean, visuals):
    util.check_required_program_args([model, training_cnf, data_dir])
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf
    util.init_logging('train.log', file_log_level=logging.INFO,
                      console_log_level=logging.INFO, clean=clean)
    if weights_from:
        weights_from = str(weights_from)
    data_set = DataSet(data_dir, model_def.image_size[0])
    # Fall back to the queued iterator unless the config explicitly asks for the parallel one.
    training_iter, validation_iter = create_training_iters(
        cnf, data_set, model_def.crop_size, start_epoch,
        cnf.get('iterator_type', 'queued') == 'parallel')
    trainer = SupervisedTrainer(model, cnf, training_iter, validation_iter,
                                classification=cnf['classification'])
    trainer.fit(data_set, weights_from, start_epoch, resume_lr, verbose=1,
                summary_every=cnf.get('summary_every', 10), clean=clean, visuals=visuals)

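# Illustrative only: a hedged sketch of how a main() like the one above is
# typically exposed as a command-line entry point. The click decorators,
# option names, and defaults below are assumptions for demonstration, not
# taken from the source.
import click

@click.command()
@click.option('--model', required=True, help='Path to the model definition module.')
@click.option('--training_cnf', required=True, help='Path to the training config module.')
@click.option('--data_dir', required=True, help='Directory holding the training data.')
@click.option('--start_epoch', default=1, show_default=True)
@click.option('--resume_lr', default=0.01, show_default=True)
@click.option('--weights_from', default=None, help='Checkpoint to resume weights from.')
@click.option('--clean', is_flag=True)
@click.option('--visuals', is_flag=True)
def cli(model, training_cnf, data_dir, start_epoch, resume_lr, weights_from, clean, visuals):
    main(model, training_cnf, data_dir, start_epoch, resume_lr, weights_from, clean, visuals)

if __name__ == '__main__':
    cli()
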
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from, resume_lr,
         gpu_memory_fraction, is_summary, num_classes, log_file_name):
    model_def = util.load_module(model)
    # Note: passes the full module here, unlike the supervised scripts,
    # which pass model_def.model.
    model = model_def
    cnf = util.load_module(training_cnf).cnf
    if weights_from:
        weights_from = str(weights_from)
    data_set = DataSet(data_dir, model_def.image_size[0])
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    training_iter, validation_iter = create_training_iters(
        cnf, data_set, standardizer, model_def.crop_size, start_epoch, parallel=parallel)
    trainer = SemiSupervisedTrainer(model, cnf, training_iterator=training_iter,
                                    validation_iterator=validation_iter,
                                    resume_lr=resume_lr,
                                    classification=cnf['classification'],
                                    gpu_memory_fraction=gpu_memory_fraction,
                                    is_summary=is_summary, verbosity=1,
                                    log_file_name=log_file_name)
    trainer.fit(data_set, num_classes, weights_from, start_epoch, summary_every=399)

def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from, resume_lr,
         gpu_memory_fraction, is_summary):
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf
    util.init_logging('train.log', file_log_level=logging.INFO,
                      console_log_level=logging.INFO)
    if weights_from:
        weights_from = str(weights_from)
    data_set = DataSet(data_dir, model_def.image_size[0])
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    training_iter, validation_iter = create_training_iters(
        cnf, data_set, standardizer, model_def.crop_size, start_epoch, parallel=parallel)
    trainer = SupervisedTrainer(model, cnf, training_iter, validation_iter,
                                resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                is_summary=is_summary, loss_type='kappa_log')
    trainer.fit(data_set, weights_from, start_epoch, verbose=1, summary_every=399)

def __init__(self, model_cnf, data_dir, image_size, crop_size, channel_dim=3,
             start_epoch=1, parallel=True):
    self.cnf = util.load_module(model_cnf).cnf
    self.image_size = image_size
    self.crop_size = crop_size
    self.channel_dim = channel_dim
    self.data_set = DataSet(data_dir, image_size)
    self.standardizer = self.cnf.get('standardizer', NoOpStandardizer())
    self.training_iter, self.validation_iter = create_training_iters(
        self.cnf, self.data_set, self.standardizer, [crop_size, crop_size],
        start_epoch, parallel=parallel)
    # Expose the underlying train/validation split for direct access.
    self.training_X = self.data_set.training_X
    self.training_y = self.data_set.training_y
    self.validation_X = self.data_set.validation_X
    self.validation_y = self.data_set.validation_y

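# Hypothetical usage of the class the __init__ above belongs to; the class
# name TrainingHarness and the paths are placeholders, not from the source.
harness = TrainingHarness('configs/model_cnf.py', 'data/train',
                          image_size=256, crop_size=224)
print(len(harness.training_X), 'training and',
      len(harness.validation_X), 'validation examples')
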
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from, resume_lr,
         gpu_memory_fraction, is_summary, num_classes):
    model_def = util.load_module(model)
    model = model_def
    cnf = util.load_module(training_cnf).cnf
    util.init_logging('train_ss.log', file_log_level=logging.INFO,
                      console_log_level=logging.INFO)
    if weights_from:
        weights_from = str(weights_from)
    data_set = DataSet(data_dir, model_def.image_size[0])
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    training_iter, validation_iter = create_training_iters(
        cnf, data_set, standardizer, model_def.crop_size, start_epoch, parallel=parallel)
    trainer = GenerativeLearner(model, cnf, training_iterator=training_iter,
                                validation_iterator=validation_iter, resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                is_summary=is_summary, verbosity=2)
    trainer.fit(data_set, num_classes, weights_from, start_epoch, summary_every=399)

def try_config(args, cnf):
    """For trying out configurations.

    Args:
        args: command line arguments regarding training
        cnf: training configuration sampled from hyperband search space

    Returns:
        a dictionary containing final loss value and early stop flag
    """
    model_def = util.load_module(args['model'])
    model = model_def.model
    if args['weights_from']:
        weights_from = str(args['weights_from'])
    else:
        weights_from = args['weights_from']
    data_set = DataSet(args['data_dir'], model_def.image_size[0], mode=cnf.get('mode'),
                       multilabel=cnf.get('multilabel', False))
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    cutout = cnf.get('cutout', None)
    training_iter, validation_iter = create_training_iters(
        cnf, data_set, standardizer, model_def.crop_size, args['start_epoch'],
        parallel=args['parallel'], cutout=cutout,
        data_balancing=cnf.get('data_balancing', False))
    learner = SupervisedLearner(
        model, cnf, training_iterator=training_iter, validation_iterator=validation_iter,
        resume_lr=args['resume_lr'], classification=cnf['classification'],
        gpu_memory_fraction=args['gpu_memory_fraction'], num_classes=args['num_classes'],
        is_summary=args['is_summary'], loss_type=args['loss_type'],
        weighted=args['weighted'], log_file_name=args['log_file_name'],
        verbosity=args['verbose'], is_early_stop=cnf.get('is_early_stop', True))
    _early_stop, _loss = learner.fit(data_set, weights_from=weights_from,
                                     start_epoch=args['start_epoch'],
                                     weights_dir=args['weights_dir'], summary_every=399)
    return {'early_stop': _early_stop, 'loss': _loss}

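# A minimal sketch of the loop try_config is designed for, assuming a
# random-search driver over a hyperband-style space. The search_space dict,
# the sampling scheme, and the args keys here are illustrative assumptions,
# not library API.
import random

def search(args, base_cnf, search_space, num_trials=20):
    best = {'loss': float('inf'), 'cnf': None}
    for _ in range(num_trials):
        cnf = dict(base_cnf)
        for name, choices in search_space.items():
            cnf[name] = random.choice(choices)  # draw one candidate per hyperparameter
        result = try_config(args, cnf)
        # Skip configurations the learner abandoned early; keep the best loss.
        if not result['early_stop'] and result['loss'] < best['loss']:
            best = {'loss': result['loss'], 'cnf': dict(cnf)}
    return best
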
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from, weights_dir,
         resume_lr, gpu_memory_fraction, num_classes, is_summary, loss_type, weighted,
         log_file_name):
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf
    if weights_from:
        weights_from = str(weights_from)
    data_set = DataSet(data_dir, model_def.image_size[0], mode=cnf.get('mode'),
                       multilabel=cnf.get('multilabel', False))
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    cutout = cnf.get('cutout', None)
    training_iter, validation_iter = create_training_iters(
        cnf, data_set, standardizer, model_def.crop_size, start_epoch, parallel=parallel,
        cutout=cutout, data_balancing=cnf.get('data_balancing', False))
    learner = SupervisedLearner(model, cnf, training_iterator=training_iter,
                                validation_iterator=validation_iter, resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                num_classes=num_classes, is_summary=is_summary,
                                loss_type=loss_type, weighted=weighted,
                                log_file_name=log_file_name)
    learner.fit(data_set, weights_from, start_epoch=start_epoch, weights_dir=weights_dir,
                summary_every=399)

def main(model, training_cnf, data_dir, start_epoch, resume_lr, weights_from,
         weights_exclude_scopes, trainable_scopes, clean, visuals):
    util.check_required_program_args([model, training_cnf, data_dir])
    sys.path.insert(0, '.')
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf
    util.init_logging('train.log', file_log_level=logging.INFO,
                      console_log_level=logging.INFO, clean=clean)
    if weights_from:
        weights_from = str(weights_from)
    data_set = DataSet(data_dir, model_def.image_size[0])
    training_iter, validation_iter = create_training_iters(
        cnf, data_set, model_def.crop_size, start_epoch,
        cnf.get('iterator_type', 'parallel') == 'parallel')
    # Model definitions may omit num_channels; default to 3 (RGB).
    try:
        input_shape = (-1, model_def.crop_size[1], model_def.crop_size[0],
                       model_def.num_channels)
    except AttributeError:
        input_shape = (-1, model_def.crop_size[1], model_def.crop_size[0], 3)
    trainer = SupervisedTrainerQ(model, cnf, input_shape, trainable_scopes,
                                 training_iter, validation_iter,
                                 classification=cnf['classification'])
    trainer.fit(data_set, weights_from, weights_exclude_scopes, start_epoch, resume_lr,
                verbose=1, summary_every=cnf.get('summary_every', 10), clean=clean,
                visuals=visuals)