Example #1
import logging

# Imports assumed from the tefla project layout (module paths may vary across versions):
from tefla.core.dir_dataset import DataSet
from tefla.core.iter_ops import create_training_iters
from tefla.core.training import SupervisedTrainer
from tefla.utils import util


def main(model, training_cnf, data_dir, start_epoch, resume_lr, weights_from,
         clean, visuals):
    util.check_required_program_args([model, training_cnf, data_dir])
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf

    util.init_logging('train.log',
                      file_log_level=logging.INFO,
                      console_log_level=logging.INFO,
                      clean=clean)
    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir, model_def.image_size[0])
    training_iter, validation_iter = create_training_iters(
        cnf, data_set, model_def.crop_size, start_epoch,
        cnf.get('iterator_type', 'queued') == 'parallel')
    trainer = SupervisedTrainer(model,
                                cnf,
                                training_iter,
                                validation_iter,
                                classification=cnf['classification'])
    trainer.fit(data_set,
                weights_from,
                start_epoch,
                resume_lr,
                verbose=1,
                summary_every=cnf.get('summary_every', 10),
                clean=clean,
                visuals=visuals)
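
The snippet shows main() without its command-line wiring; in tefla's training
scripts such a function is normally driven by a click command. The wiring below
is a hypothetical reconstruction (the decorators, option names, and defaults are
assumptions, not part of the original example):

import click

@click.command()
@click.option('--model', default=None, help='Path to the model definition module.')
@click.option('--training_cnf', default=None, help='Path to the training config module.')
@click.option('--data_dir', default=None, help='Directory containing the image data set.')
@click.option('--start_epoch', default=1, help='Epoch to start or resume training at.')
@click.option('--resume_lr', default=0.01, help='Learning rate to resume training with.')
@click.option('--weights_from', default=None, help='Checkpoint to initialize weights from.')
@click.option('--clean', is_flag=True, help='Start from a clean training directory.')
@click.option('--visuals', is_flag=True, help='Enable visualization summaries.')
def cli(model, training_cnf, data_dir, start_epoch, resume_lr, weights_from,
        clean, visuals):
    main(model, training_cnf, data_dir, start_epoch, resume_lr, weights_from,
         clean, visuals)

if __name__ == '__main__':
    cli()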
Example #2
import logging

# Imports assumed from the tefla project layout (module paths may vary across versions):
from tefla.core.dir_dataset import DataSet
from tefla.core.iter_ops import create_training_iters
from tefla.core.training import SupervisedTrainer
from tefla.da.standardizer import NoOpStandardizer
from tefla.utils import util


def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from,
         resume_lr, gpu_memory_fraction, is_summary):
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf

    util.init_logging('train.log',
                      file_log_level=logging.INFO,
                      console_log_level=logging.INFO)
    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir, model_def.image_size[0])
    standardizer = cnf.get('standardizer', NoOpStandardizer())

    training_iter, validation_iter = create_training_iters(cnf,
                                                           data_set,
                                                           standardizer,
                                                           model_def.crop_size,
                                                           start_epoch,
                                                           parallel=parallel)
    trainer = SupervisedTrainer(model,
                                cnf,
                                training_iter,
                                validation_iter,
                                resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                is_summary=is_summary,
                                loss_type='kappa_log')
    trainer.fit(data_set,
                weights_from,
                start_epoch,
                verbose=1,
                summary_every=399)
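
Note the standardizer lookup: cnf.get('standardizer', NoOpStandardizer()) lets a
config opt into custom preprocessing while defaulting to a pass-through. As a
rough illustration only (IdentityStandardizer is a hypothetical stand-in, and
its call signature is an assumption rather than tefla's actual interface), a
no-op standardizer amounts to:

class IdentityStandardizer(object):
    """Pass images through unchanged; used when no standardization is wanted."""

    def __call__(self, image, is_training):
        return image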
Example #3
import logging

from tensorflow.examples.tutorials.mnist import input_data

# Imports assumed from the tefla project layout (module paths may vary across
# versions); `model` is expected to be defined elsewhere in the same file.
from tefla.core.lr_policy import StepDecayPolicy
from tefla.core.mem_dataset import DataSet
from tefla.core.training import SupervisedTrainer
from tefla.utils import util


def train():
    mnist = input_data.read_data_sets("MNIST_data/", one_hot=False)

    width = 28
    height = 28

    train_images = mnist.train.images.reshape(-1, height, width, 1)
    train_labels = mnist.train.labels

    validation_images = mnist.validation.images.reshape(-1, height, width, 1)
    validation_labels = mnist.validation.labels

    data_set = DataSet(train_images, train_labels,
                       validation_images, validation_labels)

    training_cnf = {
        'classification': True,
        'validation_scores': [('validation accuracy', util.accuracy_wrapper),
                              ('validation kappa', util.kappa_wrapper)],
        'num_epochs': 50,
        'lr_policy': StepDecayPolicy(
            schedule={
                0: 0.01,
                30: 0.001,
            }
        )
    }
    util.init_logging('train.log', file_log_level=logging.INFO,
                      console_log_level=logging.INFO)

    trainer = SupervisedTrainer(model, training_cnf,
                                classification=training_cnf['classification'],
                                is_summary=True)
    trainer.fit(data_set, weights_from=None,
                start_epoch=1, verbose=1, summary_every=10)
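
The StepDecayPolicy schedule above maps epoch boundaries to learning rates: the
effective rate for an epoch is the value at the most recent boundary reached, so
training runs at 0.01 for epochs 0-29 and at 0.001 from epoch 30 on. A minimal
stand-in that shows the lookup (not tefla's actual implementation):

def lr_for_epoch(schedule, epoch):
    """Return the rate of the latest schedule entry at or before `epoch`."""
    return schedule[max(e for e in schedule if e <= epoch)]

schedule = {0: 0.01, 30: 0.001}
assert lr_for_epoch(schedule, 10) == 0.01    # before the boundary
assert lr_for_epoch(schedule, 40) == 0.001   # after epoch 30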
Example #4
import logging

# Imports assumed from the tefla project layout (module paths may vary across versions):
from tefla.core.dir_dataset import DataSet
from tefla.core.iterator import BatchIterator
from tefla.core.training import SupervisedTrainer
from tefla.utils import util


def main(model, training_cnf, data_dir, start_epoch, resume_lr, weights_from, clean):
    util.check_required_program_args([model, training_cnf, data_dir])
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf

    util.init_logging('train.log', file_log_level=logging.INFO, console_log_level=logging.INFO, clean=clean)
    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir, model_def.image_size[0])
    training_iter = BatchIterator(cnf['batch_size_train'], True)
    validation_iter = BatchIterator(cnf['batch_size_test'], True)
    trainer = SupervisedTrainer(model, cnf, training_iter, validation_iter, classification=cnf['classification'])
    trainer.fit(data_set, weights_from, start_epoch, resume_lr, verbose=1,
                summary_every=cnf.get('summary_every', 10), clean=clean)
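
Unlike Examples #1 and #2, this variant skips create_training_iters and feeds
the trainer plain BatchIterators sized by the config's batch_size_train and
batch_size_test (the second positional argument is assumed here to be a shuffle
flag). Conceptually, a shuffling batch iterator reduces to the following
simplified stand-in, not tefla's implementation:

import numpy as np

def iterate_batches(X, y, batch_size, shuffle=True):
    """Yield (X, y) mini-batches, optionally in a shuffled order."""
    idx = np.arange(len(X))
    if shuffle:
        np.random.shuffle(idx)
    for start in range(0, len(X), batch_size):
        sel = idx[start:start + batch_size]
        yield X[sel], y[sel]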
Example #5
    # ... (the enclosing model function is truncated in this example)
    return end_points(is_training)


import logging

import tensorflow as tf

# Imports assumed from the tefla project layout (module paths may vary across versions):
from tefla.core.lr_policy import StepDecayPolicy
from tefla.core.training import SupervisedTrainer
from tefla.utils import util

training_cnf = {
    'classification': True,
    'validation_scores': [('validation accuracy', util.accuracy_wrapper),
                          ('validation kappa', util.kappa_wrapper)],
    'num_epochs': 30,
    'lr_policy': StepDecayPolicy(schedule={
        0: 0.001,
        15: 0.0001,
    }),
    'optimizer': tf.train.AdamOptimizer()
}
util.init_logging('train.log',
                  file_log_level=logging.INFO,
                  console_log_level=logging.INFO)

trainer = SupervisedTrainer(model,
                            training_cnf,
                            classification=training_cnf['classification'])
trainer.fit(data_set,
            weights_from=None,
            start_epoch=1,
            verbose=1,
            summary_every=10)