Example 1
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from,
         resume_lr, gpu_memory_fraction, is_summary, num_classes,
         log_file_name):
    model_def = util.load_module(model)
    model = model_def  # note: the whole model module is handed to the trainer, not model_def.model
    cnf = util.load_module(training_cnf).cnf

    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir, model_def.image_size[0])
    standardizer = cnf.get('standardizer', NoOpStandardizer())

    training_iter, validation_iter = create_training_iters(cnf,
                                                           data_set,
                                                           standardizer,
                                                           model_def.crop_size,
                                                           start_epoch,
                                                           parallel=parallel)
    trainer = SemiSupervisedTrainer(model,
                                    cnf,
                                    training_iterator=training_iter,
                                    validation_iterator=validation_iter,
                                    resume_lr=resume_lr,
                                    classification=cnf['classification'],
                                    gpu_memory_fraction=gpu_memory_fraction,
                                    is_summary=is_summary,
                                    verbosity=1,
                                    log_file_name=log_file_name)
    trainer.fit(data_set,
                num_classes,
                weights_from,
                start_epoch,
                summary_every=399)
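
A minimal invocation sketch, assuming this main is called directly rather than through the repository's CLI wrapper; every path and hyperparameter below is a placeholder:

# Hypothetical values throughout; weights_from=None trains from scratch,
# while a checkpoint path would resume from saved weights.
main(model='examples/mnist_model_sa.py',
     training_cnf='examples/mnist_cnf.py',
     data_dir='/path/to/data',
     parallel=True,
     start_epoch=1,
     weights_from=None,
     resume_lr=0.01,
     gpu_memory_fraction=0.9,
     is_summary=False,
     num_classes=10,
     log_file_name='train_ss.log')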
Example 2
  def __init__(self,
               model_cnf,
               data_dir,
               image_size,
               crop_size,
               channel_dim=3,
               start_epoch=1,
               parallel=True):
    self.cnf = util.load_module(model_cnf).cnf
    self.image_size = image_size
    self.crop_size = crop_size
    self.channel_dim = channel_dim
    self.data_set = DataSet(data_dir, image_size)
    self.standardizer = self.cnf.get('standardizer', NoOpStandardizer())
    self.training_iter, self.validation_iter = create_training_iters(
        self.cnf,
        self.data_set,
        self.standardizer, [crop_size, crop_size],
        start_epoch,
        parallel=parallel)

    self.training_X = self.data_set.training_X
    self.training_y = self.data_set.training_y
    self.validation_X = self.data_set.validation_X
    self.validation_y = self.data_set.validation_y
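
The class around this __init__ is not shown in the snippet; assuming a hypothetical name such as TrainingData, construction and access would look like:

# 'TrainingData' is a hypothetical stand-in for the class whose __init__ is shown above.
loader = TrainingData(model_cnf='examples/mnist_cnf.py',
                      data_dir='/path/to/data',
                      image_size=28,
                      crop_size=28)
X_train, y_train = loader.training_X, loader.training_y
X_val, y_val = loader.validation_X, loader.validation_y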
Example 3
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from,
         resume_lr, gpu_memory_fraction, is_summary):
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf

    util.init_logging('train.log',
                      file_log_level=logging.INFO,
                      console_log_level=logging.INFO)
    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir, model_def.image_size[0])
    standardizer = cnf.get('standardizer', NoOpStandardizer())

    training_iter, validation_iter = create_training_iters(cnf,
                                                           data_set,
                                                           standardizer,
                                                           model_def.crop_size,
                                                           start_epoch,
                                                           parallel=parallel)
    trainer = SupervisedTrainer(model,
                                cnf,
                                training_iter,
                                validation_iter,
                                resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                is_summary=is_summary,
                                loss_type='kappa_log')
    trainer.fit(data_set,
                weights_from,
                start_epoch,
                verbose=1,
                summary_every=399)
Example 4
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from,
         resume_lr, gpu_memory_fraction, is_summary, num_classes):
    model_def = util.load_module(model)
    model = model_def
    cnf = util.load_module(training_cnf).cnf

    util.init_logging('train_ss.log',
                      file_log_level=logging.INFO,
                      console_log_level=logging.INFO)
    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir, model_def.image_size[0])
    standardizer = cnf.get('standardizer', NoOpStandardizer())

    training_iter, validation_iter = create_training_iters(cnf,
                                                           data_set,
                                                           standardizer,
                                                           model_def.crop_size,
                                                           start_epoch,
                                                           parallel=parallel)
    trainer = GenerativeLearner(model,
                                cnf,
                                training_iterator=training_iter,
                                validation_iterator=validation_iter,
                                resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                is_summary=is_summary,
                                verbosity=2)
    trainer.fit(data_set,
                num_classes,
                weights_from,
                start_epoch,
                summary_every=399)
Example 5
def try_config(args, cnf):
    """For trying out configurations.

  Args:
      args: command line arguments regarding training
      cnf: training configuration sampled from hyperband search space

  Returns:
      a dictionary containing final loss value and early stop flag
  """
    model_def = util.load_module(args['model'])
    model = model_def.model

    if args['weights_from']:
        weights_from = str(args['weights_from'])
    else:
        weights_from = args['weights_from']

    data_set = DataSet(args['data_dir'],
                       model_def.image_size[0],
                       mode=cnf.get('mode'),
                       multilabel=cnf.get('multilabel', False))

    standardizer = cnf.get('standardizer', NoOpStandardizer())
    cutout = cnf.get('cutout', None)

    training_iter, validation_iter = create_training_iters(
        cnf,
        data_set,
        standardizer,
        model_def.crop_size,
        args['start_epoch'],
        parallel=args['parallel'],
        cutout=cutout,
        data_balancing=cnf.get('data_balancing', False))
    learner = SupervisedLearner(
        model,
        cnf,
        training_iterator=training_iter,
        validation_iterator=validation_iter,
        resume_lr=args['resume_lr'],
        classification=cnf['classification'],
        gpu_memory_fraction=args['gpu_memory_fraction'],
        num_classes=args['num_classes'],
        is_summary=args['is_summary'],
        loss_type=args['loss_type'],
        weighted=args['weighted'],
        log_file_name=args['log_file_name'],
        verbosity=args['verbose'],
        is_early_stop=cnf.get('is_early_stop', True))

    _early_stop, _loss = learner.fit(data_set,
                                     weights_from=weights_from,
                                     start_epoch=args['start_epoch'],
                                     weights_dir=args['weights_dir'],
                                     summary_every=399)
    return {'early_stop': _early_stop, 'loss': _loss}
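
A hedged sketch of how a hyperband driver might call try_config: the args keys mirror exactly those the function reads, while the values and the minimal cnf dict are placeholders (a real cnf is sampled from the search space):

# Illustrative values only; 'kappa_log' is borrowed from Example 3.
args = dict(model='examples/mnist_model.py',
            weights_from=None,
            data_dir='/path/to/data',
            start_epoch=1,
            parallel=True,
            resume_lr=0.01,
            gpu_memory_fraction=0.9,
            num_classes=10,
            is_summary=False,
            loss_type='kappa_log',
            weighted=False,
            log_file_name='hyperband.log',
            verbose=1,
            weights_dir='weights')
cnf = {'classification': True, 'batch_size_train': 32, 'batch_size_test': 32}
result = try_config(args, cnf)
print(result['loss'], result['early_stop'])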
Example 6
def create_training_iters(cnf, data_set, crop_size, epoch, parallel):
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    balancing = 'balance_ratio' in cnf
    if parallel:
        if balancing:
            training_iterator_maker = iterator.BalancingDAIterator
        else:
            training_iterator_maker = iterator.ParallelDAIterator
        validation_iterator_maker = iterator.ParallelDAIterator
        logger.info('Using parallel training iterator')
    else:
        if balancing:
            training_iterator_maker = iterator.BalancingQueuedDAIterator
        else:
            training_iterator_maker = iterator.QueuedDAIterator
        validation_iterator_maker = iterator.ParallelDAIterator
        # validation_iterator_maker = iterator.QueuedDAIterator
        logger.info('Using queued training iterator')

    preprocessor = None
    if balancing:
        balancing_args = make_args(
            balance_weights=data_set.balance_weights(),
            final_balance_weights=cnf['final_balance_weights'],
            balance_ratio=cnf['balance_ratio'],
            balance_epoch_count=epoch - 1,
        )
    else:
        balancing_args = {}

    training_iterator = training_iterator_maker(
        batch_size=cnf['batch_size_train'],
        shuffle=True,
        preprocessor=preprocessor,
        crop_size=crop_size,
        is_training=True,
        aug_params=cnf.get('aug_params', data.no_augmentation_params),
        standardizer=standardizer,
        fill_mode='constant',
        # save_to_dir=da_training_preview_dir,
        **balancing_args)

    validation_iterator = validation_iterator_maker(
        batch_size=cnf['batch_size_test'],
        shuffle=True,
        preprocessor=preprocessor,
        crop_size=crop_size,
        is_training=False,
        standardizer=standardizer,
        fill_mode='constant')

    return training_iterator, validation_iterator
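
Unlike the other call sites in this section, this variant reads the standardizer out of cnf itself rather than taking it as an argument. A short usage sketch, under the assumption that the iterators follow the nolearn-style convention of being called with (X, y) and yielding batches:

# Hypothetical cnf; only the keys this function reads are shown.
cnf = {'batch_size_train': 32, 'batch_size_test': 32}
train_iter, val_iter = create_training_iters(cnf, data_set,
                                             crop_size=(28, 28),
                                             epoch=1, parallel=True)
# Assumed call convention: iterator(X, y) yields (x_batch, y_batch) pairs.
for x_batch, y_batch in train_iter(data_set.training_X, data_set.training_y):
    break  # inspect a single augmented, standardized batch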
Example 7
def create_prediction_iter(cnf, crop_size, preprocessor=None, sync=False):
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    if sync:
        prediction_iterator_maker = iterator.DAIterator
    else:
        prediction_iterator_maker = iterator.ParallelDAIterator

    prediction_iterator = prediction_iterator_maker(
        batch_size=cnf['batch_size_test'],
        shuffle=False,
        preprocessor=preprocessor,
        crop_size=crop_size,
        is_training=False,
        standardizer=standardizer,
        fill_mode='constant')

    return prediction_iterator
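
The prediction-side counterpart: sync=True selects the synchronous DAIterator, otherwise batches are prepared in parallel. Values below are placeholders:

# Hypothetical cnf; batch_size_test is the only key read here.
cnf = {'batch_size_test': 32}
pred_iter = create_prediction_iter(cnf, crop_size=(28, 28), sync=False)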
Example 8
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from,
         weights_dir, resume_lr, gpu_memory_fraction, num_classes, is_summary,
         loss_type, weighted, log_file_name):
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf

    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir,
                       model_def.image_size[0],
                       mode=cnf.get('mode'),
                       multilabel=cnf.get('multilabel', False))
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    cutout = cnf.get('cutout', None)

    training_iter, validation_iter = create_training_iters(
        cnf,
        data_set,
        standardizer,
        model_def.crop_size,
        start_epoch,
        parallel=parallel,
        cutout=cutout,
        data_balancing=cnf.get('data_balancing', False))
    learner = SupervisedLearner(model,
                                cnf,
                                training_iterator=training_iter,
                                validation_iterator=validation_iter,
                                resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                num_classes=num_classes,
                                is_summary=is_summary,
                                loss_type=loss_type,
                                weighted=weighted,
                                log_file_name=log_file_name)
    learner.fit(data_set,
                weights_from,
                start_epoch=start_epoch,
                weights_dir=weights_dir,
                summary_every=399)
Example 9
def predict(model, training_cnf, predict_dir, weights_from, dataset_name,
            convert, image_size, sync, predict_type):
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf
    weights_from = str(weights_from)
    images = data.get_image_files(predict_dir)

    standardizer = cnf.get('standardizer', NoOpStandardizer())

    preprocessor = convert_preprocessor(image_size) if convert else None
    prediction_iterator = create_prediction_iter(cnf, model_def.crop_size,
                                                 preprocessor, sync)

    if predict_type == 'quasi':
        predictor = QuasiCropPredictor(model, cnf, weights_from,
                                       prediction_iterator, 20)
    elif predict_type == '1_crop':
        predictor = OneCropPredictor(model, cnf, weights_from,
                                     prediction_iterator)
    elif predict_type == '10_crop':
        predictor = TenCropPredictor(model, cnf, weights_from,
                                     prediction_iterator,
                                     model_def.crop_size[0],
                                     model_def.image_size[0])
    else:
        raise ValueError('Unknown predict_type: %s' % predict_type)
    predictions = predictor.predict(images)
    predictions = predictions.reshape(-1, 1000)  # one row of 1000 class scores per image

    # print(predictions)

    names = data.get_names(images)
    for i, name in enumerate(names):
        print("---Predictions for %s:" % name)
        preds = (np.argsort(predictions[i])[::-1])[0:5]
        for p in preds:
            print(class_names[p], predictions[i][p])
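
Note that np.argsort sorts ascending, so the [::-1] reversal followed by [0:5] selects the five highest-scoring classes per image before each is printed with its class name.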
Example 10
def predict(model, training_cnf, predict_dir, weights_from, dataset_name,
            convert, image_size, sync, predict_type):
    images = data.get_image_files(predict_dir)

    # For now, models, cnfs, and weights are hard-coded.
    # These should come from program inputs or an ensembling config file.

    print('Creating predictor 1')
    weights_from1 = 'weights.sa/model-epoch-97.ckpt'
    model1 = 'examples/mnist_model_sa.py'
    training_cnf1 = 'examples/mnist_cnf.py'
    model_def1 = util.load_module(model1)
    model1 = model_def1.model
    cnf1 = util.load_module(training_cnf1).cnf
    standardizer = cnf1.get('standardizer', NoOpStandardizer())
    preprocessor = convert_preprocessor(
        model_def1.image_size[0]) if convert else None
    prediction_iterator1 = create_prediction_iter(cnf1, standardizer,
                                                  model_def1.crop_size,
                                                  preprocessor, sync)
    predictor1 = QuasiCropPredictor(model1, cnf1, weights_from1,
                                    prediction_iterator1, 20)
    # predictor1 = OneCropPredictor(model1, cnf1, weights_from1, prediction_iterator1)

    print('Creating predictor 2')
    weights_from2 = 'weights.rv/model-epoch-31.ckpt'
    model2 = 'examples/mnist_model.py'
    training_cnf2 = 'examples/mnist_cnf.py'
    model_def2 = util.load_module(model2)
    model2 = model_def2.model
    cnf2 = util.load_module(training_cnf2).cnf
    standardizer = cnf2.get('standardizer', NoOpStandardizer())
    preprocessor = convert_preprocessor(
        model_def2.image_size[0]) if convert else None
    prediction_iterator2 = create_prediction_iter(cnf2, standardizer,
                                                  model_def2.crop_size,
                                                  preprocessor, sync)
    predictor2 = QuasiCropPredictor(model2, cnf2, weights_from2,
                                    prediction_iterator2, 20)
    # predictor2 = OneCropPredictor(model2, cnf2, weights_from2, prediction_iterator2)

    predictor = EnsemblePredictor([predictor1, predictor2])

    predictions = predictor.predict(images)

    if not os.path.exists(os.path.join(predict_dir, '..', 'results')):
        os.mkdir(os.path.join(predict_dir, '..', 'results'))
    if not os.path.exists(
            os.path.join(predict_dir, '..', 'results', dataset_name)):
        os.mkdir(os.path.join(predict_dir, '..', 'results', dataset_name))

    names = data.get_names(images)
    image_prediction_probs = np.column_stack([names, predictions])
    headers = ['score%d' % (i + 1) for i in range(predictions.shape[1])]
    title = np.array(['image'] + headers)
    image_prediction_probs = np.vstack([title, image_prediction_probs])
    prediction_probs_file = os.path.abspath(
        os.path.join(predict_dir, '..', 'results', dataset_name,
                     'predictions.csv'))
    np.savetxt(prediction_probs_file,
               image_prediction_probs,
               delimiter=",",
               fmt="%s")
    print('Predictions saved to: %s' % prediction_probs_file)

    if cnf1['classification']:
        class_predictions = np.argmax(predictions, axis=1)
        image_class_predictions = np.column_stack([names, class_predictions])
        title = np.array(['image', 'label'])
        image_class_predictions = np.vstack([title, image_class_predictions])
        prediction_class_file = os.path.abspath(
            os.path.join(predict_dir, '..', 'results', dataset_name,
                         'predictions_class.csv'))
        np.savetxt(prediction_class_file,
                   image_class_predictions,
                   delimiter=",",
                   fmt="%s")
        print('Class predictions saved to: %s' % prediction_class_file)