Example 1
def main():
    train, test, _ = imdb.load_data(path='imdb.pkl',
                                    n_words=10000,
                                    valid_portion=0.1)
    trainX, trainY = train
    testX, testY = test

    trainX = pad_sequences(trainX, maxlen=100, value=0.)
    testX = pad_sequences(testX, maxlen=100, value=0.)
    trainY = np.asarray(trainY)
    testY = np.asarray(testY)
    data_set = DataSet(trainX, trainY, testX, testY)
    training_cnf = {
        'classification': True,
        'validation_scores': [('validation accuracy', util.accuracy_tf)],
        'num_epochs': 50,
        'input_size': (100, ),
        'lr_policy': StepDecayPolicy(schedule={
            0: 0.01,
            30: 0.001,
        })
    }
    util.init_logging('train.log',
                      file_log_level=logging.INFO,
                      console_log_level=logging.INFO)

    # `model` is assumed to be defined at module scope (see the import sketch below).
    learner = SupervisedLearner(model,
                                training_cnf,
                                classification=training_cnf['classification'],
                                is_summary=False)
    learner.fit(data_set, weights_from=None, start_epoch=1)
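
This snippet assumes its imports and a module-level model definition. A minimal sketch of the preamble it needs; the tflearn data utilities are standard, while the SupervisedLearner/StepDecayPolicy/util paths follow a tefla-style layout and should be treated as assumptions:

import logging

import numpy as np
from tflearn.data_utils import pad_sequences
from tflearn.datasets import imdb

# Assumed tefla-style module paths; verify against your installation.
from tefla.core.learning import SupervisedLearner
from tefla.core.lr_policy import StepDecayPolicy
from tefla.utils import util

# `model` and the array-backed `DataSet` are likewise expected at module
# scope, e.g. model = util.load_module('models/my_model.py').model, where
# the path is hypothetical.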
Example 2
def try_config(args, cnf):
    """For trying out configurations.

  Args:
      args: command line arguments regarding training
      cnf: training configuration sampled from hyperband search space

  Returns:
      a dictionary containing final loss value and early stop flag
  """
    model_def = util.load_module(args['model'])
    model = model_def.model

    # Checkpoint paths may arrive as non-string objects; normalize to str.
    weights_from = str(args['weights_from']) if args['weights_from'] else None

    data_set = DataSet(args['data_dir'],
                       model_def.image_size[0],
                       mode=cnf.get('mode'),
                       multilabel=cnf.get('multilabel', False))

    standardizer = cnf.get('standardizer', NoOpStandardizer())
    cutout = cnf.get('cutout', None)

    training_iter, validation_iter = create_training_iters(
        cnf,
        data_set,
        standardizer,
        model_def.crop_size,
        args['start_epoch'],
        parallel=args['parallel'],
        cutout=cutout,
        data_balancing=cnf.get('data_balancing', False))
    learner = SupervisedLearner(
        model,
        cnf,
        training_iterator=training_iter,
        validation_iterator=validation_iter,
        resume_lr=args['resume_lr'],
        classification=cnf['classification'],
        gpu_memory_fraction=args['gpu_memory_fraction'],
        num_classes=args['num_classes'],
        is_summary=args['is_summary'],
        loss_type=args['loss_type'],
        weighted=args['weighted'],
        log_file_name=args['log_file_name'],
        verbosity=args['verbose'],
        is_early_stop=cnf.get('is_early_stop', True))

    _early_stop, _loss = learner.fit(data_set,
                                     weights_from=weights_from,
                                     start_epoch=args['start_epoch'],
                                     weights_dir=args['weights_dir'],
                                     summary_every=399)
    return {'early_stop': _early_stop, 'loss': _loss}
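
The returned {'early_stop', 'loss'} pair is exactly what a hyperband-style driver consumes: sample a configuration, train, keep the low-loss survivors. A minimal illustrative driver; every key, path, and value below is a placeholder, not part of the library:

import random

search_space = {'lr': [0.01, 0.001], 'batch_size_train': [16, 32]}

args = {
    'model': 'models/my_model.py',  # hypothetical model module
    'weights_from': None,
    'data_dir': 'data/train',       # hypothetical image directory
    'start_epoch': 1,
    'parallel': True,
    'resume_lr': None,
    'gpu_memory_fraction': 0.9,
    'num_classes': 2,
    'is_summary': False,
    'loss_type': 'softmax',
    'weighted': False,
    'log_file_name': 'hyperband.log',
    'verbose': 1,
    'weights_dir': 'weights',
}

results = []
for _ in range(4):
    cnf = {k: random.choice(v) for k, v in search_space.items()}
    cnf['classification'] = True
    outcome = try_config(args, cnf)
    results.append((outcome['loss'], cnf))

best_loss, best_cnf = min(results, key=lambda r: r[0])
print('best config:', best_cnf, 'with loss', best_loss)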
Example 3
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from,
         weights_dir, resume_lr, gpu_memory_fraction, num_classes, is_summary,
         loss_type, weighted, log_file_name):
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf

    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir,
                       model_def.image_size[0],
                       mode=cnf.get('mode'),
                       multilabel=cnf.get('multilabel', False))
    standardizer = cnf.get('standardizer', NoOpStandardizer())
    cutout = cnf.get('cutout', None)

    training_iter, validation_iter = create_training_iters(
        cnf,
        data_set,
        standardizer,
        model_def.crop_size,
        start_epoch,
        parallel=parallel,
        cutout=cutout,
        data_balancing=cnf.get('data_balancing', False))
    learner = SupervisedLearner(model,
                                cnf,
                                training_iterator=training_iter,
                                validation_iterator=validation_iter,
                                resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                num_classes=num_classes,
                                is_summary=is_summary,
                                loss_type=loss_type,
                                weighted=weighted,
                                log_file_name=log_file_name)
    learner.fit(data_set,
                weights_from=weights_from,
                start_epoch=start_epoch,
                weights_dir=weights_dir,
                summary_every=399)
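
The one-flag-per-parameter signature is the shape of a command-line entry point. A sketch of a click wrapper that could drive it; the option names simply mirror the parameters and the defaults are assumptions:

import click

@click.command()
@click.option('--model', required=True, help='Path to the model definition module.')
@click.option('--training_cnf', required=True, help='Path to the training config module.')
@click.option('--data_dir', required=True, help='Directory containing the training data.')
@click.option('--parallel', default=True, type=bool)
@click.option('--start_epoch', default=1)
@click.option('--weights_from', default=None)
@click.option('--weights_dir', default='weights')
@click.option('--resume_lr', default=0.01)
@click.option('--gpu_memory_fraction', default=0.94)
@click.option('--num_classes', default=2)
@click.option('--is_summary', default=False, type=bool)
@click.option('--loss_type', default='softmax')
@click.option('--weighted', default=False, type=bool)
@click.option('--log_file_name', default='train.log')
def cli(**kwargs):
    main(**kwargs)

if __name__ == '__main__':
    cli()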
Example 4
def train():
  mnist = input_data.read_data_sets("MNIST_data/", one_hot=False)

  width = 28
  height = 28

  train_images = mnist.train.images.reshape(-1, height, width, 1)
  train_labels = mnist.train.labels

  validation_images = mnist.validation.images.reshape(-1, height, width, 1)
  validation_labels = mnist.validation.labels

  data_set = DataSet(train_images, train_labels, validation_images, validation_labels)

  training_cnf = {
      'classification': True,
      'validation_scores': [('accuracy', tf.metrics.accuracy),
                            ('kappa', tf.contrib.metrics.cohen_kappa)],
      'num_epochs': 50,
      'batch_size_train': 32,
      'batch_size_test': 32,
      'input_size': (28, 28, 1),
      'lr_policy': StepDecayPolicy(schedule={
          0: 0.01,
          30: 0.001,
      })
  }

  learner = SupervisedLearner(
      model,
      training_cnf,
      classification=training_cnf['classification'],
      is_summary=True,
      num_classes=10)
  learner.fit(data_set, weights_from=None, start_epoch=1, summary_every=10)
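
The lr_policy schedule holds the learning rate at 0.01 for epochs 0-29 and drops it to 0.001 from epoch 30 onward. A tiny standalone sketch of that lookup, purely illustrative and not the library's StepDecayPolicy implementation:

def step_decay_lr(schedule, epoch):
    # Return the rate attached to the last boundary at or below `epoch`.
    rate = None
    for boundary in sorted(schedule):
        if epoch >= boundary:
            rate = schedule[boundary]
    return rate

schedule = {0: 0.01, 30: 0.001}
assert step_decay_lr(schedule, 0) == 0.01
assert step_decay_lr(schedule, 29) == 0.01
assert step_decay_lr(schedule, 30) == 0.001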
Example 5
def try_config(args, cnf):
    """For trying out configurations.

  Args:
      args: command line arguments regarding training
      cnf: training configuration sampled from hyperband search space

  Returns:
      a dictionary containing final loss value and early stop flag
  """
    mnist = input_data.read_data_sets("MNIST_data/", one_hot=False)

    # The images stay as flat 784-vectors; no reshape to 28x28x1 happens here.
    train_images = mnist.train.images
    train_labels = mnist.train.labels

    validation_images = mnist.validation.images
    validation_labels = mnist.validation.labels

    data_set = DataSet(train_images, train_labels, validation_images,
                       validation_labels)

    model_def = util.load_module(args['model'])
    model = model_def.model

    learner = SupervisedLearner(model,
                                cnf,
                                classification=cnf['classification'],
                                is_summary=False,
                                num_classes=10,
                                verbosity=args['verbose'],
                                is_early_stop=cnf.get('is_early_stop', True))
    _early_stop, _loss = learner.fit(data_set,
                                     weights_from=None,
                                     start_epoch=1)

    return {'early_stop': _early_stop, 'loss': _loss}
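
Since the images stay flat here, any sampled configuration has to match that input shape. An illustrative cnf a search driver might pass in, assuming StepDecayPolicy is imported as in the other examples; all values and paths are placeholders:

cnf = {
    'classification': True,
    'num_epochs': 5,
    'batch_size_train': 32,
    'batch_size_test': 32,
    'input_size': (784,),  # flat vectors, since the images are not reshaped
    'is_early_stop': True,
    'lr_policy': StepDecayPolicy(schedule={0: 0.01, 3: 0.001}),
}
args = {'model': 'models/mnist_mlp.py', 'verbose': 1}  # hypothetical path
result = try_config(args, cnf)
print(result['loss'], result['early_stop'])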
Example 6
def main(model, training_cnf, data_dir, parallel, start_epoch, weights_from,
         weights_dir, resume_lr, gpu_memory_fraction, num_classes, is_summary,
         loss_type):
    model_def = util.load_module(model)
    model = model_def.model
    cnf = util.load_module(training_cnf).cnf

    util.init_logging('train.log',
                      file_log_level=logging.INFO,
                      console_log_level=logging.INFO)
    if weights_from:
        weights_from = str(weights_from)

    data_set = DataSet(data_dir, model_def.image_size[0])
    standardizer = cnf.get('standardizer', NoOpStandardizer())

    training_iter, validation_iter = create_training_iters(cnf,
                                                           data_set,
                                                           standardizer,
                                                           model_def.crop_size,
                                                           start_epoch,
                                                           parallel=parallel)
    learner = SupervisedLearner(model,
                                cnf,
                                training_iterator=training_iter,
                                validation_iterator=validation_iter,
                                resume_lr=resume_lr,
                                classification=cnf['classification'],
                                gpu_memory_fraction=gpu_memory_fraction,
                                num_classes=num_classes,
                                is_summary=is_summary,
                                loss_type=loss_type)
    learner.fit(data_set,
                weights_from=weights_from,
                start_epoch=start_epoch,
                weights_dir=weights_dir,
                summary_every=399)
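
A resumed run would pass a saved checkpoint via weights_from and restart the learning-rate policy via resume_lr. A hedged invocation sketch, where every path and value is a placeholder:

main(model='models/my_model.py',              # hypothetical module path
     training_cnf='configs/training_cnf.py',  # hypothetical config module
     data_dir='data/train',
     parallel=True,
     start_epoch=31,                          # continue after epoch 30
     weights_from='weights/model-epoch-30.ckpt',
     weights_dir='weights',
     resume_lr=0.001,
     gpu_memory_fraction=0.94,
     num_classes=2,
     is_summary=False,
     loss_type='softmax')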