# Shared imports for the examples below. Module paths follow the
# mean-teacher repo layout and are assumptions; adjust to your checkout.
import logging
from datetime import datetime

import tensorflow as tf

from datasets import Cifar10ZCA
from experiments.run_context import RunContext
from mean_teacher import minibatching
from mean_teacher.model import Model

LOG = logging.getLogger('main')


def run(test_phase, data_seed, n_labeled, training_length, rampdown_length):
    minibatch_size = 100
    n_labeled_per_batch = 100

    tf.reset_default_graph()
    model = Model(RunContext(__file__, data_seed))

    cifar = Cifar10ZCA(n_labeled=n_labeled,
                       data_seed=data_seed,
                       test_phase=test_phase)

    model['flip_horizontally'] = True
    model['ema_consistency'] = True
    model['max_consistency_cost'] = 0.0  # weight 0 disables the consistency term (effectively a supervised baseline)
    model['apply_consistency_to_labeled'] = False
    model['adam_beta_2_during_rampup'] = 0.999
    model['ema_decay_during_rampup'] = 0.999
    model['normalize_input'] = False  # Keep ZCA information
    model['rampdown_length'] = rampdown_length
    model['training_length'] = training_length

    training_batches = minibatching.training_batches(cifar.training,
                                                     minibatch_size,
                                                     n_labeled_per_batch)
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(
        cifar.evaluation, minibatch_size)

    tensorboard_dir = model.save_tensorboard_graph()
    LOG.info("Saved tensorboard graph to %r", tensorboard_dir)

    model.train(training_batches, evaluation_batches_fn)
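minibatching.training_batches is not defined in these snippets. Conceptually it yields an endless stream of fixed-size minibatches that mix n_labeled_per_batch labeled examples with unlabeled ones, which is what the semi-supervised consistency cost needs. A toy sketch of that idea, assuming NumPy arrays and a -1 marker for unlabeled targets (both are assumptions, not the repo's actual implementation):

import numpy as np

def toy_training_batches(images, labels, is_labeled,
                         batch_size=100, n_labeled_per_batch=50):
    # Split the index space into labeled and unlabeled pools.
    labeled_idx = np.flatnonzero(is_labeled)
    unlabeled_idx = np.flatnonzero(~is_labeled)
    while True:
        li = np.random.choice(labeled_idx, n_labeled_per_batch, replace=False)
        ui = np.random.choice(unlabeled_idx,
                              batch_size - n_labeled_per_batch, replace=False)
        batch_x = np.concatenate([images[li], images[ui]])
        # Unlabeled examples carry the dummy target -1.
        batch_y = np.concatenate([labels[li], np.full(len(ui), -1)])
        yield batch_x, batch_y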
Example no. 2
def run(result_dir, test_phase, n_labeled, data_seed, model_type):
    minibatch_size = 100
    hyperparams = model_hyperparameters(model_type, n_labeled)  # helper not shown here; see the sketch after this example

    tf.reset_default_graph()
    model = Model(result_dir=result_dir)

    cifar = Cifar10ZCA(n_labeled=n_labeled,
                       data_seed=data_seed,
                       test_phase=test_phase)

    model['flip_horizontally'] = True
    model['ema_consistency'] = hyperparams['ema_consistency']
    model['max_consistency_coefficient'] = hyperparams['max_consistency_coefficient']
    model['apply_consistency_to_labeled'] = hyperparams['apply_consistency_to_labeled']
    model['adam_beta_2_during_rampup'] = 0.999
    model['ema_decay_during_rampup'] = 0.999
    model['normalize_input'] = False  # Keep ZCA information
    model['rampdown_length'] = 25000
    model['training_length'] = 150000

    training_batches = minibatching.training_batches(cifar.training,
                                                     minibatch_size,
                                                     hyperparams['n_labeled_per_batch'])
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(cifar.evaluation,
                                                                    minibatch_size)

    tensorboard_dir = model.save_tensorboard_graph()
    LOG.info("Saved tensorboard graph to %r", tensorboard_dir)

    model.train(training_batches, evaluation_batches_fn)
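Example no. 2 relies on a model_hyperparameters helper that is not shown. A minimal sketch of its shape, assuming it only maps (model_type, n_labeled) to the four keys read above; the values below are placeholders, not the repository's tuned settings:

def model_hyperparameters(model_type, n_labeled):
    assert model_type in ['mean_teacher', 'pi']
    return {
        # The teacher is an EMA of the student only in the mean teacher model.
        'ema_consistency': model_type == 'mean_teacher',
        # Placeholder: scale the peak weight by the labeled fraction.
        'max_consistency_coefficient': 100.0 * n_labeled / 50000,
        'apply_consistency_to_labeled': True,
        'n_labeled_per_batch': 50,
    }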
Example no. 3
def run():
    data_seed = 0
    date = datetime.now()
    n_labeled = 4000

    result_dir = "{root}/{dataset}/{model}/{date:%Y-%m-%d_%H:%M:%S}/{seed}".format(
        root='results/final_eval',
        dataset='cifar10_{}'.format(n_labeled),
        model='mean_teacher',
        date=date,
        seed=data_seed
    )

    model = Model(result_dir=result_dir)
    model['flip_horizontally'] = True
    model['max_consistency_coefficient'] = 100.0 * n_labeled / 50000
    model['adam_beta_2_during_rampup'] = 0.999
    model['ema_decay_during_rampup'] = 0.999
    model['normalize_input'] = False  # Keep ZCA information
    model['rampdown_length'] = 25000
    model['training_length'] = 150000

    tensorboard_dir = model.save_tensorboard_graph()
    LOG.info("Saved tensorboard graph to %r", tensorboard_dir)

    cifar = Cifar10ZCA(data_seed, n_labeled)
    training_batches = minibatching.training_batches(cifar.training)
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(cifar.evaluation)

    model.train(training_batches, evaluation_batches_fn)
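The peak consistency weight above is scaled by the labeled fraction of the data. CIFAR-10 has 50000 training images, so with 4000 labels the formula works out to a peak weight of 8.0:

n_labeled = 4000
n_train = 50000  # size of the CIFAR-10 training set
peak_weight = 100.0 * n_labeled / n_train
assert peak_weight == 8.0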
Example no. 4
def run(data_seed=0):
    n_labeled = 4000

    model = Model(RunContext(__file__, 0))
    model['flip_horizontally'] = True
    model['normalize_input'] = False  # Keep ZCA information
    model['rampdown_length'] = 0
    model['rampup_length'] = 5000
    model['training_length'] = 40000
    model['max_consistency_cost'] = 50.0

    tensorboard_dir = model.save_tensorboard_graph()
    LOG.info("Saved tensorboard graph to %r", tensorboard_dir)

    cifar = Cifar10ZCA(data_seed, n_labeled)
    training_batches = minibatching.training_batches(cifar.training, n_labeled_per_batch=50)
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(cifar.evaluation)

    model.train(training_batches, evaluation_batches_fn)
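Example no. 4 uses a short schedule with an explicit rampup_length of 5000 steps. Mean teacher-style training ramps the consistency weight up with the sigmoid schedule exp(-5 * (1 - t/T)^2) from the paper; a standalone sketch of that schedule (this repo's exact code path may differ):

import numpy as np

def sigmoid_rampup(step, rampup_length):
    # Returns a factor in (0, 1] that multiplies max_consistency_cost.
    if rampup_length == 0:
        return 1.0
    phase = 1.0 - np.clip(step / rampup_length, 0.0, 1.0)
    return float(np.exp(-5.0 * phase * phase))

# With rampup_length=5000 as above: step 0 -> ~0.007,
# step 2500 -> ~0.29, step 5000 and beyond -> 1.0.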
Example no. 5
def run():
    data_seed = 0
    n_labeled = 4000

    model = Model(RunContext(__file__, 0))
    model['flip_horizontally'] = True
    model['max_consistency_cost'] = 100.0 * n_labeled / 50000
    model['adam_beta_2_during_rampup'] = 0.999
    model['ema_decay_during_rampup'] = 0.999
    model['normalize_input'] = False  # Keep ZCA information
    model['rampdown_length'] = 25000
    model['training_length'] = 150000

    tensorboard_dir = model.save_tensorboard_graph()
    LOG.info("Saved tensorboard graph to %r", tensorboard_dir)

    cifar = Cifar10ZCA(data_seed, n_labeled)
    training_batches = minibatching.training_batches(cifar.training)
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(
        cifar.evaluation)

    model.train(training_batches, evaluation_batches_fn)
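Common to all of these runs is the student/teacher pair: the teacher's weights are an exponential moving average of the student's, with the ema_decay_* settings above controlling the decay rate. A minimal sketch of that update, independent of this repo's TensorFlow graph code:

import numpy as np

def ema_update(teacher, student, alpha=0.999):
    # teacher <- alpha * teacher + (1 - alpha) * student, per parameter.
    for name, w in student.items():
        teacher[name] = alpha * teacher[name] + (1 - alpha) * w

# Toy usage: after each optimizer step on the student, fold its new
# weights into the teacher.
student = {'w': np.zeros(3)}
teacher = {'w': np.zeros(3)}
student['w'] += 1.0           # stand-in for a gradient update
ema_update(teacher, student)  # teacher['w'] is now [0.001, 0.001, 0.001]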