Example #1
import os

import tensorflow as tf

# Assumed context: data_loader, mean_teacher, RunContext, Training_log_plot,
# minibatching, datasets_name, and FLAGS are provided by the surrounding
# project; their import paths are not shown in this snippet.


def run(test_phase, n_labeled, data_seed):
    minibatch_size = 100

    data = data_loader(n_labeled=n_labeled,
                       data_seed=data_seed,
                       test_phase=test_phase)

    print('{} is loaded with {} training samples'.format(
        datasets_name[FLAGS.dataset_index], data['num_train']))

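    # With the full label set, every example in a batch is labeled and the
    # consistency cost weight equals the batch size; otherwise the weight is
    # scaled down by the labeled fraction of the training set.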
    if n_labeled == 'all':
        n_labeled_per_batch = minibatch_size
        max_consistency_cost = minibatch_size
    else:
        # n_labeled_per_batch = 'vary'
        n_labeled_per_batch = 20
        max_consistency_cost = minibatch_size * int(
            n_labeled) / data['num_train']

    hyper_dict = {
        'input_dim': data['input_dim'],
        'label_dim': data['label_dim'],
        'flip_horizontally': True,
        'max_consistency_cost': max_consistency_cost,
        'apply_consistency_to_labeled': True,
        'adam_beta_2_during_rampup': 0.999,
        'ema_decay_during_rampup': 0.999,
        'normalize_input': False,
        'rampdown_length': 25000,
        'training_length': 150000,
        'test_only': FLAGS.test_only
    }

    tf.reset_default_graph()
    runner_name = os.path.basename(__file__).split(".")[0]
    file_name = '{}_{}'.format(runner_name, n_labeled)
    log_plot = Training_log_plot(file_name, data_seed)
    model = mean_teacher(RunContext(file_name, data_seed), hyper_dict)

    training_batches = minibatching.training_batches(data.training,
                                                     minibatch_size,
                                                     n_labeled_per_batch)
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(
        data.evaluation, minibatch_size)

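    # In test-only mode, restore the checkpoint named by FLAGS.ckp and just
    # evaluate; otherwise train from scratch.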
    if FLAGS.test_only:
        model.restore(FLAGS.ckp)
        model.evaluate(evaluation_batches_fn)
    else:
        model.train(training_batches, evaluation_batches_fn)
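
As a usage sketch, the runner above might be invoked as follows; the argument values are illustrative assumptions, not taken from the original experiment scripts. Note that the code compares n_labeled against 'all' and otherwise converts it with int(), so a numeric string works.

# Illustrative call: 1,000 labeled samples, validation (non-test) phase, seed 0.
run(test_phase=False, n_labeled='1000', data_seed=0)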
Example #2
def run(test_phase, n_labeled, data_seed, data_type, bg_noise, bg_noise_level):

    minibatch_size = 100
    n_labeled_per_batch = minibatch_size

    data = data_loader(n_labeled=n_labeled,
                       data_seed=data_seed,
                       test_phase=test_phase,
                       bg_noise=bg_noise,
                       urban_noise=True)

    print('{} is loaded with {} training samples'.format(
        datasets_name[FLAGS.dataset_index], data['num_train']))

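    # Consistency regularization is effectively disabled here
    # (max_consistency_cost is 0); the model trains on labeled data with
    # background-noise augmentation. flat() is a project helper, assumed to
    # flatten the background-noise images; data_type is accepted by run()
    # but unused in this snippet.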
    hyper_dict = {
        'input_dim': data['input_dim'],
        'label_dim': data['label_dim'],
        'cnn': 'audio',
        'flip_horizontally': False,
        'max_consistency_cost': 0,
        'apply_consistency_to_labeled': False,
        'adam_beta_2_during_rampup': 0.999,
        'ema_decay_during_rampup': 0.999,
        'normalize_input': True,
        'rampdown_length': 25000,
        'rampup_length': 40000,
        'training_length': 80000,
        'bg_noise': bg_noise,
        'bg_noise_input': flat(data['bg_noise_img']),
        'bg_noise_level': bg_noise_level
    }

    tf.reset_default_graph()
    runner_name = os.path.basename(__file__).split(".")[0]
    file_name = '{}_{}'.format(runner_name, n_labeled)
    model = mean_teacher(RunContext(file_name, data_seed), hyper_dict)

    training_batches = minibatching.training_batches(data.training,
                                                     minibatch_size,
                                                     n_labeled_per_batch)
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(
        data.evaluation, minibatch_size)

    model.train(training_batches, evaluation_batches_fn)
Example #3
def run(data_seed=0):
    n_labeled = 4000

    model = mean_teacher(RunContext(__file__, 0))
    model['flip_horizontally'] = True
    model['normalize_input'] = False  # Keep ZCA information
    model['rampdown_length'] = 0
    model['rampup_length'] = 5000
    model['input_dim'] = (32, 32, 3)
    model['training_length'] = 40000
    model['max_consistency_cost'] = 50.0

    tensorboard_dir = model.save_tensorboard_graph()
    LOG.info("Saved tensorboard graph to %r", tensorboard_dir)

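    # CIFAR-10 with ZCA whitening: 4,000 of the 50,000 training images are
    # treated as labeled.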
    cifar = Cifar10ZCA(data_seed, n_labeled)
    training_batches = minibatching.training_batches(cifar.training,
                                                     n_labeled_per_batch=50)
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(
        cifar.evaluation)

    model.train(training_batches, evaluation_batches_fn)
Example #4
def run(test_phase, n_labeled, data_seed):

    minibatch_size = 100

    data = data_loader(n_labeled=n_labeled,
                       data_seed=data_seed,
                       test_phase=test_phase)

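    # With the full label set, use fully labeled batches; otherwise let the
    # batcher vary the number of labeled examples per batch and scale the
    # consistency cost by the labeled fraction of the training set.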
    if n_labeled == 'all':
        n_labeled_per_batch = minibatch_size
        max_consistency_cost = minibatch_size
    else:
        n_labeled_per_batch = 'vary'
        max_consistency_cost = minibatch_size * int(n_labeled) / data['num_train']

    hyper_dict = {
        'input_dim': data['input_dim'],
        'label_dim': data['label_dim'],
        'cnn': 'tower',
        'flip_horizontally': True,
        'max_consistency_cost': max_consistency_cost,
        'adam_beta_2_during_rampup': 0.999,
        'ema_decay_during_rampup': 0.999,
        'normalize_input': False,
        'rampdown_length': 25000,
        'training_length': 150000
    }

    tf.reset_default_graph()
    model = mean_teacher(RunContext(__file__, data_seed), hyper_dict)

    training_batches = minibatching.training_batches(data.training,
                                                     minibatch_size,
                                                     n_labeled_per_batch)
    evaluation_batches_fn = minibatching.evaluation_epoch_generator(data.evaluation,
                                                                    minibatch_size)

    model.train(training_batches, evaluation_batches_fn)