# Example #1
        # Log current train/test metrics (fragment: the enclosing loop —
        # presumably over `temps`, given `temp`/`counter` below — and its
        # header are outside this view).
        model.print_model_eval()

    # Fetch the current parameter tensors and the margin tensor evaluated
    # on the full training feed.
    cur_params, cur_margins = model.sess.run(
        [model.params, model.margin], feed_dict=model.all_train_feed_dict)
    # Influence of every training point on the loss at the single test point;
    # force_refresh=False reuses cached results when available.
    cur_influences = model.get_influence_on_test_loss(
        test_indices=[test_idx],
        train_idx=np.arange(num_train),
        force_refresh=False)

    # Record this sweep step's snapshot into the preallocated result arrays.
    params[counter, :] = np.concatenate(cur_params)
    margins[counter, :] = cur_margins
    influences[counter, :] = cur_influences

    # The retraining check is expensive, so it only runs for temp == 0.
    if temp == 0:
        actual_loss_diffs[counter, :], predicted_loss_diffs[
            counter, :], indices_to_remove[
                counter, :] = experiments.test_retraining(
                    model,
                    test_idx,
                    iter_to_load=0,
                    force_refresh=False,
                    num_steps=2000,
                    remove_type='maxinf',
                    num_to_remove=num_to_remove)

# Persist the whole temperature-sweep output in a single .npz archive.
hinge_results = {
    'temps': temps,
    'indices_to_remove': indices_to_remove,
    'actual_loss_diffs': actual_loss_diffs,
    'predicted_loss_diffs': predicted_loss_diffs,
    'influences': influences,
}
np.savez('output/hinge_results', **hinge_results)
# Example #2
    # Tail of a model-constructor call (the head is outside this view).
    # Keyword arguments configure the data, the optimization schedule,
    # and the output/log locations.
    data_sets=data_sets,
    initial_learning_rate=initial_learning_rate,
    keep_probs=keep_probs,
    decay_epochs=decay_epochs,
    mini_batch=False,  # full-batch updates; presumably paired with L-BFGS, per model_name
    train_dir='output',
    log_dir='log',
    model_name='spam_logreg_lbfgs')

# Fit the model, then measure how removing the most-influential training
# points changes the loss at one held-out test example.
tf_model.train()

test_idx = 8
retrain_result = experiments.test_retraining(
    tf_model,
    test_idx,
    iter_to_load=0,
    random_seed=0,
    remove_type='maxinf',
    num_to_remove=500,
    force_refresh=False)
actual_loss_diffs, predicted_loss_diffs_cg, indices_to_remove = retrain_result

# LiSSA — NOTE(review): the heading says LiSSA but approx_type below is
# 'cg'; presumably 'lissa' was intended — confirm against the library's
# supported approximation types.
np.random.seed(17)
# Predicted loss changes for the removed training points at the test example.
predicted_loss_diffs_lissa = tf_model.get_influence_on_test_loss(
    [test_idx],
    indices_to_remove,
    approx_type='cg',
    approx_params={
        'scale': 25,
        'recursion_depth': 5000,
        'damping': 0,
        'batch_size': 1,
        # NOTE(review): the snippet is truncated here — the approx_params
        # dict and the surrounding call are never closed, and the lines
        # below are the tail of an unrelated model-constructor call that
        # the extraction spliced in from another example.
                  train_dir='output',
                  log_dir='log',
                  model_name='cifar_all_cnn_c')

# Train for 500k iterations (the switch thresholds exceed num_steps, so the
# optimizer presumably never changes mode during this run — confirm), then
# run the retraining experiment against one test example.
num_steps = 500000
model.train(
    num_steps=num_steps,
    iter_to_switch_to_batch=10000000,
    iter_to_switch_to_sgd=10000000)
iter_to_load = num_steps - 1

test_idx = 6

retraining_outputs = experiments.test_retraining(
    model,
    test_idx=test_idx,
    iter_to_load=iter_to_load,
    force_refresh=True,
    remove_type='maxinf',
    num_steps=30000,
    num_to_remove=100)
actual_loss_diffs, predicted_loss_diffs, indices_to_remove = retraining_outputs

# Persist the actual-vs-predicted comparison for later analysis.
np.savez('output/cifar_all_cnn_c_iter-500k_retraining-100.npz',
         actual_loss_diffs=actual_loss_diffs,
         predicted_loss_diffs=predicted_loss_diffs,
         indices_to_remove=indices_to_remove)

# Reload the final checkpoint (iteration num_steps - 1).
model.load_checkpoint(iter_to_load)

# Test points whose influence values are computed next.
test_indices = [test_idx]