Example #1
def plot():
    with open('results.pkl', 'rb') as f:  # binary mode for pickle
        (all_learning_curves, all_val_loss, all_test_loss,
         all_fakedata, all_fakedatasize, all_L2) = zip(*pickle.load(f))

    import matplotlib as mpl
    mpl.rcParams['font.family'] = 'serif'

    # Fake data
    fig = plt.figure(0)
    fig.clf()
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    fig.set_size_inches((4.5, 1.75))
    ax.set_axis_off()
    fig.add_axes(ax)
    images = all_fakedata[-1]
    immin = np.min(images.ravel())
    immax = np.max(images.ravel())
    cax = plot_mnist_images(images, ax, ims_per_row=5, padding=2)
    cbar = fig.colorbar(cax, ticks=[immin, 0, immax], shrink=.7)
    cbar.ax.set_yticklabels(
        ['{:2.2f}'.format(immin), '0', '{:2.2f}'.format(immax)])

    plt.savefig('fake_data.png', bbox_inches='tight')
    plt.savefig('fake_data.pdf', bbox_inches='tight')

    # Everything else plot.
    fig = plt.figure(0)
    fig.clf()
    N_figs = 3
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = np.ceil(float(len(all_learning_curves)) / 50)
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, 'o-')
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    #ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, 'o-', label="Train Loss")
    ax.plot(all_val_loss, 'o-', label="Validation Loss")
    ax.plot(all_test_loss, 'o-', label="Test Loss")
    ax.plot(all_fakedatasize, 'o-', label="Fake Data Scale")
    ax.plot(all_L2, 'o-', label="L2_regularization")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    ax = fig.add_subplot(N_figs, 1, 3)
    ax.set_title("Fake Data")
    images = all_fakedata[-1]
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))
    plt.savefig("fig.png")
Example #2
def plot():
    with open('results.pkl', 'rb') as f:
        (all_learning_curves, all_val_loss, all_test_loss,
         all_fakedata, all_fakedatasize, all_L2) = zip(*pickle.load(f))

    import matplotlib as mpl
    mpl.rcParams['font.family'] = 'serif'

    # Fake data
    fig = plt.figure(0)
    fig.clf()
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    fig.set_size_inches((4.5, 1.75))
    ax.set_axis_off()
    fig.add_axes(ax)
    images = all_fakedata[-1]
    immin = np.min(images.ravel())
    immax = np.max(images.ravel())
    cax = plot_mnist_images(images, ax, ims_per_row=5, padding=2)
    cbar = fig.colorbar(cax, ticks=[immin, 0, immax], shrink=.7)
    cbar.ax.set_yticklabels(['{:2.2f}'.format(immin), '0', '{:2.2f}'.format(immax)])

    plt.savefig('fake_data.png', bbox_inches='tight')
    plt.savefig('fake_data.pdf', bbox_inches='tight')

    # Everything else plot.
    fig = plt.figure(0)
    fig.clf()
    N_figs = 3
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = np.ceil(float(len(all_learning_curves)) / 50)
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, 'o-')
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    #ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, 'o-', label="Train Loss")
    ax.plot(all_val_loss, 'o-', label="Validation Loss")
    ax.plot(all_test_loss, 'o-', label="Test Loss")
    ax.plot(all_fakedatasize, 'o-', label="Fake Data Scale")
    ax.plot(all_L2, 'o-', label="L2_regularization")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    ax = fig.add_subplot(N_figs, 1, 3)
    ax.set_title("Fake Data")
    images = all_fakedata[-1]
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))
    plt.savefig("fig.png")
Example #3
def plot():
    with open('results.pkl', 'rb') as f:
        all_learning_curves, all_val_loss, all_test_loss, all_weights, all_L2 = zip(
            *pickle.load(f))

    fig = plt.figure(0)
    fig.clf()
    N_figs = 2
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = np.ceil(float(len(all_learning_curves)) / 50)
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, 'o-')
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    #ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, 'o-', label="Train Loss")
    ax.plot(all_val_loss, 'o-', label="Validation Loss")
    ax.plot(all_test_loss, 'o-', label="Test Loss")
    #ax.plot(all_L2, 'o-', label="L2_regularization")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    plt.savefig("fig.png")

    fig = plt.figure(0)
    fig.clf()
    N_figs = 1

    ax = fig.add_subplot(N_figs, 1, 1)
    images = all_weights[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    fig.tight_layout()
    plt.savefig("weights.png")
    plt.savefig("weights.pdf", pad_inches=0.05, bbox_inches='tight')

    fig = plt.figure(0)
    fig.clf()
    N_figs = 1

    ax = fig.add_subplot(N_figs, 1, 1)
    images = all_L2[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    fig.tight_layout()
    plt.savefig("penalties.png")
    plt.savefig("penalties.pdf", pad_inches=0.05, bbox_inches='tight')
Example #4
def plot():
    with open('results.pkl', 'rb') as f:
        (all_learning_curves, all_val_loss, all_test_loss,
         all_weights, all_L2) = zip(*pickle.load(f))

    fig = plt.figure(0)
    fig.clf()
    N_figs = 2
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = np.ceil(float(len(all_learning_curves)) / 50)
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, 'o-')
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    #ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, 'o-', label="Train Loss")
    ax.plot(all_val_loss, 'o-', label="Validation Loss")
    ax.plot(all_test_loss, 'o-', label="Test Loss")
    #ax.plot(all_L2, 'o-', label="L2_regularization")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    plt.savefig("fig.png")


    fig = plt.figure(0)
    fig.clf()
    N_figs = 1

    ax = fig.add_subplot(N_figs, 1, 1)
    images = all_weights[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    fig.tight_layout()
    plt.savefig("weights.png")
    plt.savefig("weights.pdf", pad_inches=0.05, bbox_inches='tight')

    fig = plt.figure(0)
    fig.clf()
    N_figs = 1

    ax = fig.add_subplot(N_figs, 1, 1)
    images = all_L2[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    fig.tight_layout()
    plt.savefig("penalties.png")
    plt.savefig("penalties.pdf", pad_inches=0.05, bbox_inches='tight')
Example #5
def plot():
    with open('results.pkl', 'rb') as f:
        output, fakedata = pickle.load(f)
        (all_learning_curves, all_val_loss, all_test_loss,
         all_fakedatasize, all_L2, test_err) = zip(*output)

    # Fake data
    fig = plt.figure(0)
    fig.clf()
    ax = fig.add_subplot(1, 1, 1)
    images = fakedata
    plot_mnist_images(images, ax, ims_per_row=5)
    fig.set_size_inches((8, 12))
    plt.savefig('fake_data.pdf', pad_inches=0.05, bbox_inches='tight')


    fig = plt.figure(0)
    fig.clf()
    N_figs = 3
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = np.ceil(float(len(all_learning_curves)) / 50)
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, 'o-')
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    #ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, 'o-', label="Train Loss")
    ax.plot(all_val_loss, 'o-', label="Validation Loss")
    ax.plot(all_test_loss, 'o-', label="Test Loss")
    ax.plot(all_fakedatasize, 'o-', label="Fake Data Scale")
    ax.plot(all_L2, 'o-', label="L2_regularization")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    ax = fig.add_subplot(N_figs, 1, 3)
    ax.set_title("Fake Data")
    images = fakedata
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))
    plt.savefig("fig.png")
Example #6
def plot():
    with open('results.pkl', 'rb') as f:
        (all_learning_curves, all_val_loss, all_test_loss,
         all_fakedata, all_fakedatasize) = zip(*pickle.load(f))

    fig = plt.figure(0)
    fig.clf()
    N_figs = 3
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = np.ceil(float(len(all_learning_curves)) / 50)
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, 'o-')
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    #ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, 'o-', label="Train Loss")
    ax.plot(all_val_loss, 'o-', label="Validation Loss")
    ax.plot(all_test_loss, 'o-', label="Test Loss")
    ax.plot(all_fakedatasize, 'o-', label="Fake Data Scale")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    ax = fig.add_subplot(N_figs, 1, 3)
    ax.set_title("Fake Data")
    images = all_fakedata[-1]
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    plt.savefig("/tmp/fig.png")
    plt.savefig("fig.png")

    plt.show()
Example #7
def plot():
    with open('results.pkl', 'rb') as f:
        all_learning_curves, all_val_loss, all_fakedata = zip(*pickle.load(f))

    fig = plt.figure(0)
    fig.clf()
    N_figs = 3
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = max(1, len(all_learning_curves) // 50)  # avoid modulo-by-zero
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, 'o-')
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    #ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, 'o-', label="Train Loss")
    ax.plot(all_val_loss, 'o-', label="Validation Loss")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    ax = fig.add_subplot(N_figs, 1, 3)
    ax.set_title("Fake Data")
    images = all_fakedata[-1]
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    plt.savefig("/tmp/fig.png")
    plt.savefig("fig.png")

    plt.show()
Example #8
def plot():
    import matplotlib.pyplot as plt
    with open('results.pkl', 'rb') as f:
        A, valid_losses, test_losses = pickle.load(f)
    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((8, 12))

    ax = fig.add_subplot(211)
    ax.set_title("Meta learning curves")
    ax.plot(valid_losses, 'o-', label="Validation")
    ax.plot(test_losses, 'o-', label="Test")
    ax.set_ylabel("Negative log prob")
    ax.set_xlabel("Step number")
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(212)
    test_images = build_test_images()
    transformed_images = np.dot(test_images, A.T)
    cat_images = np.concatenate((test_images, transformed_images))
    plot_mnist_images(cat_images, ax, ims_per_row=test_images.shape[0])

    plt.savefig("/tmp/fig.png")
    plt.savefig("fig.png")
def plot():
    import matplotlib.pyplot as plt
    with open('results.pkl', 'rb') as f:
        A, valid_losses, test_losses = pickle.load(f)
    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((8, 12))

    ax = fig.add_subplot(211)
    ax.set_title("Meta learning curves")
    ax.plot(valid_losses, 'o-', label="Validation")
    ax.plot(test_losses, 'o-', label="Test")
    ax.set_ylabel("Negative log prob")
    ax.set_xlabel("Step number")
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(212)
    test_images = build_test_images()
    transformed_images = np.dot(test_images, A.T)
    cat_images = np.concatenate((test_images, transformed_images))
    plot_mnist_images(cat_images, ax, ims_per_row=test_images.shape[0])

    plt.savefig("/tmp/fig.png")
    plt.savefig("fig.png")
Example #10
def plot():
    import matplotlib.pyplot as plt
    with open('results.pkl', 'rb') as f:
        results, parser, parsed_init_hypergrad = pickle.load(f)

    # Show final ARD in the first layer filters.
    L2s = results['log_L2_reg']
    L2parser = parser.empty_copy()
    L2parser.vect = L2s
    l2_images = L2parser[('weights', 0)].T

    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((6, 8))
    ax = fig.add_subplot(111)
    plot_mnist_images(l2_images, ax, ims_per_row=30, padding=0)
    fig.set_size_inches((8, 12))

    #fig.tight_layout()
    plt.savefig("penalties.png")
    plt.savefig("penalties.pdf", pad_inches=0.05, bbox_inches='tight')

    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((6, 8))
    ax = fig.add_subplot(111)
    plt.hist(l2_images.ravel(), 100)
    plt.savefig("penalty_histogram.png")

    # Show first layer filters from the last meta-iteration.
    weights = results['example_weights']
    parser.vect = weights
    weight_images = parser[('weights', 0)].T

    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((6, 8))
    ax = fig.add_subplot(111)
    plot_mnist_images(weight_images, ax, ims_per_row=30, padding=0)
    fig.set_size_inches((8, 12))

    #fig.tight_layout()
    plt.savefig("weights.png")
    plt.savefig("weights.pdf", pad_inches=0.05, bbox_inches='tight')

    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((6, 8))
    # ----- Primal learning curves -----
    ax = fig.add_subplot(311)
    ax.set_title('Primal learning curves')
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['learning_curve'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.set_ylabel('Negative log prob')
    #ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(312)
    ax.set_title('Meta learning curves')
    losses = ['train_loss', 'valid_loss', 'tests_loss']
    for loss_type in losses:
        ax.plot(results[loss_type], 'o-', label=loss_type)
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Negative log prob')
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(313)
    ax.set_title('Meta-gradient magnitude')
    ax.plot(results['meta_grad_magnitude'],
            'o-',
            label='Meta-gradient magnitude')
    ax.plot(results['meta_grad_angle'], 'o-', label='Meta-gradient angle')
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Meta-gradient Magnitude')
    ax.legend(loc=1, frameon=False)

    plt.savefig('learning_curves.png')

    # ----- Learning curve info -----
    fig.clf()
    ax = fig.add_subplot(311)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['grad_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    #ax.legend(loc=1, frameon=False)
    ax.set_title('Grad norm')

    ax = fig.add_subplot(312)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['weight_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.legend(loc=1, frameon=False)
    ax.set_title('Weight norm')

    ax = fig.add_subplot(313)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['velocity_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.set_title('Velocity norm')
    ax.legend(loc=1, frameon=False)
    plt.savefig('extra_learning_curves.png')
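# Examples #10 and #13 assume a weights parser with an empty_copy() method,
# a flat .vect parameter vector, and tuple-keyed indexing such as
# parser[('weights', 0)] returning that layer's block reshaped to its
# matrix shape. A minimal sketch of that interface, inferred from usage
# above (an assumption, not the original class):
class WeightsParser(object):
    def __init__(self):
        self.idxs_and_shapes = {}
        self.vect = np.zeros((0,))

    def add_weights(self, name, shape):
        start = self.vect.size
        size = int(np.prod(shape))
        self.idxs_and_shapes[name] = (slice(start, start + size), shape)
        self.vect = np.concatenate((self.vect, np.zeros(size)))

    def empty_copy(self):
        # Same layout, fresh zeroed vector; assigning to .vect afterwards
        # (as with L2parser.vect = L2s above) reinterprets any flat vector.
        new_parser = WeightsParser()
        new_parser.idxs_and_shapes = self.idxs_and_shapes
        new_parser.vect = np.zeros_like(self.vect)
        return new_parser

    def __getitem__(self, name):
        idxs, shape = self.idxs_and_shapes[name]
        return np.reshape(self.vect[idxs], shape)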
Example #11
def plot():
    with open('results.pkl', 'rb') as f:
        (all_learning_curves, all_val_loss, all_test_loss,
         all_weights, all_L2) = zip(*pickle.load(f))

    fig = plt.figure(0)
    fig.clf()
    N_figs = 4
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = np.ceil(float(len(all_learning_curves)) / 50)
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, 'o-')
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    #ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, 'o-', label="Train Loss")
    ax.plot(all_val_loss, 'o-', label="Validation Loss")
    ax.plot(all_test_loss, 'o-', label="Test Loss")
    #ax.plot(all_L2, 'o-', label="L2_regularization")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    ax = fig.add_subplot(N_figs, 1, 3)
    ax.set_title("Weights")
    images = all_weights[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    ax = fig.add_subplot(N_figs, 1, 4)
    ax.set_title("Per-weight L2 penalty")
    images = all_L2[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    plt.savefig("fig.png")


    fig = plt.figure(0)
    fig.clf()
    N_figs = 1

    ax = fig.add_subplot(N_figs, 1, 1)
    #ax.set_title("Weights")
    images = all_weights[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    fig.tight_layout()
    plt.savefig("weights.png")
    plt.savefig("weights.pdf", pad_inches=0.05, bbox_inches='tight')

    from matplotlib import rc
    rc('font', **{'family': 'serif'})

    # Show ARD penalties.
    fig = plt.figure(0)
    plt.clf()
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    #ax = fig.add_subplot(1, 1, 1)
    fig.set_size_inches((4.5, 1.75))
    ax.set_axis_off()
    fig.add_axes(ax)

    # Clip the top 2% of values to suppress spikes before plotting.
    images = all_L2[-1].T.copy()
    newmax = np.percentile(images.ravel(), 98.0)
    images[images > newmax] = newmax

    cax = plot_mnist_images(images, ax, ims_per_row=5, padding=2, vmin=0.0)
    cbar = fig.colorbar(cax, ticks=[0, newmax], shrink=.7)
    cbar.ax.set_yticklabels(['0', '{:2.2f}'.format(newmax)])


    plt.savefig("penalties.png")
    plt.savefig("penalties.pdf", bbox_inches='tight')
Example #12
def plot():

    import matplotlib.pyplot as plt
    with open('results.pkl', 'rb') as f:
        results, parser = pickle.load(f)

    # Fake data
    fig = plt.figure(0)
    fig.clf()
    ax = fig.add_subplot(1, 1, 1)
    ax.set_title("Fake Data")
    images = results['fake_data'][-1]
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))
    plt.savefig('fake_data.pdf', pad_inches=0.05, bbox_inches='tight')


    # Show first layer filters from the last meta-iteration.
    fig = plt.figure(0)
    fig.clf()
    ax = fig.add_subplot(1, 1, 1)
    ax.set_title("Weights")
    weights = results['example_weights']
    parser.vect = weights
    weight_images = parser[('weights', 0)].T
    plot_mnist_images(weight_images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))
    plt.savefig('first_layer_weights.pdf', pad_inches=0.05, bbox_inches='tight')

    # ----- Nice versions of Alpha and beta schedules for paper -----
    fig = plt.figure(0)
    fig.clf()
    ax = fig.add_subplot(411)
    #ax.set_title('Alpha learning curves')
    for cur_results, name in zip(results['log_alphas'][-1].T, parser.names):
        if name[0] == 'weights':
            ax.plot(np.exp(cur_results), 'o-', label=name)
    #ax.set_xlabel('Learning Iteration', fontproperties='serif')
    low, high = ax.get_ylim()
    ax.set_ylim([0, high])
    ax.set_ylabel('Step size', fontproperties='serif')
    ax.set_xticklabels([])
    ax.legend(numpoints=1, loc=1, frameon=False, bbox_to_anchor=(1.0, 0.5),
              prop={'family': 'serif', 'size': '12'})

    ax = fig.add_subplot(412)
    #ax.set_title('Alpha learning curves')
    for cur_results, name in zip(results['invlogit_betas'][-1].T, parser.names):
        if name[0] == 'weights':
            ax.plot(logit(cur_results), 'o-', label=name)
    low, high = ax.get_ylim()
    ax.set_ylim([0, 1])
    ax.set_xlabel('Learning Iteration', fontproperties='serif')
    ax.set_ylabel('Momentum', fontproperties='serif')

    ax = fig.add_subplot(413)
    #ax.set_title('Alpha learning curves')
    for cur_results, name in zip(results['log_alphas'][-1].T, parser.names):
        if name[0] == 'biases':
            ax.plot(np.exp(cur_results), 'o-', label=name)
    #ax.set_xlabel('Learning Iteration', fontproperties='serif')
    low, high = ax.get_ylim()
    ax.set_ylim([0, high])
    ax.set_ylabel('Step size', fontproperties='serif')
    ax.set_xticklabels([])
    ax.legend(numpoints=1, loc=1, frameon=False, bbox_to_anchor=(1.0, 0.5),
              prop={'family': 'serif', 'size': '12'})

    ax = fig.add_subplot(414)
    #ax.set_title('Alpha learning curves')
    for cur_results, name in zip(results['invlogit_betas'][-1].T, parser.names):
        if name[0] == 'biases':
            ax.plot(logit(cur_results), 'o-', label=name)
    low, high = ax.get_ylim()
    ax.set_ylim([0, 1])
    ax.set_xlabel('Learning Iteration', fontproperties='serif')
    ax.set_ylabel('Momentum', fontproperties='serif')


    fig.set_size_inches((6, 8))
    #plt.show()
    plt.savefig('alpha_beta_paper.png')
    plt.savefig('alpha_beta_paper.pdf', pad_inches=0.05, bbox_inches='tight')

    fig.clf()
    fig.set_size_inches((6, 8))
    # ----- Primal learning curves -----
    ax = fig.add_subplot(311)
    #ax.set_title('Primal learning curves')
    #for i, y in enumerate(results['learning_curves']):
    #    ax.plot(y['learning_curve'], 'o-', label='Meta iter {0}'.format(i))
    #ax.set_xlabel('Epoch number')
    #ax.set_ylabel('Negative log prob')
    #ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(312)
    ax.set_title('Meta learning curves')
    losses = ['train_loss', 'valid_loss', 'tests_loss']
    for loss_type in losses:
        ax.plot(results[loss_type], 'o-', label=loss_type)
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Negative log prob')
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(313)
    ax.set_title('Meta-gradient magnitude')
    ax.plot(results['meta_grad_magnitude'], 'o-', label='Meta-gradient magnitude')
    ax.plot(results['meta_grad_angle'], 'o-', label='Meta-gradient angle')
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Meta-gradient Magnitude')
    ax.legend(loc=1, frameon=False)

    plt.savefig('learning_curves.png')

    # ----- Learning curve info -----
    fig.clf()
    ax = fig.add_subplot(311)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['grad_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    #ax.legend(loc=1, frameon=False)
    ax.set_title('Grad norm')

    ax = fig.add_subplot(312)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['weight_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.legend(loc=1, frameon=False)
    ax.set_title('Weight norm')

    ax = fig.add_subplot(313)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['velocity_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.set_title('Velocity norm')
    ax.legend(loc=1, frameon=False)
    plt.savefig('extra_learning_curves.png')

    # ----- Alpha and beta schedules -----
    fig.clf()
    ax = fig.add_subplot(211)
    ax.set_title('Alpha learning curves')
    for i, y in enumerate(results['log_alphas']):
        ax.plot(y, 'o-', label="Meta iter {0}".format(i))
    ax.set_xlabel('Primal iter number')
    #ax.set_ylabel('Log alpha')
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(212)
    ax.set_title('Beta learning curves')
    for y in results['invlogit_betas']:
        ax.plot(y, 'o-')
    ax.set_xlabel('Primal iter number')
    ax.set_ylabel('Inv logit beta')
    plt.savefig('alpha_beta_curves.png')

    # ----- Init scale and L2 reg -----
    fig.clf()
    ax = fig.add_subplot(211)
    ax.set_title('Init scale learning curves')
    for i, y in enumerate(zip(*results['log_param_scale'])):
        if parser.names[i][0] == 'weights':
            ax.plot(y, 'o-', label=parser.names[i])
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Log param scale')
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(212)
    ax.set_title('Init scale learning curves')
    for i, y in enumerate(zip(*results['log_param_scale'])):
        if parser.names[i][0] == 'biases':
            ax.plot(y, 'o-', label=parser.names[i])
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Log param scale')
    ax.legend(loc=1, frameon=False)

    plt.savefig('scale_and_reg.png')
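# Hedged note on logit as used above and in Example #14: despite its name,
# it evidently squashes the unconstrained invlogit_betas parameters into
# [0, 1] (the momentum panels set ylim to [0, 1]), i.e. it behaves as a
# logistic sigmoid. A stand-in consistent with that usage:
def logit(x):
    return 1.0 / (1.0 + np.exp(-x))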
Example #13
def plot():
    import matplotlib.pyplot as plt
    with open('results.pkl', 'rb') as f:
        results, parser, parsed_init_hypergrad = pickle.load(f)

    # Show final ARD in the first layer filters.
    L2s = results['log_L2_reg']
    L2parser = parser.empty_copy()
    L2parser.vect = L2s
    l2_images = L2parser[('weights', 0)].T

    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((6, 8))
    ax = fig.add_subplot(111)
    plot_mnist_images(l2_images, ax, ims_per_row=30, padding=0)
    fig.set_size_inches((8, 12))

    #fig.tight_layout()
    plt.savefig("penalties.png")
    plt.savefig("penalties.pdf", pad_inches=0.05, bbox_inches='tight')


    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((6, 8))
    ax = fig.add_subplot(111)
    plt.hist(l2_images.ravel(), 100)
    plt.savefig("penalty_histogram.png")



    # Show first layer filters from the last meta-iteration.
    weights = results['example_weights']
    parser.vect = weights
    weight_images = parser[('weights', 0)].T

    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((6, 8))
    ax = fig.add_subplot(111)
    plot_mnist_images(weight_images, ax, ims_per_row=30, padding=0)
    fig.set_size_inches((8, 12))

    #fig.tight_layout()
    plt.savefig("weights.png")
    plt.savefig("weights.pdf", pad_inches=0.05, bbox_inches='tight')







    fig = plt.figure(0)
    fig.clf()
    fig.set_size_inches((6, 8))
    # ----- Primal learning curves -----
    ax = fig.add_subplot(311)
    ax.set_title('Primal learning curves')
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['learning_curve'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.set_ylabel('Negative log prob')
    #ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(312)
    ax.set_title('Meta learning curves')
    losses = ['train_loss', 'valid_loss', 'tests_loss']
    for loss_type in losses:
        ax.plot(results[loss_type], 'o-', label=loss_type)
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Negative log prob')
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(313)
    ax.set_title('Meta-gradient magnitude')
    ax.plot(results['meta_grad_magnitude'], 'o-', label='Meta-gradient magnitude')
    ax.plot(results['meta_grad_angle'], 'o-', label='Meta-gradient angle')
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Meta-gradient Magnitude')
    ax.legend(loc=1, frameon=False)

    plt.savefig('learning_curves.png')


    # ----- Learning curve info -----
    fig.clf()
    ax = fig.add_subplot(311)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['grad_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    #ax.legend(loc=1, frameon=False)
    ax.set_title('Grad norm')

    ax = fig.add_subplot(312)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['weight_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.legend(loc=1, frameon=False)
    ax.set_title('Weight norm')

    ax = fig.add_subplot(313)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['velocity_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.set_title('Velocity norm')
    ax.legend(loc=1, frameon=False)
    plt.savefig('extra_learning_curves.png')
Example #14
def plot():

    import matplotlib.pyplot as plt
    with open('results.pkl', 'rb') as f:
        results, parser = pickle.load(f)

    # Fake data
    fig = plt.figure(0)
    fig.clf()
    ax = fig.add_subplot(1, 1, 1)
    ax.set_title("Fake Data")
    images = results['fake_data'][-1]
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))
    plt.savefig('fake_data.pdf', pad_inches=0.05, bbox_inches='tight')

    # Show first layer filters from the last meta-iteration.
    fig = plt.figure(0)
    fig.clf()
    ax = fig.add_subplot(1, 1, 1)
    ax.set_title("Weights")
    weights = results['example_weights']
    parser.vect = weights
    weight_images = parser[('weights', 0)].T
    plot_mnist_images(weight_images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))
    plt.savefig('first_layer_weights.pdf',
                pad_inches=0.05,
                bbox_inches='tight')

    # ----- Nice versions of Alpha and beta schedules for paper -----
    fig = plt.figure(0)
    fig.clf()
    ax = fig.add_subplot(411)
    #ax.set_title('Alpha learning curves')
    for cur_results, name in zip(results['log_alphas'][-1].T, parser.names):
        if name[0] == 'weights':
            ax.plot(np.exp(cur_results), 'o-', label=name)
    #ax.set_xlabel('Learning Iteration', fontproperties='serif')
    low, high = ax.get_ylim()
    ax.set_ylim([0, high])
    ax.set_ylabel('Step size', fontproperties='serif')
    ax.set_xticklabels([])
    ax.legend(numpoints=1,
              loc=1,
              frameon=False,
              bbox_to_anchor=(1.0, 0.5),
              prop={
                  'family': 'serif',
                  'size': '12'
              })

    ax = fig.add_subplot(412)
    #ax.set_title('Alpha learning curves')
    for cur_results, name in zip(results['invlogit_betas'][-1].T,
                                 parser.names):
        if name[0] == 'weights':
            ax.plot(logit(cur_results), 'o-', label=name)
    low, high = ax.get_ylim()
    ax.set_ylim([0, 1])
    ax.set_xlabel('Learning Iteration', fontproperties='serif')
    ax.set_ylabel('Momentum', fontproperties='serif')

    ax = fig.add_subplot(413)
    #ax.set_title('Alpha learning curves')
    for cur_results, name in zip(results['log_alphas'][-1].T, parser.names):
        if name[0] == 'biases':
            ax.plot(np.exp(cur_results), 'o-', label=name)
    #ax.set_xlabel('Learning Iteration', fontproperties='serif')
    low, high = ax.get_ylim()
    ax.set_ylim([0, high])
    ax.set_ylabel('Step size', fontproperties='serif')
    ax.set_xticklabels([])
    ax.legend(numpoints=1,
              loc=1,
              frameon=False,
              bbox_to_anchor=(1.0, 0.5),
              prop={
                  'family': 'serif',
                  'size': '12'
              })

    ax = fig.add_subplot(414)
    #ax.set_title('Alpha learning curves')
    for cur_results, name in zip(results['invlogit_betas'][-1].T,
                                 parser.names):
        if name[0] == 'biases':
            ax.plot(logit(cur_results), 'o-', label=name)
    low, high = ax.get_ylim()
    ax.set_ylim([0, 1])
    ax.set_xlabel('Learning Iteration', fontproperties='serif')
    ax.set_ylabel('Momentum', fontproperties='serif')

    fig.set_size_inches((6, 8))
    #plt.show()
    plt.savefig('alpha_beta_paper.png')
    plt.savefig('alpha_beta_paper.pdf', pad_inches=0.05, bbox_inches='tight')

    fig.clf()
    fig.set_size_inches((6, 8))
    # ----- Primal learning curves -----
    ax = fig.add_subplot(311)
    #ax.set_title('Primal learning curves')
    #for i, y in enumerate(results['learning_curves']):
    #    ax.plot(y['learning_curve'], 'o-', label='Meta iter {0}'.format(i))
    #ax.set_xlabel('Epoch number')
    #ax.set_ylabel('Negative log prob')
    #ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(312)
    ax.set_title('Meta learning curves')
    losses = ['train_loss', 'valid_loss', 'tests_loss']
    for loss_type in losses:
        ax.plot(results[loss_type], 'o-', label=loss_type)
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Negative log prob')
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(313)
    ax.set_title('Meta-gradient magnitude')
    ax.plot(results['meta_grad_magnitude'],
            'o-',
            label='Meta-gradient magnitude')
    ax.plot(results['meta_grad_angle'], 'o-', label='Meta-gradient angle')
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Meta-gradient Magnitude')
    ax.legend(loc=1, frameon=False)

    plt.savefig('learning_curves.png')

    # ----- Learning curve info -----
    fig.clf()
    ax = fig.add_subplot(311)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['grad_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    #ax.legend(loc=1, frameon=False)
    ax.set_title('Grad norm')

    ax = fig.add_subplot(312)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['weight_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.legend(loc=1, frameon=False)
    ax.set_title('Weight norm')

    ax = fig.add_subplot(313)
    for i, y in enumerate(results['learning_curves']):
        ax.plot(y['velocity_norm'], 'o-', label='Meta iter {0}'.format(i))
    ax.set_xlabel('Epoch number')
    ax.set_title('Velocity norm')
    ax.legend(loc=1, frameon=False)
    plt.savefig('extra_learning_curves.png')

    # ----- Alpha and beta schedules -----
    fig.clf()
    ax = fig.add_subplot(211)
    ax.set_title('Alpha learning curves')
    for i, y in enumerate(results['log_alphas']):
        ax.plot(y, 'o-', label="Meta iter {0}".format(i))
    ax.set_xlabel('Primal iter number')
    #ax.set_ylabel('Log alpha')
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(212)
    ax.set_title('Beta learning curves')
    for y in results['invlogit_betas']:
        ax.plot(y, 'o-')
    ax.set_xlabel('Primal iter number')
    ax.set_ylabel('Inv logit beta')
    plt.savefig('alpha_beta_curves.png')

    # ----- Init scale and L2 reg -----
    fig.clf()
    ax = fig.add_subplot(211)
    ax.set_title('Init scale learning curves')
    for i, y in enumerate(zip(*results['log_param_scale'])):
        if parser.names[i][0] == 'weights':
            ax.plot(y, 'o-', label=parser.names[i])
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Log param scale')
    ax.legend(loc=1, frameon=False)

    ax = fig.add_subplot(212)
    ax.set_title('Init scale learning curves')
    for i, y in enumerate(zip(*results['log_param_scale'])):
        if parser.names[i][0] == 'biases':
            ax.plot(y, 'o-', label=parser.names[i])
    ax.set_xlabel('Meta iter number')
    ax.set_ylabel('Log param scale')
    ax.legend(loc=1, frameon=False)

    plt.savefig('scale_and_reg.png')
Example #15
def plot():
    with open("results.pkl") as f:
        all_learning_curves, all_val_loss, all_test_loss, all_weights, all_L2 = zip(*pickle.load(f))

    fig = plt.figure(0)
    fig.clf()
    N_figs = 4
    ax = fig.add_subplot(N_figs, 1, 1)
    ax.set_title("Learning Curves")
    subsample = np.ceil(float(len(all_learning_curves)) / 50)
    for i, log_alphas in enumerate(all_learning_curves):
        if i % subsample == 0:
            ax.plot(log_alphas, "o-")
    ax.set_ylabel("Loss")
    ax.set_xlabel("Step number")
    # ax.legend(loc=4)

    ax = fig.add_subplot(N_figs, 1, 2)
    ax.set_title("Meta Learning Curve")
    all_train_loss = [curve[-1] for curve in all_learning_curves]
    ax.plot(all_train_loss, "o-", label="Train Loss")
    ax.plot(all_val_loss, "o-", label="Validation Loss")
    ax.plot(all_test_loss, "o-", label="Test Loss")
    # ax.plot(all_L2, 'o-', label="L2_regularization")
    ax.set_ylabel("Validation Loss")
    ax.set_xlabel("Meta Iteration Number")
    ax.legend(loc=2)

    ax = fig.add_subplot(N_figs, 1, 3)
    ax.set_title("Weights")
    images = all_weights[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    ax = fig.add_subplot(N_figs, 1, 4)
    ax.set_title("Per-weight L2 penalty")
    images = all_L2[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    plt.savefig("fig.png")

    fig = plt.figure(0)
    fig.clf()
    N_figs = 1

    ax = fig.add_subplot(N_figs, 1, 1)
    # ax.set_title("Weights")
    images = all_weights[-1].T
    plot_mnist_images(images, ax, ims_per_row=10)
    fig.set_size_inches((8, 12))

    fig.tight_layout()
    plt.savefig("weights.png")
    plt.savefig("weights.pdf", pad_inches=0.05, bbox_inches="tight")

    from matplotlib import rc

    rc("font", **{"family": "serif"})

    # Show ARD penalties.
    fig = plt.figure(0)
    plt.clf()
    ax = plt.Axes(fig, [0.0, 0.0, 1.0, 1.0])
    # ax = fig.add_subplot(1, 1, 1)
    fig.set_size_inches((4.5, 1.75))
    ax.set_axis_off()
    fig.add_axes(ax)

    # Clip the top 2% of values to suppress spikes before plotting.
    images = all_L2[-1].T.copy()
    newmax = np.percentile(images.ravel(), 98.0)
    images[images > newmax] = newmax

    cax = plot_mnist_images(images, ax, ims_per_row=5, padding=2, vmin=0.0)
    cbar = fig.colorbar(cax, ticks=[0, newmax], shrink=0.7)
    cbar.ax.set_yticklabels(["0", "{:2.2f}".format(newmax)])

    plt.savefig("penalties.png")
    plt.savefig("penalties.pdf", bbox_inches="tight")