import numpy as np
import numpy.random as rng
import matplotlib.pyplot as plt
from matplotlib import cm

# lf (loss functions), trainers, and ss (step-size strategies) are modules
# from the surrounding project; their exact import paths depend on its layout.


def bayesian_neural_net_hmc_demo():
    """
    Trains a Bayesian neural net on the training set using Hamiltonian Monte Carlo.
    """

    xs, ys = create_dataset()
    net = create_net()
    tst_data, X, Y = create_grid(-12, 12, 50)

    # set up the unnormalized log posterior: the cross entropy is an average
    # over data points, so it is scaled by the number of points, and the
    # weight decay term plays the role of a Gaussian prior on the weights
    trn_target, trn_loss = lf.CrossEntropy(net.output)
    regularizer = lf.WeightDecay(net.parms, wdecay)
    sampler = trainers.HMC(
        model=net,
        trn_data=[xs, ys],
        trn_loss=xs.shape[0] * trn_loss + regularizer,
        trn_target=trn_target
    )

    # draw posterior samples; L is the number of leapfrog steps per HMC
    # transition and me is presumably the leapfrog step size
    ensemble = sampler.gen(
        n_samples=2000,
        L=100,
        me=0.3,
        show_traces=True
    )

    # average the ensemble's predictions on the grid of test points
    avg_pred = ensemble.eval(tst_data)

    # plot the prediction surface
    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')  # fig.gca(projection=...) was removed in matplotlib 3.6
    Z = avg_pred.reshape(X.shape)
    ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm, linewidth=0)
    ax.plot(xs[ys == 0, 0], xs[ys == 0, 1], 'b.', ms=12)
    ax.plot(xs[ys == 1, 0], xs[ys == 1, 1], 'r.', ms=12)
    ax.view_init(elev=90, azim=-90)
    plt.xlabel('x1')
    plt.ylabel('x2')
    plt.axis('equal')
    ax.axis([-12, 12, -12, 12])
    fig.suptitle('Bayesian prediction surface')

    # plot the prediction surfaces of a few sample networks
    fig = plt.figure()
    fig.suptitle('Sample prediction surfaces')

    # pick six networks at random from the ensemble
    for c, i in enumerate(rng.randint(0, ensemble.n_diff_models, 6)):

        ax = fig.add_subplot(2, 3, c + 1, projection='3d')
        Z = ensemble.eval_model(i, tst_data).reshape(X.shape)
        ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm, linewidth=0)
        ax.plot(xs[ys == 0, 0], xs[ys == 0, 1], 'b.', ms=12)
        ax.plot(xs[ys == 1, 0], xs[ys == 1, 1], 'r.', ms=12)
        ax.view_init(elev=90, azim=-90)
        plt.xlabel('x1')
        plt.ylabel('x2')
        plt.axis('equal')
        ax.axis([-12, 12, -12, 12])

    plt.show()
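

# The demos assume a few helpers defined elsewhere in the project. The sketch
# below shows plausible implementations of create_dataset and create_grid
# (two Gaussian blobs with binary labels; a square evaluation grid); the real
# versions, and create_net, live in the surrounding codebase.
def create_dataset(n_per_class=100):
    """Two 2-d Gaussian blobs labelled 0 and 1 (assumed form)."""
    xs = np.vstack([rng.randn(n_per_class, 2) + [-4.0, -4.0],
                    rng.randn(n_per_class, 2) + [4.0, 4.0]])
    ys = np.concatenate([np.zeros(n_per_class, dtype=int),
                         np.ones(n_per_class, dtype=int)])
    return xs, ys


def create_grid(lo, hi, n):
    """n x n grid of points covering [lo, hi]^2, flattened to shape (n*n, 2),
    plus the meshgrid axes used for plotting."""
    x = np.linspace(lo, hi, n)
    X, Y = np.meshgrid(x, x)
    return np.column_stack([X.flatten(), Y.flatten()]), X, Y
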
def train(model, a):
    """
    Trains a model on the loaded dataset with SGD using Adam steps; training
    stops early when the validation loss stops improving (the patience
    setting). a is the Adam step size (learning rate).
    """

    assert is_data_loaded(), "Dataset hasn't been loaded"

    regularizer = lf.WeightDecay(model.parms, weight_decay_rate)

    trainer = trainers.SGD(model=model,
                           trn_data=[data.trn.x],
                           trn_loss=model.trn_loss + regularizer,
                           val_data=[data.val.x],
                           val_loss=model.trn_loss,
                           step=ss.Adam(a=a))

    trainer.train(minibatch=minibatch,
                  patience=patience,
                  monitor_every=monitor_every)
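

# ss.Adam(a=a) selects the parameter-update rule used by the SGD trainer.
# For reference, a single standard Adam update (Kingma & Ba, 2015) on a flat
# parameter vector looks like the sketch below; this is the textbook
# algorithm, not the project's ss.Adam class.
def adam_step(theta, grad, m, v, t, a=1e-3, b1=0.9, b2=0.999, eps=1e-8):
    m = b1 * m + (1.0 - b1) * grad       # update biased first-moment estimate
    v = b2 * v + (1.0 - b2) * grad ** 2  # update biased second-moment estimate
    m_hat = m / (1.0 - b1 ** t)          # bias correction (t counts from 1)
    v_hat = v / (1.0 - b2 ** t)
    return theta - a * m_hat / (np.sqrt(v_hat) + eps), m, v
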
def fit_neural_net_demo():
    """
    Fits a non-Bayesian neural net to the training data by minimizing cross entropy.
    """

    xs, ys = create_dataset()
    net = create_net()

    # train the net by minimizing the average cross entropy; the weight decay
    # term is divided by the number of data points to keep it on the same scale
    trn_target, trn_loss = lf.CrossEntropy(net.output)
    regularizer = lf.WeightDecay(net.parms, wdecay)
    trainer = trainers.SGD(
        model=net,
        trn_data=[xs, ys],
        trn_loss=trn_loss + regularizer / xs.shape[0],
        trn_target=trn_target
    )
    trainer.train(tol=1.0e-9, monitor_every=10, show_progress=True)

    # make predictions
    tst_data, X, Y = create_grid(-12, 12, 50)
    pred = net.eval(tst_data)

    # plot the prediction surface
    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')  # fig.gca(projection=...) was removed in matplotlib 3.6
    Z = pred.reshape(X.shape)
    ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm, linewidth=0)
    ax.plot(xs[ys == 0, 0], xs[ys == 0, 1], 'b.', ms=12)
    ax.plot(xs[ys == 1, 0], xs[ys == 1, 1], 'r.', ms=12)
    ax.view_init(elev=90, azim=-90)
    plt.xlabel('x1')
    plt.ylabel('x2')
    plt.axis('equal')
    ax.axis([-12, 12, -12, 12])
    fig.suptitle('Prediction surface of trained net')

    plt.show()
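

# For reference, one Hamiltonian Monte Carlo transition on a flat parameter
# vector. The sampler above presumably does the equivalent with the
# unnormalized log posterior -(N * cross entropy + weight decay); L is the
# number of leapfrog steps per transition and the step size corresponds to
# the me argument. This is the standard algorithm (Neal, 2011), not the
# project's trainers.HMC class.
def hmc_transition(theta, log_p, grad_log_p, L=100, eps=0.3):
    r = rng.randn(*theta.shape)                 # resample the momentum
    theta_new, r_new = theta.copy(), r.copy()
    r_new += 0.5 * eps * grad_log_p(theta_new)  # first half step for momentum
    for step in range(L):
        theta_new += eps * r_new                # full step for position
        if step < L - 1:
            r_new += eps * grad_log_p(theta_new)
    r_new += 0.5 * eps * grad_log_p(theta_new)  # final half step for momentum

    # Metropolis accept/reject on the joint Hamiltonian
    log_alpha = (log_p(theta_new) - 0.5 * np.dot(r_new, r_new)
                 - log_p(theta) + 0.5 * np.dot(r, r))
    return theta_new if np.log(rng.rand()) < log_alpha else theta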