import matplotlib.pyplot as plt
import numpy as np

# load_digits_train_data, load_digits_test_data, SoftmaxClassifier,
# print_score and export_fig are assumed to be project-local helpers
# imported elsewhere in this module.


def show_digits(grid=(4, 4)):
    """Show a grid of random training digits, titled with their labels."""
    X_train, y_train = load_digits_train_data()
    fig, axes = plt.subplots(grid[0], grid[1])
    n = X_train.shape[0]
    ridx = np.random.randint(0, n, size=grid)  # grid-shaped random indices
    for i in range(grid[0]):
        for j in range(grid[1]):
            # Each row of X_train is a flattened 28x28 grayscale image.
            axes[i][j].imshow(X_train[ridx[i, j]].reshape(28, 28),
                              cmap=plt.get_cmap('bone'))
            axes[i][j].set_title('{0}'.format(y_train[ridx[i, j]]))
            axes[i][j].axis('off')
    plt.show()
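

# For reference, the nested loop above can be collapsed with axes.flat and a
# flat index vector; a minimal equivalent sketch, assuming the same
# load_digits_train_data helper (show_digits_flat itself is hypothetical).
def show_digits_flat(grid=(4, 4)):
    X_train, y_train = load_digits_train_data()
    fig, axes = plt.subplots(*grid)
    ridx = np.random.randint(0, X_train.shape[0], size=grid[0] * grid[1])
    for ax, k in zip(axes.flat, ridx):
        ax.imshow(X_train[k].reshape(28, 28), cmap='bone')
        ax.set_title(str(y_train[k]))
        ax.axis('off')
    plt.show()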


def digits_visualize(epochs=1, batch_size=64, lr=0.01):
    """Train a softmax classifier and plot its 10 weight vectors as images."""
    sc = SoftmaxClassifier(num_classes=10)
    X_train, y_train = load_digits_train_data()
    sc.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, lr=lr)
    w = sc.W
    # Unpack each of the 10 weight columns into a 28x28 image (column-major),
    # then swap the first two axes so each digit image is upright.
    rs = w.reshape(28, 28, 10, order='F')
    rs2 = np.transpose(rs, axes=[1, 0, 2])
    fig, ax = plt.subplots()
    # Lay the 10 images side by side into a single 28x280 strip.
    ax.imshow(rs2.reshape(28, -1, order='F'), cmap='bone')
    ax.set_title('digit weight vectors visualized')
    export_fig(fig, 'softmax_weight_vector.png')
    plt.show()
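

# The reshape/transpose dance in digits_visualize is equivalent to a C-order
# reshape of each weight column followed by horizontal stacking; a sketch of
# that equivalence, assuming sc.W has shape (784, 10) with one flattened
# 28x28 image per column (tile_weight_columns is hypothetical).
def tile_weight_columns(W, side=28):
    return np.hstack([W[:, k].reshape(side, side) for k in range(W.shape[1])])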


def digits_test(epochs=10, batch_size=32, lr=0.05):
    """Train on digits, print train/test scores, and plot the cost curve."""
    print(
        'digits test: params - epochs: {0}, batch_size: {1}, learning rate: {2}'
        .format(epochs, batch_size, lr))
    sc = SoftmaxClassifier(num_classes=10)
    X_train, y_train = load_digits_train_data()
    X_test, y_test = load_digits_test_data()
    sc.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, lr=lr)
    print_score(sc, X_train, X_test, y_train, y_test)
    fig, ax = plt.subplots()
    hist = sc.history  # per-epoch training cost, assumed recorded by fit()
    ax.plot(np.arange(1, len(hist) + 1), hist, 'b-x')
    ax.set_xlabel('epoch')
    ax.set_ylabel('Ein (mean NLL)')
    ax.set_title('softmax cost on digits as a function of epoch')
    export_fig(fig, 'softmax_cost_per_epoch.png')
    plt.show()
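

if __name__ == '__main__':
    # Illustrative driver (an assumption, not part of the original module):
    # preview the data, visualize the learned weights, then train and score.
    show_digits()
    digits_visualize()
    digits_test()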