Example #1
import argparse

import numpy as np
import tensorflow as tf  # TF 1.x API (tf.estimator.inputs, tf.layers)

import mnist  # project-local module with the MNIST loading helpers

# Minimal CLI: the snippet only uses the --central flag.
parser = argparse.ArgumentParser()
parser.add_argument("--central", action="store_true",
                    help="use the centralized 32x32 variant of the images")


def main(argv):
    args = parser.parse_args(argv[1:])
    width = 28  # standard MNIST digits are 28x28
    train_x, train_y = mnist.train()
    test_x, test_y = mnist.test()

    if args.central:
        print("Use centralize now")
        width = 32
        train_x = mnist.train_32()
        test_x = mnist.test_32()

    # Scale raw pixel values from [0, 255] into [0, 1).
    train_x = train_x / 256
    test_x = test_x / 256

    train_y = train_y.astype(np.int32)
    test_y = test_y.astype(np.int32)
    mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn,
                                              params={"width": width})
    train_input_fn = tf.estimator.inputs.numpy_input_fn(x={"x": train_x},
                                                        y=train_y,
                                                        batch_size=100,
                                                        num_epochs=None,
                                                        shuffle=True)
    mnist_classifier.train(input_fn=train_input_fn, steps=10000)
    eval_input_fn = tf.estimator.inputs.numpy_input_fn(x={"x": test_x},
                                                       y=test_y,
                                                       num_epochs=1,
                                                       shuffle=False)
    eval_results = mnist_classifier.evaluate(input_fn=eval_input_fn)
    print(eval_results)
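
The `cnn_model_fn` passed to the Estimator is not part of the snippet. Below is a minimal sketch of what it could look like under the TF 1.x Estimator API; the layer sizes, dropout rate, and optimizer are assumptions, not taken from the original code:

import tensorflow as tf


def cnn_model_fn(features, labels, mode, params):
    width = params["width"]
    # Two conv/pool blocks followed by a dense head, as in the classic
    # TF 1.x MNIST Estimator tutorial.
    x = tf.reshape(features["x"], [-1, width, width, 1])
    x = tf.layers.conv2d(x, filters=32, kernel_size=5, padding="same",
                         activation=tf.nn.relu)
    x = tf.layers.max_pooling2d(x, pool_size=2, strides=2)
    x = tf.layers.conv2d(x, filters=64, kernel_size=5, padding="same",
                         activation=tf.nn.relu)
    x = tf.layers.max_pooling2d(x, pool_size=2, strides=2)
    x = tf.layers.flatten(x)
    x = tf.layers.dense(x, 1024, activation=tf.nn.relu)
    x = tf.layers.dropout(x, rate=0.4,
                          training=mode == tf.estimator.ModeKeys.TRAIN)
    logits = tf.layers.dense(x, 10)

    predictions = {"classes": tf.argmax(logits, axis=1),
                   "probabilities": tf.nn.softmax(logits)}
    if mode == tf.estimator.ModeKeys.PREDICT:
        return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)

    loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
    if mode == tf.estimator.ModeKeys.TRAIN:
        train_op = tf.train.AdamOptimizer(1e-4).minimize(
            loss, global_step=tf.train.get_global_step())
        return tf.estimator.EstimatorSpec(mode=mode, loss=loss,
                                          train_op=train_op)

    eval_metric_ops = {"accuracy": tf.metrics.accuracy(
        labels=labels, predictions=predictions["classes"])}
    return tf.estimator.EstimatorSpec(mode=mode, loss=loss,
                                      eval_metric_ops=eval_metric_ops)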
Example #2
from sklearn import svm
from sklearn.decomposition import PCA

import mnist  # project-local data helpers; Timer and process come from the same project


def go_svm(proc, pca_enabled, central):
    print("SVM")
    print("0-1", proc)  # only 0 and 1
    print("Central", central)  # 32x32
    print("PCA", pca_enabled)  # PCA to 50 dims

    train_x, train_y = mnist.train()
    test_x, test_y = mnist.test()

    if central:
        train_x = mnist.train_32()
        test_x = mnist.test_32()

    if proc:
        with Timer("process"):
            train_x = process(train_x)
            test_x = process(test_x)

    train_x = train_x.reshape((train_x.shape[0], -1))
    test_x = test_x.reshape((test_x.shape[0], -1))

    if pca_enabled:
        with Timer("PCA"):
            pca = PCA(n_components=50, whiten=True)
            train_x = pca.fit_transform(train_x)
            test_x = pca.transform(test_x)

    with Timer("train"):
        clf = svm.SVC(cache_size=7000)
        clf.fit(train_x, train_y)
        print("Accuracy:", clf.score(test_x, test_y))
Example #3
from sklearn.neighbors import KNeighborsClassifier

import mnist  # project-local data helpers; Timer and process as in the SVM example


def go(proc, central):
    print("kNN")
    print("0-1", proc)
    print("central", central)

    train_x, train_y = mnist.train()
    test_x, test_y = mnist.test()

    if central:
        train_x = mnist.train_32()
        test_x = mnist.test_32()

    if proc:
        with Timer("process"):
            train_x = process(train_x)
            test_x = process(test_x)

    train_x = train_x.reshape((train_x.shape[0], -1))
    test_x = test_x.reshape((test_x.shape[0], -1))

    with Timer("kNN fit"):
        neigh = KNeighborsClassifier(n_neighbors=5, n_jobs=-1)
        neigh.fit(train_x, train_y)

    with Timer("kNN test"):
        print("Accuracy:", neigh.score(test_x, test_y))
Example #4
from sklearn.decomposition import PCA
from sklearn.neural_network import MLPClassifier

import mnist  # project-local data helpers; Timer as in the earlier examples


def go(pca_enabled=False, centralize=False):
    print("PCA:", pca_enabled)
    print("Centralize:", centralize)

    train_x, train_y = mnist.train()
    test_x, test_y = mnist.test()

    if centralize:
        train_x = mnist.train_32()
        test_x = mnist.test_32()

    train_x = train_x.reshape((train_x.shape[0], -1))
    test_x = test_x.reshape((test_x.shape[0], -1))

    if pca_enabled:
        with Timer("PCA"):
            pca = PCA(n_components=50, whiten=True)
            train_x = pca.fit_transform(train_x)
            test_x = pca.transform(test_x)

    with Timer("train"):
        max_iter = 1000 if centralize or pca_enabled else 200
        clf = MLPClassifier(max_iter=max_iter, verbose=True)
        clf.fit(train_x, train_y)
        print("Accuracy:", clf.score(test_x, test_y))
Example #5
import mnist  # project-local module providing My_VAE_V2, train and test


def run():
    m = mnist.My_VAE_V2(10)
    mnist.train(m, 10)
    mnist.test(m)  # will output dd.png
    return m
Example #6
        # Tail of train_student's distillation loop: for each input x the
        # student is fit to the teacher's output rather than to the raw label.
        teacher_out = teacher.forward_pass(x)
        student.train(x, teacher_out)


if __name__ == "__main__":
    teacher_layers = [
        Layer(784, 16, LeakyReLU()),
        Layer(16, 16, LeakyReLU()),
        Layer(16, 10, LeakyReLU()),
    ]
    teacher_net = NeuralNetwork(teacher_layers, CrossEntropyLoss(), 0.001)

    train_data = load_data("mnistdata/mnist_train.csv",
                           delimiter=",",
                           dtype=int)
    train(teacher_net, train_data)

    test_data = load_data("mnistdata/mnist_test.csv", delimiter=",", dtype=int)
    accuracy = test(teacher_net, test_data)
    print(f"Accuracy of the teacher net is {100*accuracy:.2f}")

    student_layers = [
        Layer(784, 10, Sigmoid()),
    ]
    student_net = NeuralNetwork(student_layers, MSELoss(), 0.005)

    train_student(student_net, teacher_net, train_data)

    student_accuracy = test(student_net, test_data)
    print(f"Accuracy of the student net is {100*accuracy:.2f}")