Code example #1
# Imports assumed by the three examples below (project-local module paths are guesses):
import time

import numpy as np
from keras.datasets import mnist, cifar10

import plotter                       # project-local plotting helpers
from autoencoder import AutoEncoder  # project-local autoencoder class
from ga import Algorithm             # project-local genetic algorithm
# do_epoch and rgb2gray are further project-local helpers, sketched after the examples.
def test_mnist():
    # Train the autoencoder on MNIST with the genetic algorithm
    (train_x, _), (_, _) = mnist.load_data()
    train_x = train_x / 255  # Normalizing images
    # plotter.plot_mnist(train_x, "original")                           # Show original mnist images

    num_img, img_dim, _ = train_x.shape  # Number of images and side length of each (square) image
    num_features = 500
    mnist_in = np.reshape(
        train_x, (num_img, img_dim * img_dim)
    ).T  # Flatten each image, then transpose so each column is one image
    ga = Algorithm(x=mnist_in, num_features=num_features, debug=1, pop_size=20)
    w_out, best_cost, logs = ga.run()

    print(
        f"Average time/generation (sec): {sum(logs['times']) / len(logs['times'])}"
    )
    print(f"Total time to run GA (sec): {logs['times']}")

    ae = AutoEncoder(mnist_in, num_features, random_seed=1234, use_gpu=True)
    z, _ = ae.psi(w_out)  # Z for the GA-found weights; second return value unused here
    phi_w_img = ae.phi(w_out)  # Calculate phi(W)
    new_mnist = z @ phi_w_img  # Recreate original images using Z and phi(W)
    new_imgs = np.reshape(
        new_mnist.T, train_x.shape)  # Transpose back and reshape to the original image shape
    plotter.plot_mnist(new_imgs,
                       f"{num_features}_features_ga")  # Show new images

    plotter.plot_loss(logs['min'], "MNIST_Gradient_Loss_Over_Generations")
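
The AutoEncoder class is project-local and not shown here. As orientation only, below is a minimal sketch of the interface this test relies on, with shapes inferred from the call sites; the sigmoid in phi and the least-squares decoder in psi are assumptions, not the project's actual implementation. The GA wrapper Algorithm is likewise project-local; the call site only pins down that run() returns the best weight matrix, its cost, and a logs dict with per-generation 'times' and 'min' lists.

import numpy as np

class AutoEncoder:
    # Hypothetical stand-in; the real class also uses random_seed and use_gpu.
    def __init__(self, x, num_features, random_seed=None, use_gpu=False):
        self.x = x  # (num_pixels, num_img) data matrix, one image per column
        self.num_features = num_features

    def phi(self, w):
        # Hidden activations for weights w of shape (num_pixels, num_features);
        # a sigmoid of W^T X is one plausible choice. Result: (num_features, num_img).
        return 1.0 / (1.0 + np.exp(-(w.T @ self.x)))

    def psi(self, w):
        # Least-squares decoder Z minimizing ||X - Z phi(W)||_F, plus that loss,
        # so that z @ ae.phi(w) reconstructs X as the test above expects.
        h = self.phi(w)
        z = np.linalg.lstsq(h.T, self.x.T, rcond=None)[0].T
        loss = np.linalg.norm(self.x - z @ h) ** 2 / self.x.shape[1]
        return z, loss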
Code example #2
def test_mnist(num_epochs=None):
    # Train the autoencoder on MNIST with plain gradient descent
    (train_x, _), (_, _) = mnist.load_data()
    train_x = train_x / 255  # Normalizing images
    # plotter.plot_mnist(train_x, "original")                           # Show original mnist images

    num_img, img_dim, _ = train_x.shape  # Number of images and side length of each (square) image
    learning_rate = 0.5
    num_features = 200
    loss_values = []  # Loss at every epoch
    loss_values_less = []  # Loss at every epoch except epoch 0 (for a cleaner plot)
    loss_diffs = []  # Change in loss between consecutive epochs

    w_in = np.random.normal(
        size=(img_dim * img_dim,
              num_features))  # Random initial W matrix to optimize
    mnist_in = np.reshape(
        train_x, (num_img, img_dim * img_dim)
    ).T  # Flatten each image, then transpose so each column is one image
    ae = AutoEncoder(mnist_in, num_features, random_seed=1234, use_gpu=True)
    start_time = time.time()
    times = []
    if num_epochs:
        for epoch in range(num_epochs):
            w_in, z_grd = do_epoch(ae, w_in, learning_rate, loss_values, times,
                                   loss_values_less, loss_diffs, epoch,
                                   start_time)
    else:
        # Run until the average change in loss over the last few epochs drops below tol
        epoch_history_check = 5
        epoch = 0
        loss_avg = 1000  # Sentinel so the loop body runs at least once
        tol = 0.03
        while loss_avg > tol:
            w_in, z_grd = do_epoch(ae, w_in, learning_rate, loss_values, times,
                                   loss_values_less, loss_diffs, epoch,
                                   start_time)
            loss_check = loss_diffs[-epoch_history_check:]
            loss_avg = sum(loss_check) / len(loss_check)
            epoch += 1

    print(
        f"Total time to run gradient descent (sec): {time.time() - start_time}")
    phi_w_img = ae.phi(w_in)  # Calculate phi(W)
    new_mnist = z_grd @ phi_w_img  # Recreate original images using Z and phi(W)
    new_imgs = np.reshape(
        new_mnist.T, train_x.shape)  # Transpose back and reshape to the original image shape
    plotter.plot_mnist(new_imgs,
                       f"{num_features}_features_gradient")  # Show new images

    # print(loss_values)
    plotter.plot_loss(loss_values, "MNIST_Gradient_Loss_Over_Epochs")
    plotter.plot_loss(
        loss_values_less,
        "MNIST_Gradient_Loss_Over_Epochs_all_epochs_except_zero")
Code example #3
def test_cifar10(num_epochs=None):
    # (train_x, _), (_, _) = cifar10.load_data()
    (_, _), (train_x, _) = cifar10.load_data()  # Use the smaller test split here
    print(train_x.shape)
    plotter.plot_mnist(train_x, "original")
    train_x = rgb2gray(train_x)  # Collapse RGB to a single grayscale channel
    train_x = train_x / 255  # Normalize pixel values to [0, 1]
    plotter.plot_mnist(train_x, "grayscale")
    num_img, img_h, img_w = train_x.shape
    print(train_x.shape)
    learning_rate = 0.5
    num_features = 768
    loss_values = []  # Loss at every epoch
    loss_values_less = []  # Loss at every epoch except epoch 0 (for a cleaner plot)
    loss_diffs = []  # Change in loss between consecutive epochs

    w_in = np.random.normal(size=(img_h * img_w, num_features))  # Random initial W matrix
    cifar_in = np.reshape(train_x, (num_img, img_h * img_w)).T  # Each column is one flattened image
    # cifar_in = np.reshape(train_x, (img_h, img_w, num_img*img_ch))
    print(cifar_in.shape)

    ae = AutoEncoder(cifar_in, num_features, random_seed=1234, use_gpu=True)
    start_time = time.time()
    times = []
    if num_epochs:
        for epoch in range(num_epochs):
            w_in, z_grd = do_epoch(ae, w_in, learning_rate, loss_values, times, loss_values_less, loss_diffs, epoch,
                                   start_time)
    else:
        # Run until the average change in loss over the last few epochs drops below tol
        epoch_history_check = 5
        epoch = 0
        loss_avg = 1000  # Sentinel so the loop body runs at least once
        tol = 0.03
        while loss_avg > tol:
            w_in, z_grd = do_epoch(ae, w_in, learning_rate, loss_values, times, loss_values_less, loss_diffs, epoch,
                                   start_time)
            loss_check = loss_diffs[-epoch_history_check:]
            loss_avg = sum(loss_check) / len(loss_check)
            epoch += 1

    print(f"Total time to run gradient decent (sec): {time.time() - start_time}")
    phi_w_img = ae.phi(w_in)  # Calculate phi(W)
    new_cifar = z_grd @ phi_w_img  # Recreate original images using Z and phi(W)
    print(new_cifar.shape)
    new_imgs = np.reshape(new_cifar.T, train_x.shape)  # Transpose back and reshape to the original image shape
    plotter.plot_mnist(new_imgs, f"{num_features}_features_gradient")  # Show new images

    # print(loss_values)
    plotter.plot_loss(loss_values, "CIFAR10_Gradient_Loss_Over_Epochs")
    plotter.plot_loss(loss_values_less, "CIFAR10_Gradient_Loss_Over_Epochs_all_epochs_except_zero")
    # return train_x
    return new_imgs
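
rgb2gray is also project-local. A minimal sketch consistent with its use above (uint8 RGB in, values still on the 0-255 scale out, so the division by 255 that follows makes sense) is a weighted channel sum using the standard ITU-R BT.601 luma weights; the real helper may differ.

import numpy as np

def rgb2gray(imgs):
    # Collapse (num_img, h, w, 3) RGB stacks to (num_img, h, w) grayscale.
    # Keeps the 0-255 range so the caller's division by 255 still applies.
    return imgs @ np.array([0.299, 0.587, 0.114])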