Example #1
import matplotlib.pyplot as plt

# Optimizer callback: every 30 iterations, redraw the current parameters
# (pushed through a sigmoid so they lie in [0, 1]) and print the batched loss.
# sigmoid, plot_images, and batched_loss are defined elsewhere in the
# assignment code this fragment comes from.
def print_perf(params, iter, gradient):
    if iter % 30 == 0:
        # save_images(sigmoid(params), 'q4plot.png')
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(sigmoid(params), ax)
        print(batched_loss(params, iter))
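# Usage note (not from the original source): this function is meant to be
# passed as the `callback` argument of an autograd optimizer, e.g.
# callback=print_perf in the adam(...) call shown in Example #3 below.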
Example #2
import numpy as np
import matplotlib.pyplot as plt

import data  # assignment helper module (save_images / plot_images)

# train_images, train_labels, test_images, test_labels, binarize_data, and
# optimization are defined in the rest of the assignment script this
# fragment was taken from.


def prediction(X, weight, Y):
    # Per-row predicted class and the average predictive log-likelihood.
    log_prob = log_softmax(X, weight)
    avg = avg_log_likelihood(log_prob, Y)
    # exp is monotonic, so taking argmax over the log-probabilities gives
    # the same predictions as argmax over the probabilities.
    pred = np.argmax(log_prob, axis=1)
    return pred, avg
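
# Not in the original fragment: one plausible implementation of the two
# helpers used above, assuming Y holds integer class labels (as in the
# calls further down this script).
def log_softmax(X, weight):
    # Numerically stable row-wise log-softmax of the logits X @ weight.
    z = np.dot(X, weight)
    z = z - np.max(z, axis=1, keepdims=True)
    return z - np.log(np.sum(np.exp(z), axis=1, keepdims=True))


def avg_log_likelihood(log_prob, Y):
    # Mean log-probability assigned to the true class label of each row.
    return np.mean(log_prob[np.arange(len(Y)), Y])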


if __name__ == "__main__":

    # Binarize the pixels and pull integer class labels out of the one-hot
    # label matrices; fit the weights on the first 300 training examples
    # (the first argument, 0.1, is presumably the optimizer's step size).
    train_images = binarize_data(train_images)
    test_images = binarize_data(test_images)
    trainY = np.argmax(train_labels, axis=1)
    testY = np.argmax(test_labels, axis=1)
    weight, loss = optimization(0.1, train_images[:300], train_labels[:300])

    data.save_images(weight.T, "unreg_Q3_weight")
    data.plot_images(weight.T, plt.figure().add_subplot(111))
    plt.show()

    print("Train Set Result:")
    class_label = np.argmax(train_labels[:300], axis=1)
    pred, avg = prediction(train_images[:300], weight, class_label)
    print("avg prediction Error is {}".format(np.mean(pred != class_label)))
    print("The predictive avg log-likelihood is {}".format(avg))

    print("\n")

    print("Test Set Result:")
    class_label = np.argmax(test_labels, axis=1)
    pred, avg = prediction(test_images, weight, class_label)
    print("avg prediction Error is {}".format(np.mean(pred != class_label)))
    print("The predictive avg log-likelihood is {}".format(avg))
Example #3
import matplotlib.pyplot as plt

from autograd import elementwise_grad
from autograd.misc.optimizers import adam  # autograd >= 1.2 (older versions: autograd.optimizers)

# batched_loss, init_params, sigmoid, plot_images, save_images, advanced_bayes,
# and train_images are defined earlier in the assignment script this
# fragment comes from.

# Get gradient of objective using autograd.
objective_grad = elementwise_grad(batched_loss)
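# Quick sanity check (not in the original): elementwise_grad returns the
# per-element derivative of an elementwise function, e.g. for f(x) = x**2:
#   elementwise_grad(lambda x: x ** 2)(np.array([1., 2., 3.]))  # -> array([2., 4., 6.])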


# Callback handed to adam below: every 30 iterations, plot the current
# parameters and print the batched loss.
def print_perf(params, iter, gradient):
    if iter % 30 == 0:
        # save_images(sigmoid(params), 'q4plot.png')
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(sigmoid(params), ax)
        print(batched_loss(params, iter))


# The optimizers provided by autograd can optimize lists, tuples, or dicts of parameters.
# You may use these optimizers for Q4, but implement your own gradient descent optimizer for Q3!
optimized_params = adam(objective_grad,
                        init_params,
                        step_size=0.2,
                        num_iters=10,
                        callback=print_perf)
#optimized_params = adam(objective_grad, init_params, step_size=0.2, num_iters=10000, callback=print_perf)
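
# Sketch (not from the original code) of the kind of hand-rolled gradient
# descent optimizer the comment above asks for in Q3. It follows the same
# (grad_fn, init_params, step_size, num_iters, callback) convention as
# autograd's optimizers and assumes params is a flat numpy array;
# `gradient_descent` is a hypothetical name.
def gradient_descent(grad_fn, init_params, step_size=0.1, num_iters=100,
                     callback=None):
    params = init_params
    for i in range(num_iters):
        g = grad_fn(params, i)
        if callback:
            callback(params, i, g)
        params = params - step_size * g  # step opposite the gradient
    return params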

# Q4D code: just need to rerun 2(f)'s code given optimized_params.
results = advanced_bayes(train_images[0:20, :], optimized_params)
fig = plt.figure(1)
fig.clf()
ax = fig.add_subplot(111)
plot_images(results, ax)
save_images(results, 'Q4D')
Example #4
from tensorflow.keras.callbacks import ModelCheckpoint

import data  # project-local module: dataset paths, generators, plotting helpers
from model import build_model

if __name__ == '__main__':
    X_train, y_train = data.get_data_generator(data.IMAGE_TRAIN_PATH,
                                               data.MASK_TRAIN_PATH)
    # The second argument must point at the validation masks;
    # MASK_VALIDATION_PATH is assumed here, by analogy with
    # MASK_TRAIN_PATH and MASK_TEST_PATH.
    X_val, y_val = data.get_data_generator(data.IMAGE_VALIDATION_PATH,
                                           data.MASK_VALIDATION_PATH)

    model = build_model()
    model_checkpoint = ModelCheckpoint('unet_200_steps.hdf5',
                                       monitor='loss',
                                       verbose=1,
                                       save_best_only=True)

    # zip combines the paired image/mask generators into one generator of
    # (image_batch, mask_batch) tuples, which model.fit consumes directly.
    # No batch_size is passed: the generators already yield fixed-size
    # batches (presumably 16, given steps_per_epoch = 234 // 16), and Keras
    # rejects batch_size alongside generator input.
    history = model.fit(zip(X_train, y_train),
                        callbacks=[model_checkpoint],
                        epochs=200,
                        steps_per_epoch=234 // 16,
                        validation_data=zip(X_val, y_val),
                        validation_steps=10)
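
    # Not from the original script: ModelCheckpoint above saved the best
    # weights seen during training, so reload them before evaluating
    # (standard Keras API, same filename as above).
    model.load_weights('unet_200_steps.hdf5')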

    X_test, y_test = data.get_data_generator(data.IMAGE_TEST_PATH,
                                             data.MASK_TEST_PATH)
    # Grab one batch of test images with matching masks and compare the
    # ground-truth masks against the model's first predicted mask.
    x = next(X_test)
    y = next(y_test)
    data.plot_images(y, model.predict(x)[0])