Example #1
File: nn.py Project: emmamme/ML-A2
def main():
    """Trains a NN."""
   
    # Hyper-parameters. Modify them if needed.



    """         num_hiddens = [16, 32]
                eps = 0.01
                momentum = 0.0
                num_epochs = 1000
                batch_size = 100
    """

    # Input-output dimensions.
    num_inputs = 2304
    num_outputs = 7

    # Initialize model.
    model = InitNN(num_inputs, num_hiddens, num_outputs)

    # Uncomment to reload trained model here.
    model = Load('nn_model_20.npz')
    stats = Load('nn_stats_20.npz')
    title1 = 'nn momentum=0.9 8 16 Cross Entropy'
    title2 = 'nn momentum=0.9 8 16 Accuracy'

    DisplayPlot(stats['train_ce'], stats['valid_ce'], 'Cross Entropy', title1, number=0)
    DisplayPlot(stats['train_acc'], stats['valid_acc'], 'Accuracy', title2, number=1)

    """ 
    stats = {
            'train_ce': train_ce_list,
            'valid_ce': valid_ce_list,
            'train_acc': train_acc_list,
            'valid_acc': valid_acc_list}
    """


    # Check gradient implementation.
    print('Checking gradients...')
    x = np.random.rand(10, 48 * 48) * 0.1
    CheckGrad(model, NNForward, NNBackward, 'W3', x)
    CheckGrad(model, NNForward, NNBackward, 'b3', x)
    CheckGrad(model, NNForward, NNBackward, 'W2', x)
    CheckGrad(model, NNForward, NNBackward, 'b2', x)
    CheckGrad(model, NNForward, NNBackward, 'W1', x)
    CheckGrad(model, NNForward, NNBackward, 'b1', x)
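
The CheckGrad calls above presumably compare the analytic gradients computed by NNBackward against numerical estimates for each named parameter. The helper below is only a minimal sketch of that idea using central differences; names such as finite_difference_check, loss_fn and grad_fn are illustrative and not part of the assignment code.

import numpy as np

def finite_difference_check(loss_fn, grad_fn, w, eps=1e-5, tol=1e-6):
    # loss_fn(w) returns a scalar loss; grad_fn(w) returns the analytic gradient.
    analytic = grad_fn(w)
    numeric = np.zeros_like(w)
    for i in range(w.size):
        step = np.zeros_like(w)
        step.flat[i] = eps
        # Central difference: (f(w + h) - f(w - h)) / (2h).
        numeric.flat[i] = (loss_fn(w + step) - loss_fn(w - step)) / (2 * eps)
    max_diff = np.max(np.abs(analytic - numeric))
    assert max_diff < tol, 'gradient check failed: max abs diff %g' % max_diff
    return max_diff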
Example #2
def main():
    """Trains a NN."""
    model_fname = 'nn_model.npz'
    stats_fname = 'nn_stats.npz'

    # Hyper-parameters. Modify them if needed.
    num_hiddens = [16, 32]
    eps = 0.01
    momentum = 0.5
    num_epochs = 1000
    batch_size = 100

    # Input-output dimensions.
    num_inputs = 2304
    num_outputs = 7

    # Initialize model.
    model = InitNN(num_inputs, num_hiddens, num_outputs)

    # Uncomment to reload trained model here.
    model = Load(model_fname)

    # Check gradient implementation.
    print('Checking gradients...')
    x = np.random.rand(10, 48 * 48) * 0.1
    CheckGrad(model, NNForward, NNBackward, 'W3', x)
    CheckGrad(model, NNForward, NNBackward, 'b3', x)
    CheckGrad(model, NNForward, NNBackward, 'W2', x)
    CheckGrad(model, NNForward, NNBackward, 'b2', x)
    CheckGrad(model, NNForward, NNBackward, 'W1', x)
    CheckGrad(model, NNForward, NNBackward, 'b1', x)

    # Train model.
    stats = Train(model, NNForward, NNBackward, NNUpdate, eps, momentum,
                  num_epochs, batch_size)
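
Train drives minibatch gradient descent using eps (the learning rate) and momentum. The snippet below sketches the classical momentum step that an NNUpdate-style routine typically applies; momentum_step and its arguments are illustrative names, not the assignment's actual NNUpdate.

import numpy as np

def momentum_step(w, dw, v, eps=0.01, momentum=0.5):
    # w: parameter array; dw: gradient of the loss w.r.t. w;
    # v: velocity carried over from the previous step (same shape as w).
    v = momentum * v - eps * dw   # decay the old velocity, add the new step
    return w + v, v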
Example #3
def load_aug_data():
    train_file = '../411a3/train_aug.npz'
    train = Load(train_file)

    X_train = train['X_train']
    y_train = train['y_train']
    X_test = train['X_test']
    y_test = train['y_test']

    rnd_idx = np.arange(X_train.shape[0])
    np.random.shuffle(rnd_idx)
    X_train = X_train[rnd_idx]
    y_train = y_train[rnd_idx]

    X_train = X_train.reshape((-1, 1, 32, 30))
    X_test = X_test.reshape((-1, 1, 32, 30))

    X_train = X_train.astype('float32')
    X_test = X_test.astype('float32')
    y_train = y_train.astype('int32')
    y_test = y_test.astype('int32')

    # print('X_train shape:', X_train.shape)
    # print('X_test shape:', X_test.shape)
    # print('y_train shape:', y_train.shape)
    # print('y_test shape:', y_test.shape)

    return (X_train, y_train), (X_test, y_test)
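
Every example in this listing funnels through util's Load (and Save) on .npz files. A plausible minimal version, assuming the helpers simply wrap numpy's savez/load, is sketched below; the project's real util.py may do more.

import numpy as np

def Save(fname, data):
    # Write a dict of arrays to an .npz archive.
    np.savez(fname, **data)

def Load(fname):
    # Read an .npz archive back into a plain dict of arrays.
    npz = np.load(fname)
    return {key: npz[key] for key in npz.files}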
Example #4
def main():
    """Trains a CNN."""
    model_fname = 'cnn_model.npz'
    stats_fname = 'cnn_stats.npz'

    # Hyper-parameters. Modify them if needed.
    eps = 0.1
    momentum = 0
    num_epochs = 30
    filter_size = 5
    num_filters_1 = 8
    num_filters_2 = 16
    batch_size = 100

    # Input-output dimensions.
    num_channels = 1
    num_outputs = 7

    # Initialize model.
    model = InitCNN(num_channels, filter_size, num_filters_1, num_filters_2,
                    num_outputs)

    # Uncomment to reload trained model here.
    model = Load(model_fname)
    ShowMeans2(model['W1'], 0)

    # Check gradient implementation.
    print('Checking gradients...')
    x = np.random.rand(10, 48, 48, 1) * 0.1
    CheckGrad(model, CNNForward, CNNBackward, 'W3', x)
    CheckGrad(model, CNNForward, CNNBackward, 'b3', x)
    CheckGrad(model, CNNForward, CNNBackward, 'W2', x)
    CheckGrad(model, CNNForward, CNNBackward, 'b2', x)
    CheckGrad(model, CNNForward, CNNBackward, 'W1', x)
    CheckGrad(model, CNNForward, CNNBackward, 'b1', x)
Example #5
def plot_first_filters(model_fname):
    model = Load(model_fname)
    W1 = model['W1']
    plt.clf()
    for i in xrange(W1.shape[3]):
        plt.subplot(1, W1.shape[3], i + 1)
        plt.imshow(W1[:, :, 0, i], cmap=plt.cm.gray)
    plt.savefig('visualization/cnn_first_layer_filters.png')
    print("cnn_first_layer_filters.png saved")
Example #6
def plot_figures(stats_fname):
    stats = Load(stats_fname)
    train_ce_list = stats['train_ce']
    valid_ce_list = stats['valid_ce']
    train_acc_list = stats['train_acc']
    valid_acc_list = stats['valid_acc']
    figure_path = get_cnn_path(stats_fname)
    save_figure(train_ce_list, valid_ce_list, 'Cross Entropy', figure_path)
    save_figure(train_acc_list, valid_acc_list, 'Accuracy', figure_path)
    print('Figures saved to ' + figure_path)
Example #7
def _get_generator_list(self, dst_node, para_list):
    """returns the default generator list"""
    res = []
    for state in para_list:
        s = Load(state)
        s['ipsrc'] = choose_ip_addr(self.ipdests).rsplit('/')[0]
        s['ipdst'] = choose_ip_addr(dst_node.ipdests).rsplit('/')[0]
        gen = get_generator(s)
        res.append(gen)
    return res
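
The rsplit('/')[0] calls above strip the prefix length from a CIDR-style address before it is written into the generator description, for example (the address shown is made up):

'10.0.7.4/24'.rsplit('/')[0]   # -> '10.0.7.4'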
Example #8
def _config_traffic(self):
    nn = len(self.net.node_list)
    srv_node_list = [self.net.node_list[i] for i in xrange(nn)
                     if i in self.net.net_desc['srv_list']]
    start, end = self.ano_desc['T']
    for srv_node in srv_node_list:
        gen_desc = Load(self.ano_desc['gen_desc'])
        gen_desc['ipsrc'] = choose_ip_addr(self.ano_node.ipdests).rsplit('/')[0]
        gen_desc['ipdst'] = choose_ip_addr(srv_node.ipdests).rsplit('/')[0]
        self.ano_node.add_modulator(start=str(start),
                                    profile='((%d,),(1,))' % (end - start),
                                    generator=get_generator(gen_desc))
Example #9
def load_data():
    nb_test = 350
    train_file = '../411a3/train.npz'
    train = Load(train_file)
    X_train = train['X_train']
    y_train = train['y_train']
    rnd_idx = np.arange(X_train.shape[0])
    np.random.shuffle(rnd_idx)
    X_train = X_train[rnd_idx]
    y_train = y_train[rnd_idx]
    X_train = X_train.reshape((-1, 1, 32, 30))
    return (X_train[nb_test:], y_train[nb_test:]), (X_train[:nb_test],
                                                    y_train[:nb_test])
Example #10
def main():
    """Trains a NN."""
    model_fname = 'nn_model.npz'
    stats_fname = 'nn_stats.npz'

    # Hyper-parameters. Modify them if needed.
    num_hiddens = [16, 32]
    eps = 0.01
    momentum = 0.3
    num_epochs = 250
    batch_size = 100
    # 100 default

    # Input-output dimensions.
    num_inputs = 2304
    num_outputs = 7

    # Initialize model.
    model = InitNN(num_inputs, num_hiddens, num_outputs)

    model['v'] = momentum

    # Uncomment to reload trained model here.
    model = Load(model_fname)
    #ShowMeans(model['W1'],0)
    inputs_train, inputs_valid, inputs_test, target_train, target_valid, target_test = LoadData(
        '../toronto_face.npz')
    var = NNForward(model, inputs_train[119, :])
    y = var['y']
    y = Softmax(y)
    t = target_train[119, :]
    plotProb(y, t)

    # Check gradient implementation.
    print('Checking gradients...')
    x = np.random.rand(10, 48 * 48) * 0.1
    CheckGrad(model, NNForward, NNBackward, 'W3', x)
    CheckGrad(model, NNForward, NNBackward, 'b3', x)
    CheckGrad(model, NNForward, NNBackward, 'W2', x)
    CheckGrad(model, NNForward, NNBackward, 'b2', x)
    CheckGrad(model, NNForward, NNBackward, 'W1', x)
    CheckGrad(model, NNForward, NNBackward, 'b1', x)
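
Earlier in this example, Softmax (imported from nn) converts the output-layer activations in var['y'] into class probabilities before plotProb displays them. A standard numerically stable softmax, shown only as a sketch of what such a function computes:

import numpy as np

def Softmax(z):
    # Subtract the row-wise max so exp() cannot overflow, then normalize.
    z = z - np.max(z, axis=-1, keepdims=True)
    e = np.exp(z)
    return e / np.sum(e, axis=-1, keepdims=True)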
Example #11
import matplotlib.pyplot as plt
import numpy as np

from util import Load

# Alternative stats files:
# stats_fname = 'nn_stats.npz'
# stats_fname = 'cnn_stats.npz'
# stats_fname = 'cnn_stats_32_eps99.npz'
# stats_fname = 'cnn_stats_32_mom09.npz'
# stats_fname = 'cnn_stats_32_batch150.npz'
# stats_fname = 'nn_stats_33_50_100.npz'
stats_fname = 'cnn_stats_33_5_10.npz'

stats = Load(stats_fname)

train = np.array(stats['train_ce'])
valid = np.array(stats['valid_ce'])

plt.figure(1)
plt.plot(train[:, 0], train[:, 1], 'r', label='Train')
plt.plot(valid[:, 0], valid[:, 1], 'c', label='Validation')
plt.xlabel('Epoch')

plt.ylabel('Cross Entropy')

plt.title('Training and Validation Entropies of CNN with Filters=5;10')

plt.legend()
plt.show()
Example #12
def count_probability(x, model_fname, forward):
    model = Load(model_fname)
    var = forward(model, x)
    return Softmax(var['y'])
Example #13
def load_val_data():
    val_file = '../411a3/val.npz'
    val = Load(val_file)
    X_val = val['X_val']
    X_val = X_val.reshape((-1, 1, 32, 30))
    return X_val
Example #14
import matplotlib.pyplot as plt

from util import LoadData, Load, Save, DisplayPlot


def ShowMeans(means, number=0):
    """Show the cluster centers as images."""
    plt.figure(number)
    plt.clf()
    for i in xrange(means.shape[1]):
        plt.subplot(1, means.shape[1], i + 1)
        plt.imshow(means[:, i].reshape(48, 48), cmap=plt.cm.gray)
    plt.draw()
    raw_input('Press Enter.')


def ShowMeansCNN(means, number=0):
    """Show the cluster centers as images."""
    plt.figure(number)
    plt.clf()
    for i in xrange(means.shape[3]):
        plt.subplot(1, means.shape[3], i + 1)
        plt.imshow(means[:, :, 0, i], cmap=plt.cm.gray)
    plt.draw()
    raw_input('Press Enter.')


nn = Load('nn_model.npz')
ShowMeans(nn['W1'], number=0)

cnn = Load('cnn_model.npz')
ShowMeansCNN(cnn['W1'], number=1)
Example #15
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 13 23:14:04 2016

3.4 Plots first layer of weights 
@author: risal
"""

from util import Load, Save, DisplayPlot
import matplotlib.pylab as plt
import numpy as np
from nn import Softmax

fname_1 = 'nn_model_confidence.npz'
fname_2 = 'cnn_model_batch_500.npz'
nn_model = Load(fname_1)
cnn_model = Load(fname_2)

#DisplayPlot(stats['train_ce'], stats['valid_ce'], 'Cross Entropy', number=0)
#DisplayPlot(stats['train_acc'], stats['valid_acc'], 'Accuracy', number=1)

def ShowWeightsNN(weights, number=0):
  """Show the weights centers as images."""
  plt.figure(number)
  plt.clf()
  for i in xrange(weights.shape[1]):
    plt.subplot(1, weights.shape[1], i+1)
    plt.imshow(weights[:, i].reshape(48, 48), cmap=plt.cm.gray)
  plt.draw()
  raw_input('Press Enter.')
Example #16
def main():
    """Trains a NN."""
    model_fname = 'nn_model.npz'
    stats_fname = 'nn_stats.npz'

    # Hyper-parameters. Modify them if needed.
    num_hiddens = [16, 32]  #[150,20]  [6,2]  [200, 140]
    eps = [0.01]  #[0.001, 0.01, 0.1, 0.5, 1.0]
    momentum = [0.9]  #[0.0, 0.45, 0.9]
    num_epochs = 1000
    batch_size = [100]  #[1, 10, 100, 500, 1000]

    # Input-output dimensions.
    num_inputs = 2304
    num_outputs = 7

    # Initialize model.
    model = InitNN(num_inputs, num_hiddens, num_outputs)

    # Uncomment to reload trained model here.
    model = Load(
        "C:\\Users\\Adam\\Documents\\Masters\\Machine Learning\\A2\\NN\\NN_Results\\0.01\\0.0\\100\\nn_model.npz"
    )

    # Check gradient implementation.
    print('Checking gradients...')
    x = np.random.rand(10, 48 * 48) * 0.1
    CheckGrad(model, NNForward, NNBackward, 'W3', x)
    CheckGrad(model, NNForward, NNBackward, 'b3', x)
    CheckGrad(model, NNForward, NNBackward, 'W2', x)
    CheckGrad(model, NNForward, NNBackward, 'b2', x)
    CheckGrad(model, NNForward, NNBackward, 'W1', x)
    CheckGrad(model, NNForward, NNBackward, 'b1', x)
    print("passed grad check")

    inputs_train, inputs_valid, inputs_test, target_train, target_valid, target_test = LoadData(
        '../toronto_face.npz')
    #print(inputs_test.shape)
    Evaluate(inputs_test, target_test, model, NNForward, 1)

    for e in eps:
        for m in momentum:
            for b in batch_size:
                model = InitNN(num_inputs, num_hiddens, num_outputs)
                # Train model.
                dir = str(e) + "\\" + str(m) + "\\" + str(b) + "\\"

                if os.path.exists(dir):
                    filelist = [
                        f for f in os.listdir(".") if f.endswith(".bak")
                    ]
                    for f in filelist:
                        os.remove(f)
                else:
                    os.makedirs(dir)

                model, stats = Train(model, NNForward, NNBackward, NNUpdate, e,
                                     m, num_epochs, b)

                # Uncomment if you wish to save the model.
                Save(dir + model_fname, model)

                # Uncomment if you wish to save the training statistics.
                Save(dir + stats_fname, stats)

                SavePlot(0, dir + "cross_entropy.png")
                SavePlot(1, dir + "accuracy.png")
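
SavePlot(0, ...) and SavePlot(1, ...) persist the two matplotlib figures drawn during training (by the convention used in Example #1, figure 0 holds the cross-entropy curves and figure 1 the accuracy curves). A minimal sketch of such a helper, assuming it only saves an existing figure by its number; the project's actual SavePlot may differ:

import matplotlib.pyplot as plt

def SavePlot(number, fname):
    # Select the existing figure by its number and write it to disk.
    plt.figure(number)
    plt.savefig(fname)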