Example #1
# NOTE: only the numpy imports below are certain from usage; SVHN_On_Memory,
# NoisyRELU, Softmax, MLP and loglikehood are assumed to come from this
# project's own modules.
from numpy import split, nonzero


def main():
    ###################
    #BUILD THE DATASET#
    ###################
    print 'build the dataset'
    
    train_set = SVHN_On_Memory(which_set='train')
    test_set = SVHN_On_Memory(which_set='test')
    
    
    
    # numpy.split at index 50000: the first 50000 examples become the
    # training set and the remainder the validation set
    train_setX, valid_setX = split(train_set.X, [50000], axis=0)
    train_sety, valid_sety = split(train_set.y, [50000], axis=0)
    
    # collapse one-hot targets to integer class labels: nonzero() returns
    # (row indices, column indices), and the column index is the class
    test_set.y = nonzero(test_set.y)[1]
    train_sety = nonzero(train_sety)[1]
    valid_sety = nonzero(valid_sety)[1]
    

    ##################
    #BUILD THE LAYERS#
    ##################
    print 'build the layers'
    input_size = len(train_setX[0])
    
    

    h1 = NoisyRELU(prev_layer_size=input_size, this_layer_size=1000, threshold=5, noise_factor=1)
    h2 = NoisyRELU(prev_layer_size=h1.get_size(), this_layer_size=1000, threshold=5, noise_factor=1)
    h3 = NoisyRELU(prev_layer_size=h2.get_size(), this_layer_size=1000, threshold=5, noise_factor=1)

    
    
    # the output softmax stacks on h3, the last hidden layer
    output_layer = Softmax(prev_layer_size=h3.this_layer_size, this_layer_size=10,
                           W_range=[0,0], b_range=[0,0])
    

    mlp = MLP(input_size=input_size,
              layers=[h1, h2, h3, output_layer],
              train_set=[train_setX, train_sety],
              valid_set=[valid_setX, valid_sety],
              test_set=[test_set.X, test_set.y],
              error_function=loglikehood,
              batch_size=20,
              learning_rate=0.1)
     
    print 'start training'
    mlp.train(save_path='1000-1000-1000-noisy.pkl', save_freq=1)
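
    # Hedged sanity check (assumption: MLP.train with save_path pickles the
    # whole model via cPickle; 'trained_mlp' is an illustrative name, not
    # project API): reload the snapshot to confirm it round-trips.
    import cPickle
    with open('1000-1000-1000-noisy.pkl', 'rb') as fin:
        trained_mlp = cPickle.load(fin)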
Example #2
# NOTE: only the imports below are certain from usage; MNIST, Tanh, Softmax,
# MLP and loglikehood are assumed to come from this project's own modules.
import cPickle

from numpy import split


def main():
    ###################
    #BUILD THE DATASET#
    ###################
    print 'build the dataset'
    train_set = MNIST(which_set='train', one_hot=False)
    test_set = MNIST(which_set='test', one_hot=False)
    
    train_setX, valid_setX = split(train_set.X, [50000], axis=0)
    train_sety, valid_sety = split(train_set.y, [50000], axis=0)
    

    ##################
    #BUILD THE LAYERS#
    ##################
    print 'build the layers'
    input_size = len(train_setX[0])
    
    

    h1 = Tanh(prev_layer_size=input_size, this_layer_size=2000)
    output_layer = Softmax(prev_layer_size=h1.this_layer_size, this_layer_size=10, 
                           W_range=[0,0], b_range=[0,0])
    

    mlp = MLP(input_size=input_size,
              layers=[h1, output_layer],
              train_set=[train_setX, train_sety],
              valid_set=[valid_setX, valid_sety],
              test_set=[test_set.X, test_set.y],
              error_function=loglikehood,
              batch_size=20,
              learning_rate=0.1)
     
    print 'start training'
    mlp.train()

    
    with open('batches_clean.pkl', 'wb') as bat:
        cPickle.dump(mlp.epoch, bat)
    with open('errors_clean.pkl', 'wb') as err:
        cPickle.dump(mlp.valid_error, err)
    
    with open('legends_clean.pkl', 'wb') as leg:
        cPickle.dump(['2000-tanh'], leg)
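
    # The pickled curves are presumably meant for later plotting; a minimal
    # sketch, assuming matplotlib and that mlp.epoch / mlp.valid_error are
    # parallel sequences:
    import matplotlib.pyplot as plt
    with open('batches_clean.pkl', 'rb') as bat:
        epochs = cPickle.load(bat)
    with open('errors_clean.pkl', 'rb') as err:
        valid_errors = cPickle.load(err)
    with open('legends_clean.pkl', 'rb') as leg:
        curve_labels = cPickle.load(leg)
    plt.plot(epochs, valid_errors, label=curve_labels[0])
    plt.xlabel('epoch')
    plt.ylabel('validation error')
    plt.legend()
    plt.show()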
Example #3
# NOTE: only the numpy import below is certain from usage; MNIST, NoisyRELU,
# Softmax, MLP and loglikehood are assumed to come from this project's own
# modules.
from numpy import split


def main():
    ###################
    #BUILD THE DATASET#
    ###################
    print 'build the dataset'
    train_set = MNIST(which_set='train', one_hot=False)
    test_set = MNIST(which_set='test', one_hot=False)

    train_setX, valid_setX = split(train_set.X, [50000], axis=0)
    train_sety, valid_sety = split(train_set.y, [50000], axis=0)

    ##################
    #BUILD THE LAYERS#
    ##################
    print 'build the layers'
    input_size = len(train_setX[0])

    # with noise_factor=0 and activation_threshold=0 this layer presumably
    # reduces to a plain (noiseless, unthresholded) ReLU
    h1 = NoisyRELU(prev_layer_size=input_size,
                   this_layer_size=200,
                   noise_factor=0,
                   activation_threshold=0)
    output_layer = Softmax(prev_layer_size=h1.this_layer_size,
                           this_layer_size=10,
                           W_range=[0, 0],
                           b_range=[0, 0])

    print 'build the model'
    mlp = MLP(input_size=input_size,
              layers=[h1, output_layer],
              train_set=[train_setX, train_sety],
              valid_set=[valid_setX, valid_sety],
              test_set=[test_set.X, test_set.y],
              error_function=loglikehood,
              batch_size=20)

    print 'start training'
    mlp.train_batch(100000)
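
    # Note (assumption): train_batch appears to run a fixed number of
    # minibatches rather than epochs; no save_path is given here, so the
    # trained model is not written to disk.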
Example #4
# NOTE: only the imports below are certain from usage; MNIST, NoisyRELU,
# Softmax, MLP and loglikehood are assumed to come from this project's own
# modules.
import cPickle

from numpy import split


def main():
    ###################
    #BUILD THE DATASET#
    ###################
    print 'build the dataset'
    train_set = MNIST(which_set='train', one_hot=False)
    test_set = MNIST(which_set='test', one_hot=False)
    
    train_setX, valid_setX = split(train_set.X, [50000], axis=0)
    train_sety, valid_sety = split(train_set.y, [50000], axis=0)
    

    ##################
    #BUILD THE LAYERS#
    ##################
    print 'build the layers'
    input_size = len(train_setX[0])
    
    

    legends = []
    batches = []
    errors = []
    # other candidate learning rates noted in the source: 0.1, 0.5, 0.001, 0.005
    for learning_rate in [0.01, 0.05]:
        print 'build the model'
        print 'learning rate', learning_rate

        # rebuild the layers for each run so every learning rate starts from
        # fresh weights rather than continuing from the previous run
        h1 = NoisyRELU(prev_layer_size=input_size, this_layer_size=2000, noise_factor=1, threshold=5)
        output_layer = Softmax(prev_layer_size=h1.this_layer_size, this_layer_size=10,
                               W_range=[0,0], b_range=[0,0])

        mlp = MLP(input_size=input_size,
                  layers=[h1, output_layer],
                  train_set=[train_setX, train_sety],
                  valid_set=[valid_setX, valid_sety],
                  test_set=[test_set.X, test_set.y],
                  error_function=loglikehood,
                  batch_size=20,
                  learning_rate=learning_rate)
     
        print 'start training'
        mlp.train()
        batches.append(mlp.epoch)
        errors.append(mlp.valid_error)
        legends.append(learning_rate)
    
    with open('batches.pkl', 'wb') as bat:
        cPickle.dump(batches, bat)
    with open('errors.pkl', 'wb') as err:
        cPickle.dump(errors, err)
    
    with open('legends.pkl', 'wb') as leg:
        cPickle.dump(legends, leg)
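
    # A minimal plotting sketch (assumes matplotlib): overlay the validation
    # curves for the two learning rates; legends holds the learning-rate
    # values themselves.
    import matplotlib.pyplot as plt
    for ep, er, lr in zip(batches, errors, legends):
        plt.plot(ep, er, label=str(lr))
    plt.xlabel('epoch')
    plt.ylabel('validation error')
    plt.legend()
    plt.show()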