Example #1
def main():
    '''
    Trains two networks on the MNIST dataset.
    Both have two hidden ReLU layers with 256 and 128 units.
    The first one has a mean batch normalization layer before every layer.
    '''

    np.random.seed(42)
    n_classes = 10
    dim = 784

    inputs, labels = load_normalized_mnist_data()

    # Define network with batch norm
    netBN = Network(learning_rate=1e-3)
    netBN.add_layer(Linear(dim, 256))
    netBN.add_layer(BatchNorm(256))
    netBN.add_layer(ReLU())
    netBN.add_layer(BatchNorm(256))
    netBN.add_layer(Linear(256, 128))
    netBN.add_layer(BatchNorm(128))
    netBN.add_layer(ReLU())
    netBN.add_layer(BatchNorm(128))
    netBN.add_layer(Linear(128, n_classes))
    netBN.set_loss(SoftmaxCrossEntropyLoss())

    train_network(netBN, inputs, labels, 50)
    test_loss_BN, test_acc_BN = validate_network(netBN,
                                                 inputs['test'],
                                                 labels['test'],
                                                 batch_size=128)
    print('MLP Network with batch normalization:')
    print('Test loss:', test_loss_BN)
    print('Test accuracy:', test_acc_BN)

    # Define network without batch norm
    np.random.seed(42)
    net = Network(learning_rate=1e-3)
    net.add_layer(Linear(dim, 256))
    net.add_layer(ReLU())
    net.add_layer(Linear(256, 128))
    net.add_layer(ReLU())
    net.add_layer(Linear(128, n_classes))
    net.set_loss(SoftmaxCrossEntropyLoss())

    train_network(net, inputs, labels, 50)
    test_loss, test_acc = validate_network(net,
                                           inputs['test'],
                                           labels['test'],
                                           batch_size=128)
    print('Baseline MLP Network without batch normalization:')
    print('Test loss:', test_loss)
    print('Test accuracy:', test_acc)
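# Note on the BatchNorm layers above: the docstring calls them "mean batch
# normalization" layers, but the library's implementation is not part of this
# listing. The class below is only a minimal, self-contained sketch of what a
# mean-only batch norm layer could look like (centering each feature over the
# batch and adding a learnable shift); it is an assumption, not the actual
# BatchNorm class used by the Network above.
import numpy as np

class MeanOnlyBatchNormSketch:
    def __init__(self, n_features):
        self.beta = np.zeros(n_features)     # learnable per-feature shift
        self.d_beta = np.zeros(n_features)

    def forward(self, x):
        # x: (batch_size, n_features); subtract the per-feature batch mean
        return x - x.mean(axis=0) + self.beta

    def backward(self, grad_out):
        # gradient of centering: remove the per-feature mean of the incoming
        # gradient; accumulate the gradient for beta
        self.d_beta = grad_out.sum(axis=0)
        return grad_out - grad_out.mean(axis=0)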
Example #2
def main():
    '''
    Trains a baseline network (no batch normalization) on the MNIST dataset.
    It has two hidden ReLU layers with 256 and 128 units.
    '''
    np.random.seed(42)
    n_classes = 10
    dim = 784

    inputs, labels = load_normalized_mnist_data()

    # Define network without batch norm
    net = Network(learning_rate=1e-3)
    net.add_layer(Linear(dim, 256, 'L0_LIN'))
    net.add_layer(ReLU('L0_RELU'))
    net.add_layer(Linear(256, 128, 'L1_LIN'))
    net.add_layer(ReLU('L1_RELU'))
    net.add_layer(Linear(128, n_classes, 'L2_LIN'))
    net.set_loss(SoftmaxCrossEntropyLoss())

    #train_network(net, inputs, labels, 50)
    train_network(net, inputs, labels, 1)
    #test_loss, test_acc = validate_network(net, inputs['test'], labels['test'],
    #                                       batch_size=128)
    #print('Baseline MLP Network without batch normalization:')
    #print('Test loss:', test_loss)
    #print('Test accuracy:', test_acc)
    print('main finish')
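# train_network itself is not shown in this listing. The helper below is only
# a rough sketch of a typical mini-batch training epoch for such a network;
# the forward/backward/update method names on `network` are hypothetical, not
# the actual API of the Network class used above.
import numpy as np

def train_one_epoch_sketch(network, train_inputs, train_labels, batch_size=128):
    n = train_inputs.shape[0]
    order = np.random.permutation(n)                 # shuffle once per epoch
    for start in range(0, n, batch_size):
        idx = order[start:start + batch_size]
        x, y = train_inputs[idx], train_labels[idx]
        predictions = network.forward(x)             # hypothetical method
        loss, grad = network.loss.get_loss_and_grad(predictions, y)  # hypothetical
        network.backward(grad)                       # hypothetical method
        network.update_parameters()                  # hypothetical SGD step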
Example #3
def main():
    '''
    The first layer of this network is a dilated convolution layer.
    The second layer of this network is a fully connected layer.
    '''
    np.random.seed(42)
    n_classes = 10
    dim = 784
    batch_norm = True

    inputs, labels = load_normalized_mnist_data()

    net = Network(learning_rate=1e-3)
    net.add_layer(DilatedConv(8, 3, 2, 1, 2))
    net.add_layer(ReLU())
    net.add_layer(Linear(8 * 784, 128))
    net.add_layer(ReLU())
    net.add_layer(Linear(128, n_classes))
    net.set_loss(SoftmaxCrossEntropyLoss())

    train_network(net, inputs, labels, 50)
    test_loss, test_acc = validate_network(net,
                                           inputs['test'],
                                           labels['test'],
                                           batch_size=128)

    print('Baseline MLP Network with Conv:')
    print('Test loss:', test_loss)
    print('Test accuracy:', test_acc)
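# Shape check for the sizes used above: the fully connected layer expects
# 8 * 784 inputs, which is consistent with the dilated conv producing 8 output
# channels while keeping the 28x28 MNIST spatial size (so the layer presumably
# pads its input). The meaning of the DilatedConv arguments themselves is not
# documented in this listing.
channels_out = 8
height = width = 28                               # MNIST images are 28x28
assert channels_out * height * width == 8 * 784   # matches Linear(8 * 784, 128)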
Example #4
def CNN():
    '''
    Trains a CNN on the MNIST dataset.
    '''
    np.random.seed(42)
    n_classes = 10

    inputs, labels = load_mnist_images()

    # Define network without batch norm
    net = Network(learning_rate=1e-3)
    net.add_layer(Convolution2D(1, 2, 28, 28, pad=0, stride=1, filter_size=3, dilation=2))
    net.add_layer(ReLU())
    net.add_layer(BatchNorm(800))
    net.add_layer(Linear(800, 128))
    net.add_layer(ReLU())
    net.add_layer(BatchNorm(128))
    net.add_layer(Linear(128, n_classes))
    net.set_loss(SoftmaxCrossEntropyLoss())

    train_network(net, inputs, labels, 250)
    test_loss, test_acc = validate_network(net, inputs['test'], labels['test'],
                                            batch_size=128)
    print('Baseline CNN Network with batch normalization:')
    print('Test loss:', test_loss)
    print('Test accuracy:', test_acc)
    return net
Example #5
def Batch_Norm_implementation():
    '''
    Trains a network on the MNIST dataset.
    It has two hidden ReLU layers with 256 and 128 units, with mean batch normalization applied between layers.
    '''
    np.random.seed(42)
    n_classes = 10
    dim = 784

    inputs, labels = load_normalized_mnist_data()

    net = Network(learning_rate=1e-3)
    net.add_layer(Linear(dim, 256))
    net.add_layer(BatchNorm(256))
    net.add_layer(ReLU())
    net.add_layer(BatchNorm(256))
    net.add_layer(Linear(256, 128))
    net.add_layer(ReLU())
    net.add_layer(BatchNorm(128))
    net.add_layer(Linear(128, n_classes))
    net.set_loss(SoftmaxCrossEntropyLoss())

    train_network(net, inputs, labels, 250)
    test_loss, test_acc = validate_network(net, inputs['test'], labels['test'],
                                           batch_size=128)
    print('Baseline MLP Network with batch normalization:')
    print('Test loss:', test_loss)
    print('Test accuracy:', test_acc)
    return net
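# SoftmaxCrossEntropyLoss is used by every example here, but its implementation
# is not part of the listing. The function below is only a self-contained sketch
# of the standard softmax cross-entropy computation (numerically stabilised) and
# its gradient with respect to the logits, assuming integer class labels.
import numpy as np

def softmax_cross_entropy_sketch(logits, labels):
    # logits: (batch_size, n_classes); labels: (batch_size,) integer classes
    shifted = logits - logits.max(axis=1, keepdims=True)    # numerical stability
    probs = np.exp(shifted)
    probs /= probs.sum(axis=1, keepdims=True)
    n = logits.shape[0]
    loss = -np.log(probs[np.arange(n), labels]).mean()
    grad = probs
    grad[np.arange(n), labels] -= 1.0
    grad /= n                                                # gradient w.r.t. logits
    return loss, grad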
Example #6
def Vanilla_implementation():
    '''
    Trains a baseline network (no batch normalization) on the MNIST dataset.
    It has two hidden ReLU layers with 256 and 128 units.
    '''
    np.random.seed(42)
    n_classes = 10
    dim = 784

    inputs, labels = load_normalized_mnist_data()

    # Define network without batch norm
    net = Network(learning_rate=1e-3)
    net.add_layer(Linear(dim, 256))
    net.add_layer(ReLU())
    net.add_layer(Linear(256, 128))
    net.add_layer(ReLU())
    net.add_layer(Linear(128, n_classes))
    net.set_loss(SoftmaxCrossEntropyLoss())

    train_network(net, inputs, labels, 250)
    test_loss, test_acc = validate_network(net, inputs['test'], labels['test'],
                                           batch_size=128)
    print('Baseline MLP Network without batch normalization:')
    print('Test loss:', test_loss)
    print('Test accuracy:', test_acc)
    return net
Example #7
def generate_network_batch_norm():
    '''
    Generates a network with a batch norm layer for gradient checking.
    Returns:
        net (Network): Network defined for testing purposes, in this case specifically for the batch norm layer.
    '''
    n_classes = 10
    dim = 784

    net = Network(learning_rate=1e-3)
    net.add_layer(Linear(dim, 256))
    net.add_layer(ReLU())
    net.add_layer(BatchNorm(256))
    net.add_layer(Linear(256, 128))
    net.add_layer(ReLU())
    net.add_layer(Linear(128, n_classes))
    net.set_loss(SoftmaxCrossEntropyLoss())
    return net
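# The network above is meant for gradient checking. The helpers below are a
# generic central-difference check, independent of the Network class: they
# compare an analytic gradient against a numerical estimate for any scalar
# function f of a parameter array. Hooking this up to the network's parameters
# depends on Network internals that are not shown in this listing.
import numpy as np

def numerical_gradient_sketch(f, x, eps=1e-5):
    # central differences: (f(x + eps) - f(x - eps)) / (2 * eps), entry by entry
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        original = x[idx]
        x[idx] = original + eps
        f_plus = f(x)
        x[idx] = original - eps
        f_minus = f(x)
        x[idx] = original
        grad[idx] = (f_plus - f_minus) / (2 * eps)
        it.iternext()
    return grad

def relative_error(analytic, numerical):
    return np.abs(analytic - numerical) / np.maximum(1e-8, np.abs(analytic) + np.abs(numerical))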
Example #8
def generate_CNN():
    '''
    Generates a network with a convolutional layer for gradient checking.
    Returns:
        net (Network): Network defined for testing purposes, in this case specifically for the convolutional layer.
    '''
    n_classes = 10

    net = Network(learning_rate=1e-3)
    net.add_layer(
        Convolution2D(1, 1, 28, 28, pad=0, stride=1, filter_size=3,
                      dilation=1))
    net.add_layer(ReLU())
    net.add_layer(BatchNorm(676))
    net.add_layer(Linear(676, 128))
    net.add_layer(ReLU())
    net.add_layer(BatchNorm(128))
    net.add_layer(Linear(128, n_classes))
    net.set_loss(SoftmaxCrossEntropyLoss())
    return net
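# Shape check for the sizes used above, assuming the usual convolution
# output-size formula: out = (in + 2*pad - dilation*(filter_size - 1) - 1) // stride + 1.
# With a 28x28 input, pad=0, stride=1, filter_size=3 and dilation=1 this gives 26,
# and 1 output channel * 26 * 26 = 676, matching BatchNorm(676) and Linear(676, 128).
def conv_output_size(in_size, pad, stride, filter_size, dilation):
    return (in_size + 2 * pad - dilation * (filter_size - 1) - 1) // stride + 1

out = conv_output_size(28, pad=0, stride=1, filter_size=3, dilation=1)
assert out == 26 and 1 * out * out == 676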