Example #1
def create_outlayer(self, o_node, o_label):
    param = self.param
    if param.out_type == 'softmax':
        return nnet.SoftmaxLayer(o_node, o_label)
    elif param.out_type == 'linear':
        return nnet.RegressionLayer(o_node, o_label, param)
    elif param.out_type == 'logistic':
        return nnet.RegressionLayer(o_node, o_label, param)
    else:
        # Unknown out_type falls through and implicitly returns None.
        pass
Example #2
def create_outlayer(self, o_node, o_label):
    param = self.param
    if param.out_type == 'softmax':
        return nnet.SoftmaxLayer(o_node, o_label)
    elif param.out_type == 'linear':
        return nnet.RegressionLayer(o_node, o_label, param)
    elif param.out_type == 'logistic':
        return nnet.RegressionLayer(o_node, o_label, param)
    else:
        raise RuntimeError('NNConfig', 'unknown out_type')
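For context, nnet.SoftmaxLayer presumably computes the standard softmax over the class scores; a standalone NumPy sketch of the row-wise, numerically stable form (not the nnet implementation):

import numpy as np

def softmax_rows(scores):
    # Numerically stable softmax over the last axis (one row per example).
    shifted = scores - scores.max(axis=1, keepdims=True)
    exps = np.exp(shifted)
    return exps / exps.sum(axis=1, keepdims=True)

print(softmax_rows(np.array([[1.0, 2.0, 3.0]], dtype='float32')))  # each row sums to 1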
Example #3
def three_layer_test():
    Xtr, Ytr, Xte, Yte, label_names = test.get_cifar10_dataset()

    # Reshape each data point to be a 1-dimensional array, for a plain neural network.
    Xtr = Xtr.reshape(50000, 32 * 32 * 3)
    Xte = Xte.reshape(10000, 32 * 32 * 3)

    # PRE-PROCESSING
    Xtr = test.normalize(Xtr)
    Xte = test.normalize(Xte)

    mean = np.mean(np.concatenate([Xtr, Xte]), axis=0)
    Xtr = Xtr - mean
    Xte = Xte - mean

    Xtr = test.append_zeros(Xtr)
    Xte = test.append_zeros(Xte)

    # Neural Net
    nn = nnet.NeuralNetwork(Xtr.shape[1])
    nn.batch_size = 512

    nn.set_training_set(Xtr.T, Ytr)
    nn.set_testing_set(Xte.T, Yte)

    nn.add_layer(
        nnet.FullyConnectedLayer(pass_type="test|train",
                                 output_size=100,
                                 initialization_type='xavier'))
    nn.add_layer(nnet.BatchNormalizationLayer(pass_type="test|train"))
    nn.add_layer(
        nnet.ActivationLayer(pass_type="test|train",
                             activation_type="leaky_relu"))

    nn.add_layer(
        nnet.FullyConnectedLayer(pass_type="test|train",
                                 output_size=50,
                                 initialization_type='xavier'))
    nn.add_layer(nnet.BatchNormalizationLayer(pass_type="test|train"))
    nn.add_layer(
        nnet.ActivationLayer(pass_type="test|train",
                             activation_type="leaky_relu"))

    nn.add_layer(
        nnet.FullyConnectedLayer(pass_type="test|train",
                                 output_size=10,
                                 initialization_type='xavier'))
    nn.add_layer(nnet.SoftmaxLayer(pass_type="test"))
    nn.add_layer(nnet.LossLayer(pass_type="train"))

    # Print out each layer's information in order, then train.
    nn.print_info()
    final_loss, final_accuracy = nn.train(iterations=300)
    assert (final_loss < 1)
    assert (final_accuracy > 0.30)
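The leaky_relu activation selected above is the usual piecewise-linear variant of ReLU; a standalone NumPy sketch, where the slope alpha is an assumed value rather than one read from nnet:

import numpy as np

def leaky_relu(x, alpha=0.01):
    # Pass positive values through unchanged, scale negatives by alpha (assumed default).
    return np.where(x > 0, x, alpha * x)

print(leaky_relu(np.array([-2.0, -0.5, 0.0, 1.5], dtype='float32')))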
Example #4
def softmax(param):
    factory = NNFactory(param)  # network factory, constructed as in mlp2layer below
    # setup network for softmax
    i_node = np.zeros((param.batch_size, param.input_size), 'float32')
    o_node = np.zeros((param.batch_size, param.num_class), 'float32')
    o_label = np.zeros((param.batch_size), 'int8')

    nodes = [i_node, o_node]
    layers = [
        nnet.FullLayer(i_node, o_node, param.init_sigma, param.rec_gsqr())
    ]

    layers += [nnet.SoftmaxLayer(o_node, o_label)]
    net = nnet.NNetwork(layers, nodes, o_label, factory)
    return net
Example #5
def mlp2layer(param):
    factory = NNFactory(param)
    # setup network for 2 layer perceptron
    i_node = np.zeros((param.batch_size, param.input_size), 'float32')
    o_node = np.zeros((param.batch_size, param.num_class), 'float32')
    h1_node = np.zeros((param.batch_size, param.num_hidden), 'float32')
    h2_node = np.zeros_like(h1_node)
    o_label = np.zeros((param.batch_size), 'int8')

    nodes = [i_node, h1_node, h2_node, o_node]
    layers = [
        nnet.FullLayer(i_node, h1_node, param.init_sigma, param.rec_gsqr())
    ]
    layers += [nnet.ActiveLayer(h1_node, h2_node, param.node_type)]
    layers += [
        nnet.FullLayer(h2_node, o_node, param.init_sigma, param.rec_gsqr())
    ]
    layers += [nnet.SoftmaxLayer(o_node, o_label)]

    net = nnet.NNetwork(layers, nodes, o_label, factory)
    return net
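In both network builders above, o_label is a buffer of integer class indices paired with the softmax output node; the loss conventionally trained against such a pairing is cross-entropy. A standalone NumPy sketch of that computation (not the nnet implementation):

import numpy as np

def softmax_cross_entropy(scores, labels):
    # scores: (batch, num_class) float32; labels: (batch,) integer class indices.
    shifted = scores - scores.max(axis=1, keepdims=True)
    log_probs = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
    return -log_probs[np.arange(len(labels)), labels].mean()

scores = np.random.randn(4, 10).astype('float32')
labels = np.array([0, 3, 7, 1], dtype='int8')
print(softmax_cross_entropy(scores, labels))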