Example #1
import os

import numpy as np

import deeppy as dp


def run():
    # Prepare data
    dataset = dp.datasets.MNIST()
    x, y = dataset.data(flat=True)
    x = x.astype(dp.float_) / 255.0
    y = y.astype(dp.int_)
    train_idx, test_idx = dataset.split()
    x_train = x[train_idx]
    y_train = y[train_idx]
    x_test = x[test_idx]
    y_test = y[test_idx]
    train_input = dp.SupervisedInput(x_train, y_train, batch_size=128)
    test_input = dp.SupervisedInput(x_test, y_test)

    # Setup neural network
    nn = dp.NeuralNetwork(
        layers=[
            dp.Dropout(0.2),
            dp.DropoutFullyConnected(
                n_output=800,
                dropout=0.5,
                weights=dp.Parameter(dp.NormalFiller(sigma=0.01),
                                     penalty=('l2', 0.00001), monitor=True),
            ),
            dp.Activation('relu'),
            dp.DropoutFullyConnected(
                n_output=800,
                dropout=0.5,
                weights=dp.Parameter(dp.NormalFiller(sigma=0.01),
                                     penalty=('l2', 0.00001), monitor=True),
            ),
            dp.Activation('relu'),
            dp.DropoutFullyConnected(
                n_output=dataset.n_classes,
                weights=dp.Parameter(dp.NormalFiller(sigma=0.01),
                                     penalty=('l2', 0.00001), monitor=True),
            ),
            dp.MultinomialLogReg(),
        ],
    )

    # Train neural network
    def valid_error():
        return nn.error(test_input)
    trainer = dp.StochasticGradientDescent(
        max_epochs=50,
        learn_rule=dp.Momentum(learn_rate=0.1, momentum=0.9),
    )
    trainer.train(nn, train_input, valid_error)

    # Visualize weights from first layer
    W = next(np.array(layer.params()[0].values) for layer in nn.layers
             if isinstance(layer, dp.FullyConnected))
    W = np.reshape(W.T, (-1, 28, 28))
    if not os.path.exists('mnist'):
        os.makedirs('mnist')
    dp.misc.img_save(dp.misc.img_tile(dp.misc.img_stretch(W)),
                     os.path.join('mnist', 'mlp_dropout_weights.png'))

    # Evaluate on test data
    error = nn.error(test_input)
    print('Test error rate: %.4f' % error)
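
The Dropout(0.2) input layer and the dropout=0.5 hidden layers above zero out a random subset of activations at training time. A minimal NumPy sketch of inverted dropout (illustrative only: dropout_forward is not a deeppy function, and deeppy's internals may differ):

import numpy as np

# Inverted dropout: zero each unit with probability `dropout` during training
# and rescale the survivors so expected activations match test time.
def dropout_forward(x, dropout, training=True, rng=np.random):
    if not training or dropout == 0:
        return x
    mask = rng.binomial(1, 1.0 - dropout, size=x.shape)
    return x * mask / (1.0 - dropout)

x = np.ones((2, 4))
print(dropout_forward(x, 0.5))  # about half the units zeroed, the rest scaled by 2
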
Example #2

from copy import deepcopy

import deeppy as dp

# Fragment: the constructor of a RecurrentGraph class that unrolls a stack of
# recurrent nodes over seq_size time steps. The base class and the helper
# classes (digraph.DiGraph, VSplit, VStack, Latch, LatchOut, Constant) are
# defined elsewhere in the package and are not shown here.
    def __init__(self, recurrent_nodes, seq_size, batch_size, cyclic, dropout):
        self.recurrent_nodes = recurrent_nodes
        graph = digraph.DiGraph()
        split = VSplit()
        stack = VStack()
        recurrent_graph_nodes = []
        depth = len(recurrent_nodes)
        latches = []
        for i in range(depth):
            node = recurrent_nodes[i]
            recurrent_graph_nodes.append([])
            hidden_shape = (batch_size, node.n_hidden)
            latch = Latch(hidden_shape)
            latches.append((latch, LatchOut(latch))) 
            for j in range(seq_size):
                if j > 0:
                    orig_node = node
                    node = deepcopy(node)
                    node._params = [p.share() for p in orig_node._params]
                recurrent_graph_nodes[i].append(node)

                if i == 0:
                    graph.add_edge(split, node, ('y%i' % j, 'x'))
                else:
                    node_below = recurrent_graph_nodes[i-1][j]
                    if dropout > 0:
                        drop_layer = dp.Dropout(dropout)
                        graph.add_edge(node_below, drop_layer, ('y', 'x'))
                        node_below = drop_layer
                    graph.add_edge(node_below, node, ('y', 'x'))
                if i == depth-1:
                    graph.add_edge(node, stack, ('y', 'x%i' % j))
                
                if j == 0:
                    if cyclic:
                        graph.add_edge(latches[i][1], node, ('y', 'h'))
                    else:
                        constant = Constant(hidden_shape)
                        graph.add_edge(constant, node, ('y', 'h'))
                else:
                    node_prev = recurrent_graph_nodes[i][j-1]
                    graph.add_edge(node_prev, node, ('h', 'h'))
                if j == seq_size-1:
                    graph.add_edge(node, latches[i][0], ('h', 'x'))

        super(RecurrentGraph, self).__init__(graph, in_node=split, out_node=stack)
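
The constructor's key trick is weight sharing across time steps: every deepcopy of a recurrent node reuses the original parameters through p.share(), so the unrolled graph trains a single weight set. A minimal NumPy sketch of the same unrolling scheme (rnn_unroll and its arguments are illustrative names, not deeppy API):

import numpy as np

# Unroll one recurrent layer over a fixed-length sequence, applying the same
# weights at every step (analogous to the shared-parameter nodes above).
def rnn_unroll(x_seq, w_xh, w_hh, h0):
    h = h0                                # plays the role of the Constant node
    hs = []
    for x in x_seq:                       # one graph node per time step
        h = np.tanh(x.dot(w_xh) + h.dot(w_hh))
        hs.append(h)
    return np.stack(hs)                   # analogous to the VStack output node

rng = np.random.RandomState(0)
seq_size, batch, n_in, n_hidden = 4, 2, 3, 5
x_seq = rng.normal(size=(seq_size, batch, n_in))
w_xh = rng.normal(scale=0.1, size=(n_in, n_hidden))
w_hh = rng.normal(scale=0.1, size=(n_hidden, n_hidden))
print(rnn_unroll(x_seq, w_xh, w_hh, np.zeros((batch, n_hidden))).shape)  # (4, 2, 5)
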
Example #3
import numpy as np

import deeppy as dp


def run():
    np.random.seed(3)
    # Dense and activation layers to gradient-check on a small input
    layers = [
        dp.Activation('relu'),
        dp.Activation('sigmoid'),
        dp.Activation('tanh'),
        dp.FullyConnected(
            n_output=3,
            weights=dp.NormalFiller(sigma=0.01),
        ),
        dp.Dropout(0.2),
        dp.DropoutFullyConnected(
            n_output=10,
            weights=dp.NormalFiller(sigma=0.01),
            dropout=0.5,
        ),
    ]

    input_shape = (1, 5)
    x = np.random.normal(size=input_shape).astype(dp.float_)
    for layer in layers:
        dp.misc.check_bprop(layer, x)

    # Convolutional and pooling layers to gradient-check on a small image batch
    conv_layers = [
        dp.Convolutional(
            n_filters=32,
            filter_shape=(3, 3),
            border_mode='same',
            weights=dp.NormalFiller(sigma=0.01),
        ),
        dp.Convolutional(
            n_filters=32,
            filter_shape=(5, 5),
            border_mode='valid',
            weights=dp.NormalFiller(sigma=0.01),
        ),
        dp.Pool(
            win_shape=(3, 3),
            strides=(2, 2),
            method='max',
        )
    ]
    input_shape = (5, 3, 8, 8)
    x = np.random.normal(size=input_shape).astype(dp.float_)
    for layer in conv_layers:
        dp.misc.check_bprop(layer, x)
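
dp.misc.check_bprop verifies a layer's backward pass against a numerical estimate. A sketch of the underlying idea, a centered finite-difference gradient check for a scalar-valued function (numerical_grad is an illustrative helper, not deeppy API):

import numpy as np

# Centered finite differences: grad_i ~= (f(x + eps*e_i) - f(x - eps*e_i)) / (2*eps)
def numerical_grad(f, x, eps=1e-6):
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        orig = x[idx]
        x[idx] = orig + eps
        f_plus = f(x)
        x[idx] = orig - eps
        f_minus = f(x)
        x[idx] = orig                     # restore the entry
        grad[idx] = (f_plus - f_minus) / (2 * eps)
        it.iternext()
    return grad

x = np.random.normal(size=(3, 4))
f = lambda x: np.sum(np.tanh(x))
analytic = 1 - np.tanh(x) ** 2            # exact gradient of sum(tanh(x))
print(np.allclose(numerical_grad(f, x), analytic, atol=1e-5))  # True
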
Example #4
# Fragment: the top of this snippet was cut off. Judging from the calls below
# and the Pool/Convolutional usage in Example #3, it defined
# conv_layer()/pool_layer() helpers roughly like the following reconstruction
# (the parameter values are assumptions; only pool_layer's trailing
# "method='max'" survives from the original):
import deeppy as dp


def conv_layer(n_filters):
    return dp.Convolution(
        n_filters=n_filters,
        filter_shape=(5, 5),
        border_mode='same',
        weights=dp.Parameter(dp.AutoFiller(gain=1.25), weight_decay=0.003),
    )


def pool_layer():
    return dp.Pool(
        win_shape=(3, 3),
        strides=(2, 2),
        method='max',
    )

# `dataset`, `train_input` and `profile` are assumed to be defined earlier in
# the original script; they were lost in the same truncation.
net = dp.NeuralNetwork(
    layers=[
        conv_layer(32),
        dp.ReLU(),
        pool_layer(),
        conv_layer(32),
        dp.ReLU(),
        pool_layer(),
        conv_layer(64),
        dp.ReLU(),
        pool_layer(),
        dp.Flatten(),
        dp.Dropout(),
        dp.Affine(n_out=64,
                  weights=dp.Parameter(dp.AutoFiller(gain=1.25),
                                       weight_decay=0.03)),
        dp.ReLU(),
        dp.Affine(
            n_out=dataset.n_classes,
            weights=dp.Parameter(dp.AutoFiller(gain=1.25)),
        )
    ],
    loss=dp.SoftmaxCrossEntropy(),
)

profile(net, train_input)
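
profile() itself is not part of the fragment; presumably it times training on train_input. A hypothetical stand-in under that assumption (the name, signature and defaults below are guesses, not deeppy API):

import time

import deeppy as dp

# Rough throughput measurement: run a few epochs and report time per epoch.
def profile(net, train_input, n_epochs=3):
    trainer = dp.StochasticGradientDescent(
        max_epochs=n_epochs,
        learn_rule=dp.Momentum(learn_rate=0.01, momentum=0.9),
    )
    start = time.time()
    trainer.train(net, train_input)
    print('%.2f s per epoch' % ((time.time() - start) / n_epochs))
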
Example #5
import os
import random

import numpy as np
import matplotlib.pyplot as plt

import deeppy as dp


def run():
    # Prepare MNIST data
    dataset = dp.datasets.MNIST()
    x, y = dataset.data(flat=True)
    x = x.astype(dp.float_)
    y = y.astype(dp.int_)
    train_idx, test_idx = dataset.split()
    x_train = x[train_idx]
    y_train = y[train_idx]
    x_test = x[test_idx]
    y_test = y[test_idx]

    scaler = dp.UniformScaler(high=255.)
    x_train = scaler.fit_transform(x_train)
    x_test = scaler.transform(x_test)

    # Generate image pairs
    n_pairs = 100000
    x1 = np.empty((n_pairs, 28 * 28), dtype=dp.float_)
    x2 = np.empty_like(x1, dtype=dp.float_)
    y = np.empty(n_pairs, dtype=dp.int_)
    n_imgs = x_train.shape[0]
    n = 0
    while n < n_pairs:
        i = random.randint(0, n_imgs - 1)
        j = random.randint(0, n_imgs - 1)
        if i == j:
            continue
        x1[n, ...] = x_train[i]
        x2[n, ...] = x_train[j]
        if y_train[i] == y_train[j]:
            y[n] = 1
        else:
            y[n] = 0
        n += 1

    # Input to network
    train_input = dp.SupervisedSiameseInput(x1, x2, y, batch_size=128)
    test_input = dp.SupervisedInput(x_test, y_test)

    # Setup network
    net = dp.SiameseNetwork(
        siamese_layers=[
            dp.Dropout(),
            dp.FullyConnected(
                n_output=800,
                weights=dp.Parameter(dp.AutoFiller(), weight_decay=0.00001),
            ),
            dp.Activation('relu'),
            dp.FullyConnected(
                n_output=800,
                weights=dp.Parameter(dp.AutoFiller(), weight_decay=0.00001),
            ),
            dp.Activation('relu'),
            dp.FullyConnected(
                n_output=2,
                weights=dp.Parameter(dp.AutoFiller(), weight_decay=0.00001),
            ),
        ],
        loss_layer=dp.ContrastiveLoss(margin=0.5),
    )

    # Train network
    trainer = dp.StochasticGradientDescent(
        max_epochs=10,
        learn_rule=dp.RMSProp(learn_rate=0.001),
    )
    trainer.train(net, train_input)

    # Visualize feature space
    feat = net.features(test_input)
    colors = [
        'tomato', 'lawngreen', 'royalblue', 'gold', 'saddlebrown', 'violet',
        'turquoise', 'mediumpurple', 'darkorange', 'darkgray'
    ]
    plt.figure()
    for i in range(10):
        plt.scatter(feat[y_test == i, 0],
                    feat[y_test == i, 1],
                    s=3,
                    c=colors[i],
                    linewidths=0)
    plt.legend([str(i) for i in range(10)], scatterpoints=1, markerscale=4)
    if not os.path.exists('mnist'):
        os.makedirs('mnist')
    plt.savefig(os.path.join('mnist', 'siamese_dists.png'), dpi=200)
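
dp.ContrastiveLoss(margin=0.5) pulls feature pairs labeled y = 1 together and pushes pairs labeled y = 0 at least margin apart. A NumPy sketch of the standard formulation (Hadsell et al.), L = y * d^2 + (1 - y) * max(0, margin - d)^2; deeppy's exact variant may differ:

import numpy as np

# Contrastive loss over feature pairs: d is the Euclidean distance per pair.
def contrastive_loss(f1, f2, y, margin=0.5):
    d = np.sqrt(np.sum((f1 - f2) ** 2, axis=1))
    return np.mean(y * d ** 2 + (1 - y) * np.maximum(0, margin - d) ** 2)

f1 = np.array([[0.0, 0.0], [1.0, 0.0]])
f2 = np.array([[0.1, 0.0], [0.0, 0.0]])
y = np.array([1, 0])  # first pair similar, second dissimilar
print(contrastive_loss(f1, f2, y))  # small pull term, zero push term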