Example no. 1
    def test_fit(self):
        # The bias column is prepended manually here, hence add_constant=False below.
        x = np.column_stack([np.ones(len(X_TRAIN)), X_TRAIN])
        mlp = MultilayerPerceptron(num_inputs=4,
                                   num_hidden_layers=1,
                                   num_hidden_nodes=3)
        mlp.fit(x, LABELS_TRAIN, epochnum=5, add_constant=False)
        mlp.classify(x, add_constant=False)
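X_TRAIN and LABELS_TRAIN are fixtures that are not shown in this excerpt; a minimal stand-in with compatible shapes (three feature columns, so the prepended bias column gives num_inputs=4) could be:

import numpy as np

rng = np.random.RandomState(0)
X_TRAIN = rng.rand(50, 3)                                  # three feature columns
LABELS_TRAIN = (X_TRAIN.sum(axis=1) > 1.5).astype(float)   # binary labels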
Example no. 2
    def test_easy(self):
        x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
        y = np.array([0, 1, 0, 1])

        mlp = MultilayerPerceptron(
            num_inputs=3, num_hidden_layers=1, num_hidden_nodes=6, seed=323440,
            learn_rate=.8, learn_rate_evol='constant', momentum=.1
        )
        mlp.fit(x, y, add_constant=True, epochnum=100, verbose=True)

        print(mlp.classify(x, add_constant=True))
        assert np.allclose(mlp.classify(x, add_constant=True).reshape(len(y)),
                           y)
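Here num_inputs=3 counts the two feature columns of x plus the bias column that add_constant=True prepends. Judging from the first example, that expansion presumably amounts to:

x_with_bias = np.column_stack([np.ones(len(x)), x])  # (4, 2) -> (4, 3), matching num_inputs=3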
Example no. 4
    def test_easy_multidim_y(self):
        x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
        y = np.array([[0, 1], [1, 0], [0, 1], [1, 0]])

        mlp = MultilayerPerceptron(
            num_inputs=3, num_outputs=2,
            num_hidden_layers=1, num_hidden_nodes=6, seed=323440,
            learn_rate=.8, learn_rate_evol='constant', momentum=.1
        )
        mlp.fit(x, y, epochnum=50)
        results = mlp.classify(x, max_ind=True)
        assert np.allclose(to_dummies(results), y)
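to_dummies is a helper from the same package and its definition is not shown here; from its use in this test and in the MNIST example below, it appears to one-hot encode a vector of class indices, roughly:

def to_dummies(labels):
    # Sketch of the assumed behavior: map integer class labels to a one-hot indicator matrix.
    labels = np.asarray(labels).ravel().astype(int)
    dummies = np.zeros((len(labels), labels.max() + 1))
    dummies[np.arange(len(labels)), labels] = 1
    return dummies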
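The two listings that follow are longer example scripts rather than tests. The first fits the network to a two-class 2-D dataset and writes one PDF page with the fitted decision contour after every 20 training epochs. It assumes x, y and the plot bounds x0_min, x0_max, x1_min, x1_max are defined earlier in the script; a synthetic stand-in (illustrative only) together with the imports the script relies on:

import time

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
# MultilayerPerceptron is imported from the package under test (import path not shown here).

rng = np.random.RandomState(23456)
x = np.vstack([rng.normal(0, 1, (50, 2)), rng.normal(2, 1, (50, 2))])  # two Gaussian blobs
y = np.repeat([0, 1], 50)                                              # class labels
x0_min, x0_max = x[:, 0].min() - 1, x[:, 0].max() + 1
x1_min, x1_max = x[:, 1].min() - 1, x[:, 1].max() + 1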
# Build a dense grid over the feature bounds for evaluating the decision surface
x0, x1 = np.meshgrid(np.linspace(x0_min, x0_max, 500),
                     np.linspace(x1_min, x1_max, 500))
x_flatmesh = np.column_stack([x0.ravel(), x1.ravel()])

# Setup estimator
mlp = MultilayerPerceptron(
    num_inputs=3, num_outputs=1, num_hidden_layers=1, num_hidden_nodes=6,
    learn_rate=1, learn_rate_evol='sqrt', momentum=.1, seed=23456
)

# Estimate and plot
start = time.perf_counter()
with PdfPages('ex_mlp_2d.pdf') as pdf:
    for i in range(0, 25):
        mlp.fit(x, y, epochnum=20, add_constant=True)
        Z = mlp.classify(x_flatmesh, add_constant=True)
        Z = Z.reshape(x0.shape)

        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1)

        ax.contour(x0, x1, Z, cmap=plt.cm.Paired)
        ax.scatter(x[:, 0], x[:, 1], c=y, cmap=plt.cm.Paired)

        pdf.savefig(fig)
        plt.close(fig)

        if i % 5 == 4:
            print(i+1)
print('Time elapsed: {:6.3f}'.format(time.perf_counter() - start))
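The MNIST script below loads a pickled dict of (images, labels) splits; the loading code is only partially included in this excerpt. A sketch of the assumed layout and the additional imports it uses:

import pickle

from matplotlib import cm

# Assumed layout of the unpickled object (illustrative only):
#   data = {'train': (images, labels), 'test': (images, labels), ...}
# where images has shape (n, 784) (flattened 28x28 digits) and labels holds integer digits.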
    data = pickle.load(f)

# Append a one-hot encoding of the labels to each (images, labels) split
data = {key: (val[0], val[1], to_dummies(val[1])) for key, val in data.items()}

# Setup estimator
num_hidden_nodes = [101]
mlp = MultilayerPerceptron(num_inputs=data['train'][0].shape[1] + 1,  # +1 for the bias column
                           num_outputs=data['train'][2].shape[1],
                           num_hidden_layers=len(num_hidden_nodes),
                           num_hidden_nodes=num_hidden_nodes,
                           learn_rate=.5,
                           momentum=.1,
                           seed=23456)

# Estimate multilayer perceptron
start = time.perf_counter()
mlp.fit(data['train'][0], data['train'][2], epochnum=10, verbose=1)
pred = mlp.classify(data['test'][0], max_ind=True)
print("Time: {:5.2f}, Error: {:5.4f}".format(
    time.perf_counter() - start, 1 - np.mean(pred == data['test'][1])))

# Visualize first hidden layer
fig1 = plt.figure(figsize=(10, 10))
for i in range(num_hidden_nodes[0] - 1):
    side = int(np.sqrt(num_hidden_nodes[0] - 1))  # 100 weight images in a 10x10 grid
    ax = fig1.add_subplot(side, side, i + 1)
    ax.imshow(mlp.layers[0].weights[1:, i].reshape([28, 28]), cmap=cm.Greys_r)
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
fig1.savefig('ex_mlp_mnist.pdf')