Example #1
0
def test_simple_mlp():
    """Train a tiny 2-relu / 2-softmax MLP to separate two Gaussian clusters."""
    # Two 2-D Gaussian clouds, 200 points each, centred at (6, 6) and (1, 1).
    identity_cov = [[1, 0], [0, 1]]
    cluster_a = np.random.multivariate_normal([6, 6], identity_cov, 200)
    cluster_b = np.random.multivariate_normal([1, 1], identity_cov, 200)

    x_train = np.vstack((cluster_a, cluster_b))
    # One-hot labels: [0, 1] for cluster A, [1, 0] for cluster B.
    y_train = np.vstack((np.array([[0, 1]] * 200), np.array([[1, 0]] * 200)))
    print(x_train.shape, y_train.shape)

    num_classes = 2
    batch_size = 10
    epochs = 10

    # 2 -> 2 (relu) -> 2 (softmax) network.
    model = Sequential()
    model.add(Dense(units=2, activation='relu', input_shape=(2, )))
    model.add(Dense(units=num_classes, activation='softmax'))
    model.summary()

    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  learning_rate=0.05)

    # NOTE(review): validation_data=(None, None) presumably means "no
    # validation" in this framework — confirm against Model.fit.
    history = model.fit(x_train,
                        y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        verbose=1,
                        validation_data=(None, None))
Example #2
0
def test_mlp_mnist():
    """Train a 784-256-128-64-10 relu MLP on MNIST and evaluate on the test split."""
    train_set, test_set = mnist(one_hot=True)
    x_train, y_train = train_set[0], train_set[1]
    x_test, y_test = test_set[0], test_set[1]

    # Collapse each image into a flat feature vector
    # (assumes rank-3 input: samples x height x width — TODO confirm).
    n_train = x_train.shape[0]
    x_train = x_train.reshape(n_train, x_train.shape[1] * x_train.shape[2])
    n_test = x_test.shape[0]
    x_test = x_test.reshape(n_test, x_test.shape[1] * x_test.shape[2])

    num_classes = 10
    batch_size = 32
    epochs = 1

    # Hidden relu layers tapering 256 -> 128 -> 64, softmax output.
    net = Sequential()
    for layer in (
            Dense(units=256, activation='relu', input_shape=(784, )),
            Dense(units=128, activation='relu'),
            Dense(units=64, activation='relu'),
            Dense(num_classes, activation='softmax'),
    ):
        net.add(layer)
    net.summary()

    net.compile(loss='categorical_crossentropy',
                optimizer='momentum',
                learning_rate=0.05)
    history = net.fit(x_train,
                      y_train,
                      batch_size=batch_size,
                      epochs=epochs,
                      verbose=1,
                      validation_data=(x_test, y_test))
    score = net.evaluate(x_test, y_test, verbose=0)
Example #3
0
def test_mlp():
    """Train a 784-512-512 relu MLP with a softmax head using RMSprop.

    NOTE(review): num_classes, x_train, y_train, x_test, y_test,
    batch_size and epochs are free variables — they must already be
    defined at module level when this runs.
    """
    model = Sequential()
    for layer in (
            Dense(units=512, activation='relu', input_shape=(784, )),
            Dense(units=512, activation='relu'),
            Dense(num_classes, activation='softmax'),
    ):
        model.add(layer)
    model.summary()

    model.compile(loss='categorical_crossentropy', optimizer='RMSprop')

    # NOTE(review): validation_data=(None, None) presumably disables
    # validation — confirm against Model.fit.
    history = model.fit(x_train,
                        y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        verbose=1,
                        validation_data=(None, None))
    score = model.evaluate(x_test, y_test, verbose=0)
Example #4
0
def test_mlp_forward():
    """Check a hand-wired Dense layer's forward pass against an explicit matmul."""
    print("\n")
    x = np.array([[1, 0], [0, 1], [1, 1], [0, 0]])
    y = np.array([[1, 0], [1, 0], [0, 1], [0, 1]])
    print("x shape:", x.shape, "y shape:", y.shape)
    print("\n")
    print("x", x)
    print("y", y)
    print("\n")

    # Unused by the forward pass below; kept for parity with the other tests.
    num_classes = 2
    batch_size = 1
    epochs = 20

    # Wire InputLayer -> Dense by hand instead of going through Sequential.
    input0 = InputLayer(shape=[2])
    dense1 = Dense(units=2, activation='relu', input_shape=[2])
    input0.outbound.append(dense1)
    dense1.inbound.append(input0)

    # Inject known parameters so the output is fully predictable.
    dense1.name = 'dense1'
    kernel = np.array([[1., 2.], [2., 1.]])
    bias = np.array([1., 1.])
    dense1.params = {'kernel': kernel, 'bias': bias}
    dense1.isInitialized = True
    print("kernel", kernel)
    print("bias", bias)

    # Forward pass: feed x through the input layer, then the dense layer.
    fed = input0.ForwardPropagation(feed=x)
    print("inputs", fed)
    out = dense1.ForwardPropagation()
    print("f1", out)
    # Expected pre-activation value; every entry of x@kernel + bias is
    # positive for these inputs, so relu leaves it unchanged.
    expected = np.matmul(fed, kernel) + bias
    print("expect f1", expected)
Example #5
0
def test_mlp_backward():
    """Run forward then backward through a hand-wired Input -> Dense -> softmax-CE chain."""
    print("\n")
    print("\n")
    x = np.array([[1, 0], [1, 1], [0, 1]])
    y = np.array([[0., 1.], [1., 0.], [0, 1]])
    print("x shape:", x.shape, "y shape:", y.shape)
    print("\n")
    print("x", x)
    print("y", y)
    print("\n")

    # Manually wire input -> dense (linear activation) -> loss.
    input0 = InputLayer(shape=[2])
    dense1 = Dense(units=2, activation='linear', input_shape=(2, ))
    loss2 = SoftmaxCategoricalCrossentropy()
    input0.outbound.append(dense1)
    dense1.inbound.append(input0)
    dense1.outbound.append(loss2)
    loss2.inbound.append(dense1)

    # Known parameters and empty gradient slots for the dense layer.
    dense1.name = 'dense1'
    kernel = np.array([[1., 2.], [2., 1.]])
    bias = np.array([1., 1.])
    dense1.params = {'kernel': kernel, 'bias': bias}
    dense1.grads = {'kernel': np.array([]), 'bias': np.array([])}
    dense1.isInitialized = True
    print("kernel", kernel)
    print("bias", bias)

    # Forward pass.
    fed = input0.ForwardPropagation(feed=x)
    print("inputs", fed)
    out = dense1.ForwardPropagation()
    print("f1", out)

    # Backward pass, seeded with near-saturated logits so the softmax is
    # effectively one-hot.
    logits = np.array([[-10., 10.], [10., -10.], [-10., 10.]])
    loss2.y_true = np.array([[0., 1.], [1., 0.], [0., 1.]])
    loss2.input_value = logits
    loss2.output_value = softmax_categorical_crossentropy(y_true=loss2.y_true,
                                                          logits=logits)
    print("loss2.output_value", loss2.output_value)
    grad_loss = loss2.BackwardPropagation(grads=None)
    print("grad_loss", grad_loss)
    grad_dense = dense1.BackwardPropagation(grad=grad_loss)
    print("grad_dense", grad_dense)
Example #6
0
def test_single_layer():
    """Train a single softmax layer to separate three Gaussian clusters."""
    # Three 2-D Gaussian clouds, 200 points each.
    identity_cov = [[1, 0], [0, 1]]
    cluster_a = np.random.multivariate_normal([6, 6], identity_cov, 200)
    cluster_b = np.random.multivariate_normal([1, 1], identity_cov, 200)
    cluster_c = np.random.multivariate_normal([12, 12], identity_cov, 200)

    # (matplotlib scatter-plot of the three clusters intentionally omitted)

    x_train = np.vstack((cluster_a, cluster_b, cluster_c))
    # One-hot labels, one class per cluster.
    y_train = np.vstack(
        (np.array([[0, 0, 1]] * 200), np.array([[0, 1, 0]] * 200),
         np.array([[1, 0, 0]] * 200)))
    print(x_train.shape, y_train.shape)

    num_classes = 3
    batch_size = 20
    epochs = 100

    # A single softmax layer: multinomial logistic regression.
    model = Sequential()
    model.add(Dense(units=num_classes, activation='softmax',
                    input_shape=(2, )))
    model.summary()

    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  learning_rate=0.005)

    # NOTE(review): validation_data=(None, None) presumably disables
    # validation in this framework — confirm against Model.fit.
    history = model.fit(x_train,
                        y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        verbose=1,
                        validation_data=(None, None))
Example #7
0
# Script: train a 784-256-128-64-10 relu MLP on MNIST with RMSprop.
from SemiFlow.layer import Dense
from SemiFlow.Model import Sequential
from SemiFlow.utils.dataset import mnist

# Load MNIST with one-hot labels; this variant of mnist() returns
# train/validation/test splits as (x, y) pairs.
train_set, valid_set, test_set = mnist(one_hot=True)

x_train, y_train = train_set[0], train_set[1]
x_test, y_test = test_set[0], test_set[1]
x_val, y_val = valid_set[0], valid_set[1]

num_classes = 10
batch_size = 32
epochs = 3

# Hidden relu layers tapering 256 -> 128 -> 64, softmax output over 10 digits.
model = Sequential()
model.add(Dense(units=256, activation='relu', input_shape=(784, )))
model.add(Dense(units=128, activation='relu'))
model.add(Dense(units=64, activation='relu'))
model.add(Dense(num_classes, activation='softmax'))

model.summary()

# Track both training and validation loss during fitting.
model.compile(loss='categorical_crossentropy',
              optimizer='RMSprop',
              learning_rate=0.05,
              metrics=['train_loss', 'val_loss'])

history = model.fit(
    x_train,
    y_train,
    batch_size=batch_size,
Example #8
0
# Script: train a small CNN (two conv layers, max-pooling, two dense layers).
# NOTE(review): x_train, y_train, x_val, y_val, x_test, y_test and the layer
# classes (Sequential, Conv2D, MaxPooling2D, Flatten, Dense) are not defined
# in this snippet — they must come from earlier in the file.
num_classes = 10
batch_size = 32
epochs = 1

model = Sequential()
# First conv block: 32 filters of 3x3 over 28x28x1 float32 input.
model.add(
    Conv2D(32,
           kernel_size=(3, 3),
           activation='relu',
           input_shape=(28, 28, 1),
           dtype='float32'))
model.add(Conv2D(64, (3, 3), activation='relu'))
# NOTE(review): keyword is 'pooling_size' here (Keras spells it 'pool_size');
# presumably this framework's own spelling — confirm against MaxPooling2D.
model.add(MaxPooling2D(pooling_size=(3, 3)))
model.add(Flatten())
model.add(Dense(units=64, activation='relu'))
model.add(Dense(num_classes, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy',
              optimizer='RMSprop',
              learning_rate=0.05)

# Fit with validation on the held-out split, then score on the test split.
history = model.fit(x_train,
                    y_train,
                    batch_size=batch_size,
                    epochs=epochs,
                    verbose=1,
                    validation_data=(x_val, y_val))
score = model.evaluate(x_test, y_test, verbose=0)