Example #1

import tempfile

import numpy as np

import simplenet as sn

def test_import_export() -> None:
    """Test that I can export and re-import weights."""
    dtype = "float64"
    inputs = [[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1], [1, 1, 0]]
    targets = [[0, 1], [1, 0], [1, 0], [0, 1], [1, 0]]

    shapes = [9, 7, 5, 3]
    learning_rate = 0.1

    input_size = (None, len(inputs[0]))
    output_size = (None, len(targets[0]))

    nn = sn.SimpleNet(
        input_shape=input_size,
        output_shape=output_size,
        hidden_layer_sizes=shapes,
        learning_rate=learning_rate,
        activation_function=sn.sigmoid,
        output_activation=sn.softmax,
        loss_function=sn.cross_entropy,
        dtype=dtype,
    )

    nn2 = sn.SimpleNet(
        input_shape=input_size,
        output_shape=output_size,
        hidden_layer_sizes=shapes,
        learning_rate=learning_rate,
        activation_function=sn.sigmoid,
        output_activation=sn.softmax,
        loss_function=sn.cross_entropy,
        dtype=dtype,
    )

    for idx, weight in enumerate(nn.weights):
        assert not np.allclose(weight, nn2.weights[idx])

    with tempfile.NamedTemporaryFile(suffix=".npz") as f:
        nn.export_model(f.name)
        nn2.import_model(f.name)

    for idx, weight in enumerate(nn.weights):
        assert np.allclose(weight, nn2.weights[idx])

    for idx, bias in enumerate(nn.biases):
        assert np.allclose(bias, nn2.biases[idx])
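
For context, a minimal sketch of what an npz-based export/import pair might look like; this is hypothetical, not SimpleNet's actual implementation, and assumes weights and biases are lists of arrays:

import numpy as np


def export_model(path, weights, biases):
    """Save every weight and bias array into a single .npz archive."""
    arrays = {'w{}'.format(i): w for i, w in enumerate(weights)}
    arrays.update({'b{}'.format(i): b for i, b in enumerate(biases)})
    np.savez(path, **arrays)


def import_model(path, n_layers):
    """Load the arrays back in the order they were saved."""
    data = np.load(path)
    weights = [data['w{}'.format(i)] for i in range(n_layers)]
    biases = [data['b{}'.format(i)] for i in range(n_layers)]
    return weights, biases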
Example #2

import numpy as np

import simplenet as sn

def test_validate_neg_log_likelihood() -> None:
    """Use gradient checking to validate neg_log_likelihood cost."""
    inputs = [[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1], [1, 1, 0]]
    targets = [[0], [1], [1], [0], [1]]

    shapes = [9, 7, 5, 3]

    nn = sn.SimpleNet(
        input_shape=(None, len(inputs[0])),
        output_shape=(None, len(targets[0])),
        hidden_layer_sizes=shapes,
        learning_rate=0.1,
        activation_function=sn.sigmoid,
        output_activation=sn.sigmoid,
        loss_function=sn.neg_log_likelihood,
        dtype=np.float64,
        seed=42,
    )

    assert nn.validate(inputs=inputs, targets=targets, epsilon=1e-7)
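
The validate call above presumably does centered-difference gradient checking. A minimal sketch of the technique, with hypothetical cost and grad callables standing in for SimpleNet's internals:

import numpy as np


def gradient_check(cost, grad, theta, epsilon=1e-7):
    """Relative error between analytic and numerical gradients."""
    numeric = np.zeros_like(theta)
    for i in range(theta.size):
        step = np.zeros_like(theta)
        step.flat[i] = epsilon
        # Centered difference: (J(theta + eps) - J(theta - eps)) / (2 * eps)
        numeric.flat[i] = (cost(theta + step) - cost(theta - step)) / (2 * epsilon)
    analytic = grad(theta)
    denom = np.linalg.norm(analytic) + np.linalg.norm(numeric)
    return np.linalg.norm(analytic - numeric) / denom

A relative error well below the chosen epsilon (around 1e-7 here) is the usual signal that the backpropagated gradients are correct.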
Example #3

import time
from typing import Optional

import numpy as np

import simplenet as sn

# FOLDER and get_images_and_labels are assumed to come from the project's
# MNIST download/parsing helpers.

def main(import_progress: Optional[str] = None,
         save_progress: Optional[str] = None) -> None:
    """Download mnist and test a simple MLP.

    Arguments:
        import_progress: If given, import weights from here
        save_progress: If given, save weights to here (epoch number will be
                       appended to the filename)
    """
    print("Starting...")
    try:
        data = np.load(FOLDER + '/data.npz')
        # npz keys arr_0..arr_3 sort in the same order the arrays were saved.
        X_train, y_train, X_test, y_test = (v for _, v in sorted(data.items()))
    except FileNotFoundError:
        print("Gathering the training data")
        X_train, y_train = get_images_and_labels('train', folder=FOLDER)

        errmsg = "Train images were loaded incorrectly"
        assert (X_train.shape,
                y_train.shape) == ((60000, 28, 28), (60000, 1)), errmsg
        X_train = X_train.reshape(60000, 784)  # noqa

        print("Gathering the test data")
        X_test, y_test = get_images_and_labels('test', folder=FOLDER)

        errmsg = "Test images were loaded incorrectly"
        assert (X_test.shape,
                y_test.shape) == ((10000, 28, 28), (10000, 1)), errmsg
        X_test = X_test.reshape(10000, 784)  # noqa

        np.savez(FOLDER + '/data', X_train, y_train, X_test, y_test)

    # One-hot encode the integer labels via an identity-matrix lookup.
    y_train = np.eye(10)[y_train.reshape(-1)]
    y_test = np.eye(10)[y_test.reshape(-1)]

    nn = sn.SimpleNet(
        input_shape=(None, 784),
        output_shape=(None, 10),
        hidden_layer_sizes=(256, 128, 32),
        activation_function=sn.relu,
        learning_rate=0.008,
        output_activation=sn.softmax,
        loss_function=sn.cross_entropy,
        seed=42,
    )

    epochs = 5
    batch_size = 200

    if import_progress:
        nn.import_model(import_progress)

    print('batch size {}'.format(batch_size))
    times = []
    for e in range(epochs):
        batch_errors = []
        start = time.time()
        for idx in range(0, X_train.shape[0], batch_size):
            nn.learn(inputs=X_train[idx:idx + batch_size],
                     targets=y_train[idx:idx + batch_size])
            batch_err = nn.err
            batch_errors.append(batch_err)

        times.append(time.time() - start)
        epoch_err = sum(batch_errors) / len(batch_errors)
        print('epoch {}: {} seconds, cost: {}'.format(e, times[-1], epoch_err))

        train_preds = nn.predict(X_train).argmax(axis=1)
        train_acc = np.mean(train_preds == y_train.argmax(axis=1))
        print('train_accuracy:', train_acc)

        test_preds = nn.predict(X_test).argmax(axis=1)
        test_acc = np.mean(test_preds == y_test.argmax(axis=1))
        print('test_accuracy:', test_acc, '\n')

        if save_progress:
            nn.export_model(f"{save_progress}_{e}")

    print("Total time: {}".format(sum(times)))
Example #4

from copy import deepcopy

import keras
from keras.layers import Dense, Input
from keras.models import Model

import simplenet as sn

dtype = "float64"
keras.backend.set_floatx(dtype)

inputs = [[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1], [1, 1, 0]]
targets = [[0, 1], [1, 0], [1, 0], [0, 1], [1, 0]]

shapes = [9, 7, 5, 3]
learning_rate = 0.1

input_size = (None, len(inputs[0]))
output_size = (None, len(targets[0]))

nn = sn.SimpleNet(
    input_shape=input_size,
    output_shape=output_size,
    hidden_layer_sizes=shapes,
    learning_rate=learning_rate,
    activation_function=sn.sigmoid,
    output_activation=sn.softmax,
    loss_function=sn.cross_entropy,
    dtype=dtype,
)

initial_weights, initial_biases = deepcopy(nn.weights), deepcopy(nn.biases)

keras_inputs = Input(shape=(len(inputs[0]),))

x = keras_inputs
for shape in shapes:
    x = Dense(shape, activation="sigmoid")(x)
outputs = Dense(len(targets[0]), activation=None)(x)

model = Model(inputs=keras_inputs, outputs=outputs)
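
To make the comparison meaningful, the snapshot taken in initial_weights and initial_biases can be copied into the Keras layers. A minimal sketch, assuming SimpleNet stores each kernel in the same (inputs, units) orientation Keras uses:

# model.layers[0] is the Input layer and has no weights; each Dense layer
# takes [kernel, bias], so flatten the bias in case SimpleNet stores (1, n).
for layer, w, b in zip(model.layers[1:], initial_weights, initial_biases):
    layer.set_weights([w, b.reshape(-1)])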
Example #5

import keras
from keras.layers import Dense, Input
from keras.models import Model
from keras.optimizers import SGD

import simplenet as sn

data = [[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1], [1, 1, 0]]
targets = [[0, 1], [1, 0], [1, 0], [0, 1], [1, 0]]

learning_rate = 0.1
dtype = 'float64'

nn = sn.SimpleNet(
    input_shape=(None, len(data[0])),
    output_shape=(None, len(targets[0])),
    hidden_layer_sizes=[9, 7, 5, 3],
    learning_rate=learning_rate,
    activation_function=sn.sigmoid,
    output_activation=sn.softmax,
    loss_function=sn.cross_entropy,
    dtype=dtype,
)

keras.backend.set_floatx(dtype)

inputs = Input(shape=(len(data[0]),), dtype=dtype)
x = Dense(9, activation='sigmoid')(inputs)
x = Dense(7, activation='sigmoid')(x)
x = Dense(5, activation='sigmoid')(x)
x = Dense(3, activation='sigmoid')(x)
outputs = Dense(len(targets[0]), activation='softmax')(x)

model = Model(inputs=inputs, outputs=outputs)
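
SGD is imported above presumably so the Keras model can be trained with the same plain gradient descent as SimpleNet. A minimal sketch of the head-to-head, assuming categorical cross-entropy matches sn.cross_entropy (older Keras versions spell the optimizer argument lr rather than learning_rate):

import numpy as np

model.compile(optimizer=SGD(learning_rate=learning_rate),
              loss='categorical_crossentropy')

# Train both networks on the same batch and compare their reported losses.
history = model.fit(np.array(data), np.array(targets), epochs=1, verbose=0)
nn.learn(inputs=data, targets=targets)
print('keras loss:', history.history['loss'][0], 'simplenet loss:', nn.err)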