import os

import numpy as np
from sklearn.datasets import fetch_mldata  # replaced by fetch_openml in newer scikit-learn

import xeno


def main(max_iter):
    # Small convolutional network trained on a 1000-sample MNIST subset.
    seed = 100
    nb_data = 1000

    print("loading data ....")
    mnist = fetch_mldata('MNIST original',
                         data_home=os.path.join(os.path.dirname(__file__),
                                                './data'))
    X_train = mnist.data.reshape((-1, 1, 28, 28)) / 255.0
    # Shuffle images and labels identically by reseeding the RNG with the same
    # seed before each permutation, then keep the first nb_data samples.
    np.random.seed(seed)
    X_train = np.random.permutation(X_train)[:nb_data]
    y_train = mnist.target
    np.random.seed(seed)
    y_train = np.random.permutation(y_train)[:nb_data]
    n_classes = np.unique(y_train).size

    print("building model ...")
    net = xeno.Model()
    net.add(xeno.layers.Convolution(1, (3, 3), input_shape=(None, 1, 28, 28)))
    net.add(xeno.layers.MeanPooling((2, 2)))
    net.add(xeno.layers.Convolution(2, (4, 4)))
    net.add(xeno.layers.MeanPooling((2, 2)))
    net.add(xeno.layers.Flatten())
    net.add(xeno.layers.Softmax(n_out=n_classes))
    net.compile()

    print("train model ... ")
    net.fit(X_train,
            xeno.utils.data.one_hot(y_train),
            max_iter=max_iter,
            validation_split=0.1,
            batch_size=100)
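

# xeno.utils.data.one_hot is assumed here to map an integer label vector of
# shape (n,) to a one-hot float matrix of shape (n, n_classes); a minimal
# NumPy sketch of that assumed behaviour (the name `one_hot_sketch` is
# illustrative only):
def one_hot_sketch(labels, n_classes=None):
    labels = np.asarray(labels, dtype=int)
    if n_classes is None:
        n_classes = labels.max() + 1
    out = np.zeros((labels.size, n_classes))
    out[np.arange(labels.size), labels] = 1.0
    return out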


def main2(max_iter):
    # Sequence classifier: Embedding -> stacked BatchLSTMs -> mean pooling
    # over time -> Softmax, trained with RMSprop.
    nb_batch = 30
    nb_seq = 20

    xs, ys, x_size, y_size = prepare_data(nb_seq)

    net = xeno.Model()
    net.add(xeno.layers.Embedding(nb_batch=nb_batch, nb_seq=nb_seq,
                                  n_out=200, input_size=x_size,
                                  static=False))
    net.add(xeno.layers.BatchLSTM(n_out=400, return_sequence=True))
    net.add(xeno.layers.BatchLSTM(n_out=200, return_sequence=True))
    net.add(xeno.layers.MeanPooling((nb_seq, 1)))
    net.add(xeno.layers.Flatten())
    net.add(xeno.layers.Softmax(n_out=y_size))
    net.compile(loss='scce', optimizer=xeno.optimizers.RMSprop())
    net.fit(xs, ys, batch_size=nb_batch, validation_split=0.1, max_iter=max_iter)
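

# prepare_data(nb_seq) is not shown. From the layer configuration above it is
# assumed to return integer token sequences `xs` of shape (n_samples, nb_seq)
# with values in [0, x_size) and integer class targets `ys` in [0, y_size),
# which is what an Embedding front-end with an 'scce' loss would expect.
# A purely synthetic stand-in, useful only as a shape reference:
def prepare_data_sketch(nb_seq, n_samples=3000, x_size=5000, y_size=10):
    xs = np.random.randint(0, x_size, size=(n_samples, nb_seq))
    ys = np.random.randint(0, y_size, size=n_samples)
    return xs, ys, x_size, y_size
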
def main4(max_iter):
    # test Adagrad optimizer

    n_classes, X_train, y_train = get_data()

    # model
    print("building model ...")
    model = xeno.Model()
    model.add(xeno.layers.Dense(n_out=100, n_in=784, activation='relu'))
    model.add(xeno.layers.Softmax(n_out=n_classes))
    model.compile(loss='scce', optimizer='adagrad')

    # train
    print("train model ... ")
    model.fit(X_train,
              xeno.utils.data.one_hot(y_train),
              max_iter=max_iter,
              validation_split=0.1)
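

# The Dense examples call a get_data() helper that is not shown. A minimal
# sketch of such a helper, modelled on the MNIST loading code in main() above
# but flattened to 784 features; the helper in the original repository may
# differ.
def get_data_sketch(seed=100, nb_data=1000):
    mnist = fetch_mldata('MNIST original',
                         data_home=os.path.join(os.path.dirname(__file__),
                                                'data'))
    X = mnist.data.reshape((-1, 784)) / 255.0
    y = mnist.target
    np.random.seed(seed)
    X = np.random.permutation(X)[:nb_data]
    np.random.seed(seed)
    y = np.random.permutation(y)[:nb_data]
    return np.unique(y).size, X, y
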

def main2(max_iter):
    # Single BatchLSTM predicting a distribution over the vocabulary,
    # trained with gradient-clipped SGD.
    batch_size, vocab_size, time_steps, batch_in, batch_out = get_data()

    print("Building model ...")
    net = xeno.Model()
    net.add(
        xeno.layers.BatchLSTM(n_out=300,
                              n_in=vocab_size,
                              return_sequence=False,
                              nb_batch=batch_size,
                              nb_seq=time_steps))
    # net.add(xeno.layers.MeanPooling(pool_size=(time_steps, 1)))
    # net.add(xeno.layers.Flatten())
    net.add(xeno.layers.Softmax(n_out=vocab_size))
    net.compile(loss=xeno.objectives.SCCE(),
                optimizer=xeno.optimizers.SGD(lr=0.00001, clip=5))

    print("Train model ...")
    net.fit(batch_in, batch_out, max_iter=max_iter, batch_size=batch_size)
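

# get_data() for this example is also not shown. Because the BatchLSTM is the
# first layer (n_in=vocab_size, no Embedding), the inputs are assumed to be
# one-hot encoded sequences of shape (n_samples, time_steps, vocab_size) with
# integer targets for the 'scce'-style loss. A synthetic stand-in, useful
# only as a shape reference:
def get_lm_data_sketch(batch_size=20, vocab_size=60, time_steps=30,
                       n_samples=2000):
    token_ids = np.random.randint(0, vocab_size, size=(n_samples, time_steps))
    batch_in = np.eye(vocab_size)[token_ids]   # one-hot input sequences
    batch_out = np.random.randint(0, vocab_size, size=n_samples)
    return batch_size, vocab_size, time_steps, batch_in, batch_out
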
def main3(max_iter):
    # test NesterovMomentum optimizer

    n_classes, X_train, y_train = get_data()

    # model
    print("building model ...")
    model = xeno.Model()
    model.add(xeno.layers.Dense(n_out=200, n_in=784, activation='relu'))
    model.add(xeno.layers.Softmax(n_out=n_classes))
    model.compile(loss=xeno.objectives.SCCE(),
                  optimizer=xeno.optimizers.NesterovMomentum())

    # train
    print("train model ... ")
    model.fit(X_train,
              xeno.utils.data.one_hot(y_train),
              max_iter=max_iter,
              validation_split=0.1)

def main(max_iter):
    # Two-layer fully connected network (ReLU -> Softmax) trained with SGD.
    n_classes, X_train, y_train = get_data()

    # model
    print("building model ...")
    model = xeno.Model()
    model.add(
        xeno.layers.Dense(n_out=200,
                          n_in=784,
                          activation=xeno.activations.ReLU()))
    model.add(
        xeno.layers.Dense(n_out=n_classes,
                          activation=xeno.activations.Softmax()))
    model.compile(loss=xeno.objectives.SCCE(), optimizer=xeno.optimizers.SGD())

    # train
    print("train model ... ")
    model.fit(X_train,
              xeno.utils.data.one_hot(y_train),
              max_iter=max_iter,
              validation_split=0.1)
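

# Hypothetical entry point: run one of the examples above for a small number
# of iterations. The chosen function and iteration count are illustrative only.
if __name__ == '__main__':
    main(max_iter=10)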