Code example #1
def fit_mlp(image_size=(28, 28),
            datasets='../data/mnist.pkl.gz',
            outpath='../output/mnist_mlp.params',
            n_hidden=500,
            learning_rate=0.01,
            L1_reg=0.00,
            L2_reg=0.001,
            n_epochs=1000,
            batch_size=20,
            patience=10000,
            patience_increase=2,
            improvement_threshold=0.995):

    # Symbolic minibatch index, flattened-image matrix, and integer label vector.
    index = T.lscalar()
    x = T.matrix('x')
    y = T.ivector('y')

    # One-hidden-layer MLP: 28*28 = 784 flattened pixels in, n_hidden units, 10 digit classes out.
    classifier = MLP(rng=rng.RandomState(SEED),
                     input=x,
                     n_in=reduce(np.multiply, image_size),
                     n_hidden=n_hidden,
                     n_out=10)
    # Negative log-likelihood plus L1 and L2 weight penalties.
    cost = (classifier.negative_log_likelihood(y) + L1_reg * classifier.L1 +
            L2_reg * classifier.L2)
    # Minibatch stochastic gradient descent learner over the dataset splits from load_data.
    learner = SupervisedMSGD(index, x, y, batch_size, learning_rate,
                             load_data(datasets), outpath, classifier, cost)

    # Train with early stopping governed by the patience parameters, then report
    # the best validation loss, elapsed time, and epoch count.
    best_validation_loss, best_iter, epoch, elapsed_time = learner.fit(
        n_epochs=n_epochs,
        patience=patience,
        patience_increase=patience_increase,
        improvement_threshold=improvement_threshold)
    display_results(best_validation_loss, elapsed_time, epoch)

    return learner
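
A minimal call of fit_mlp is sketched below. This is an illustrative addition, not part of the module: it assumes the helpers used above (MLP, SupervisedMSGD, load_data, display_results) are importable and that the MNIST pickle exists at the default datasets path; n_epochs is shortened only to keep the run quick.

# Hypothetical usage sketch for fit_mlp (default hyperparameters, shortened schedule).
learner = fit_mlp(n_hidden=500,
                  learning_rate=0.01,
                  n_epochs=5,       # shortened from the default 1000 for a quick check
                  batch_size=20)
# fit_mlp prints the best validation loss via display_results and returns the learner.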
Code example #2
def fit_lenet(image_size=(28, 28), n_image_channels=1,
              datasets='../data/mnist.pkl.gz', outpath='../output/mnist_lenet.params',
              filter_shape=(5, 5), nkerns=(2, 6), pool_size=(2,2), n_hidden=500,
              learning_rate=0.01, L1_reg=0.00, L2_reg=0.001,
              n_epochs=1000, batch_size=20, patience=10000,
              patience_increase=2, improvement_threshold=0.995):

    index = T.lscalar()
    x = T.matrix('x')
    y = T.ivector('y')

    # Convolutional network: one convolution/pooling stage per entry of nkerns
    # (filter_shape filters, pool_size max pooling) feeding a hidden layer of n_hidden units.
    classifier = LeNet(
        rng=rng.RandomState(SEED),
        input=x,
        batch_size=batch_size,
        n_image_channels=n_image_channels,
        image_size=image_size,
        nkerns=nkerns,
        filter_shape=filter_shape,
        pool_size=pool_size,
        n_hidden=n_hidden
    )
    # Regularized negative log-likelihood, as in the MLP example.
    cost = (
        classifier.negative_log_likelihood(y)
        + L1_reg * classifier.L1
        + L2_reg * classifier.L2
    )
    learner = SupervisedMSGD(
        index,
        x,
        y,
        batch_size,
        learning_rate,
        load_data(datasets),
        outpath,
        classifier,
        cost
    )

    best_validation_loss, best_iter, epoch, elapsed_time = learner.fit(
        n_epochs=n_epochs,
        patience=patience,
        patience_increase=patience_increase,
        improvement_threshold=improvement_threshold
    )
    display_results(best_validation_loss, elapsed_time, epoch)

    return learner
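
The default filter_shape, pool_size, and nkerns fix the feature-map sizes inside the network. The sketch below is a back-of-the-envelope check of those sizes, assuming 'valid' convolutions and non-overlapping max pooling as in the standard Theano LeNet tutorial; the helper name conv_pool_output is introduced here only for illustration.

# Hypothetical size check, assuming a 'valid' convolution followed by max pooling.
def conv_pool_output(size, filter_shape=(5, 5), pool_size=(2, 2)):
    return tuple((s - f + 1) // p for s, f, p in zip(size, filter_shape, pool_size))

stage0 = conv_pool_output((28, 28))   # (12, 12): (28 - 5 + 1) // 2
stage1 = conv_pool_output(stage0)     # (4, 4):   (12 - 5 + 1) // 2
n_flat = 6 * stage1[0] * stage1[1]    # nkerns[1] * 4 * 4 = 96 inputs to the hidden layer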
Code example #3
def fit_logistic(image_size=(28, 28),
                 datasets='../data/mnist.pkl.gz',
                 outpath='../output/mnist_logistic_regression.params',
                 learning_rate=0.13, n_epochs=1000, batch_size=600,
                 patience=5000, patience_increase=2, improvement_threshold=0.995):

    index = T.lscalar()
    x = T.matrix('x')
    y = T.ivector('y')

    # Multinomial logistic regression on the flattened pixels; no hidden layer and
    # no regularization term, so the cost is just the negative log-likelihood.
    classifier = LogisticRegression(
        input=x,
        n_in=reduce(np.multiply, image_size),
        n_out=10
    )
    cost = classifier.negative_log_likelihood(y)
    learner = SupervisedMSGD(
        index,
        x,
        y,
        batch_size,
        learning_rate,
        load_data(datasets),
        outpath,
        classifier,
        cost
    )

    best_validation_loss, best_iter, epoch, elapsed_time = learner.fit(
        n_epochs=n_epochs,
        patience=patience,
        patience_increase=patience_increase,
        improvement_threshold=improvement_threshold
    )
    display_results(best_validation_loss, elapsed_time, epoch)

    return learner
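
All three fitters follow the same pattern (build a classifier, wrap it in SupervisedMSGD, train with early stopping, return the learner), so they can be swapped behind one entry point. The snippet below is a hedged illustration of that, not code from the module; quick_run is a name introduced only for this example.

# Hypothetical dispatcher over the three example fitters.
def quick_run(model='logistic', n_epochs=5):
    fitters = {'logistic': fit_logistic, 'mlp': fit_mlp, 'lenet': fit_lenet}
    # Each fitter trains with early stopping and returns its SupervisedMSGD learner.
    return fitters[model](n_epochs=n_epochs)

learner = quick_run('mlp')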