Example #1
"""Trains a simple convnet on the MNIST dataset."""

from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Activation, Dense, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras.regularizers import l2
from keras import backend as K

from importance_sampling.training import ConstantTimeImportanceTraining
from example_utils import get_parser

if __name__ == "__main__":
    parser = get_parser("Train a CNN on MNIST")
    args = parser.parse_args()

    batch_size = 128
    num_classes = 10
    epochs = 10

    # input image dimensions
    img_rows, img_cols = 28, 28

    # the data, split between train and test sets
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    if K.image_data_format() == 'channels_first':
        x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
        x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
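
The listing is cut off here. A minimal sketch of how the script usually continues, assuming the standard Keras MNIST preprocessing and the Keras-like fit() documented for importance_sampling; the model architecture below is illustrative, not the original one:

    else:
        x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
        x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)

    # Scale pixels to [0, 1] floats and one-hot encode the labels.
    x_train = x_train.astype('float32') / 255
    x_test = x_test.astype('float32') / 255
    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)

    # Illustrative convnet; the original layer sizes are not shown on this page.
    model = Sequential([
        Conv2D(32, kernel_size=(3, 3), activation='relu',
               input_shape=x_train.shape[1:]),
        Conv2D(64, (3, 3), activation='relu'),
        MaxPooling2D(pool_size=(2, 2)),
        Flatten(),
        Dense(128, activation='relu', kernel_regularizer=l2(1e-5)),
        Dense(num_classes, activation='softmax')
    ])
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])

    # The wrapper replaces uniform batch sampling with importance sampling
    # and exposes a Keras-like fit().
    ConstantTimeImportanceTraining(model).fit(
        x_train, y_train,
        batch_size=batch_size,
        epochs=epochs,
        validation_data=(x_test, y_test)
    )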
Example #2
"""Trains a simple logistic regression on the MNIST dataset."""

from __future__ import print_function

import keras
from keras import backend as K
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Activation, Dense
from keras.optimizers import SGD

from importance_sampling.training import SVRG
from example_utils import get_parser

if __name__ == "__main__":
    parser = get_parser("Train logistic regression with SVRG on MNIST")
    args = parser.parse_args()

    batch_size = 16
    num_classes = 10
    epochs = 100

    # the data, split between train and test sets
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    x_train = x_train.reshape(60000, 784)
    x_test = x_test.reshape(10000, 784)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_train /= 255
    x_test /= 255
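
The listing stops before the model is defined. A minimal sketch of the usual continuation, assuming a single softmax layer for logistic regression and the Keras-like fit() that the SVRG wrapper exposes; constructor arguments beyond the model are left at their defaults because they are not shown here:

    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)

    # Logistic regression is a single dense layer with a softmax output.
    model = Sequential([
        Dense(num_classes, activation='softmax', input_shape=(784,))
    ])
    model.compile(loss='categorical_crossentropy',
                  optimizer=SGD(lr=0.01),
                  metrics=['accuracy'])

    # SVRG wraps the compiled model and drives the variance-reduced updates.
    SVRG(model).fit(
        x_train, y_train,
        batch_size=batch_size,
        epochs=epochs,
        validation_data=(x_test, y_test)
    )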
Example #3
"""Trains a simple fully connected NN on the MNIST dataset."""

from __future__ import print_function

import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Activation, Dense
from keras.optimizers import RMSprop
from keras.regularizers import l2

from importance_sampling.training import ImportanceTraining
from example_utils import get_parser

if __name__ == "__main__":
    parser = get_parser("Train an MLP on MNIST")
    args = parser.parse_args()

    batch_size = 128
    num_classes = 10
    epochs = 10

    # the data, split between train and test sets
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    x_train = x_train.reshape(60000, 784)
    x_test = x_test.reshape(10000, 784)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_train /= 255
    x_test /= 255
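
As above, the listing is truncated. A minimal sketch of an MLP wrapped with ImportanceTraining; the hidden-layer sizes and the L2 strength are assumptions based on the standard Keras MNIST MLP, not the original script:

    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)

    model = Sequential([
        Dense(512, activation='relu', input_shape=(784,),
              kernel_regularizer=l2(1e-5)),
        Dense(512, activation='relu', kernel_regularizer=l2(1e-5)),
        Dense(num_classes, activation='softmax')
    ])
    model.compile(loss='categorical_crossentropy',
                  optimizer=RMSprop(),
                  metrics=['accuracy'])

    # ImportanceTraining draws training batches according to an importance
    # score instead of uniformly, and exposes a Keras-like fit().
    ImportanceTraining(model).fit(
        x_train, y_train,
        batch_size=batch_size,
        epochs=epochs,
        validation_data=(x_test, y_test)
    )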
Example #4
        if t >= self._total_time:
            self.model.stop_training = True

        lr = self._get_lr(t / self._total_time)
        if lr != self._lr:
            self._lr = lr
            K.set_value(self.model.optimizer.lr, self._lr)

    @property
    def lr(self):
        return self._lr


if __name__ == "__main__":
    parser = get_parser("Train a ResNet on CIFAR10")
    parser.add_argument(
        "--depth",
        type=int,
        default=28,
        help="Choose the depth of the resnet"
    )
    parser.add_argument(
        "--width",
        type=int,
        default=2,
        help="Choose the width of the resnet"
    )
    parser.add_argument(
        "--presample",
        type=float,
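
The listing breaks off inside the --presample argument definition, so the remaining options and the training code are not recoverable from this page. For context, a hypothetical, self-contained version of the time-based learning-rate schedule whose tail is shown at the top of this example might look as follows; everything except K.set_value, model.stop_training and the lines copied from above is an assumption:

import time

from keras import backend as K
from keras.callbacks import Callback


class TimeBasedLRSchedule(Callback):
    """Sketch: stop after a wall-clock budget and decay the learning rate
    as a function of the fraction of that budget already spent."""

    def __init__(self, total_time, schedule):
        self._total_time = total_time  # training budget in seconds
        self._get_lr = schedule        # maps progress in [0, 1] to a learning rate
        self._start = None
        self._lr = 0.0

    def on_train_begin(self, logs=None):
        self._start = time.time()

    def on_batch_begin(self, batch, logs=None):
        t = time.time() - self._start
        if t >= self._total_time:
            self.model.stop_training = True

        lr = self._get_lr(t / self._total_time)
        if lr != self._lr:
            self._lr = lr
            K.set_value(self.model.optimizer.lr, self._lr)

    @property
    def lr(self):
        return self._lr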