Example #1
def test_one_hot():
    skip_if_no_data()
    data = icml07.MNIST_rotated_background(which_set='train', one_hot=True, split=(100,100,100))
    assert data.y.shape[1] == 10   # MNIST has 10 classes

    data = icml07.Rectangles(which_set='train', one_hot=True, split=(100,100,100))
    assert data.y.shape[1] == 2   # Two classes
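Note: these snippets are excerpted from pylearn2 test files, so their import preambles are omitted. Below is a minimal sketch of the imports most of them assume; the module paths are real pylearn2 paths, but any given snippet needs only a subset, and dataset-specific imports (utlc, adult, TFD, CIFAR100, ...) are likewise left out of the originals.

# Sketch of the common preamble assumed by the snippets on this page.
# Different snippets use numpy under different aliases, so both are shown.
import os
import string  # Python 2 stdlib; used by string.lower() in a few snippets
import numpy
import numpy as np

from pylearn2.config import yaml_parse
from pylearn2.datasets import icml07
from pylearn2.testing import skip
from pylearn2.testing.skip import skip_if_no_data
from pylearn2.utils import string_utils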
Example #2
def Transform():
    """Test smaller version of convolutional_network.ipynb"""
    which_experiment = "S100"
    skip.skip_if_no_data()
    yaml_file_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    data_dir = string_utils.preprocess("${PYLEARN2_DATA_PATH}")
    save_path = os.path.join(data_dir, "cifar10", "experiment_" + string.lower(which_experiment))
    base_save_path = os.path.join(data_dir, "cifar10")
    # Escape potential backslashes in Windows filenames, since
    # they will be processed when the YAML parser reads the
    # path as a string
    # save_path.replace('\\', r'\\')

    yaml = open("{0}/experiment_base_transform.yaml".format(yaml_file_path), "r").read()
    hyper_params = {
        "batch_size": 64,
        "output_channels_h1": 64,
        "output_channels_h2": 128,
        "output_channels_h3": 600,
        "max_epochs": 100,
        "save_path": save_path,
        "base_save_path": base_save_path,
    }
    yaml = yaml % (hyper_params)
    train = yaml_parse.load(yaml)
    train.main_loop()
Example #3
def main():
    skip.skip_if_no_data()

    # setting
    data_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data'))
    save_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'pylearn2/result'))
    yaml_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'pylearn2/yaml'))

    # set hyper parameter
    yaml = open("{0}/conv_sample.yaml".format(yaml_path), 'r').read()
    hyper_params = {'train_stop': 50,
                    'valid_stop': 50050,
                    'test_stop': 50,
                    'batch_size': 50,
                    'output_channels_h0': 2,
                    'output_channels_h1': 2,
                    'max_epochs': 10,
                    'data_path': data_path,
                    'save_path': save_path}

    yaml = yaml % (hyper_params)

    # train
    train = yaml_parse.load(yaml)
    train.main_loop()
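The `yaml = yaml % (hyper_params)` line above (and in the examples that follow) is plain %-style string substitution: the tutorial YAML files carry placeholders such as %(batch_size)i and %(save_path)s, which are filled in before yaml_parse.load builds the Train object. A minimal sketch with a made-up template line, not one copied from conv_sample.yaml:

# Hypothetical one-line template; real files like conv.yaml spread many such
# placeholders over a full Train specification.
template = "batch_size: %(batch_size)i, save_path: '%(save_path)s/model.pkl'"
print(template % {'batch_size': 50, 'save_path': '/tmp'})
# -> batch_size: 50, save_path: '/tmp/model.pkl'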
Example #4
def test_hepatitis():
    """test hepatitis dataset"""
    skip_if_no_data()
    data = hepatitis.Hepatitis()
    assert data.X is not None
    assert not np.any(np.isinf(data.X))
    assert not np.any(np.isnan(data.X))
Example #5
def test_avicenna():
    """test that train/valid/test sets load (when standardize=False/true)."""
    skip_if_no_data()
    data = Avicenna(which_set='train', standardize=False)
    assert data.X.shape == (150205, 120)

    data = Avicenna(which_set='valid', standardize=False)
    assert data.X.shape == (4096, 120)

    data = Avicenna(which_set='test', standardize=False)
    assert data.X.shape == (4096, 120)

    # test that train/valid/test sets load (when standardize=True).
    data_train = Avicenna(which_set='train', standardize=True)
    assert data_train.X.shape == (150205, 120)

    data_valid = Avicenna(which_set='valid', standardize=True)
    assert data_valid.X.shape == (4096, 120)

    data_test = Avicenna(which_set='test', standardize=True)
    assert data_test.X.shape == (4096, 120)

    dt = np.concatenate([data_train.X, data_valid.X, data_test.X], axis=0)
    # Force double precision to compute mean and std, otherwise the test
    # fails because of precision.
    assert np.allclose(dt.mean(dtype='float64'), 0)
    assert np.allclose(dt.std(dtype='float64'), 1.)
Example #6
def train_convolutional_network(yaml_with_hyper_params):

    skip.skip_if_no_data()

    train = yaml_parse.load(yaml_with_hyper_params)

    train.main_loop()
Example #7
def test_avicenna():
    """test that train/valid/test sets load (when standardize=False/true)."""
    skip_if_no_data()
    data = Avicenna(which_set='train', standardize=False)
    assert data.X.shape == (150205, 120)

    data = Avicenna(which_set='valid', standardize=False)
    assert data.X.shape == (4096, 120)

    data = Avicenna(which_set='test', standardize=False)
    assert data.X.shape == (4096, 120)

    # test that train/valid/test sets load (when standardize=True).
    data_train = Avicenna(which_set='train', standardize=True)
    assert data_train.X.shape == (150205, 120)

    data_valid = Avicenna(which_set='valid', standardize=True)
    assert data_valid.X.shape == (4096, 120)

    data_test = Avicenna(which_set='test', standardize=True)
    assert data_test.X.shape == (4096, 120)

    dt = np.concatenate([data_train.X, data_valid.X, data_test.X], axis=0)
    # Force double precision to compute mean and std, otherwise the test
    # fails because of precision (see Example #5).
    assert np.allclose(dt.mean(dtype='float64'), 0)
    assert np.allclose(dt.std(dtype='float64'), 1.)
Example #8
def train_generic():
    PATH_TO_PYLEARN2_MODELS_DIR = os.path.abspath('./model_pylearn2')
    PATH_TO_INPUT_DIR = os.path.abspath('./intermediate_files_pylearn2/toy_train/')
    PROJECT_NAME = 'toy_train'

    skip.skip_if_no_data()
    train_dbm_model(PATH_TO_PYLEARN2_MODELS_DIR, PATH_TO_INPUT_DIR, PROJECT_NAME)
Example #9
def test():
    skip_if_no_data()

    dirname = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')

    with open(os.path.join(dirname, 'sr_dataset.yaml'), 'r') as f:
        dataset = f.read()

    hyper_params = {'train_stop': 50}
    dataset = dataset % (hyper_params)

    with open(os.path.join(dirname, 'sr_model.yaml'), 'r') as f:
        model = f.read()

    with open(os.path.join(dirname, 'sr_algorithm.yaml'), 'r') as f:
        algorithm = f.read()

    hyper_params = {'batch_size': 10, 'valid_stop': 50050}
    algorithm = algorithm % (hyper_params)

    with open(os.path.join(dirname, 'sr_train.yaml'), 'r') as f:
        train = f.read()

    save_path = os.path.dirname(os.path.realpath(__file__))
    train = train % locals()

    train = yaml_parse.load(train)
    train.main_loop()

    try:
        os.remove("{}/softmax_regression.pkl".format(save_path))
        os.remove("{}/softmax_regression_best.pkl".format(save_path))
    except OSError:
        pass
Example #10
def test_iris():
    """Load iris dataset"""
    skip_if_no_data()
    data = iris.Iris()
    assert data.X is not None
    assert not np.any(np.isinf(data.X))
    assert not np.any(np.isnan(data.X))
Example #11
def test_all_sparse_utlc():
    skip_if_no_data()
    for name in ['harry', 'terry', 'ule']:
        print "Loading sparse ", name
        train, valid, test = utlc.load_sparse_dataset(name, normalize=True)
        nb_elem = numpy.prod(train.shape)
        mi = train.data.min()
        ma = train.data.max()
        mi = min(0, mi)
        ma = max(0, ma)
        su = train.data.sum()
        mean = float(su) / nb_elem
        print name, "dtype, max, min, mean, nb non-zero, nb element, %sparse"
        print train.dtype, ma, mi, mean, train.nnz, nb_elem, (
            nb_elem - float(train.nnz)) / nb_elem
        print name, "max, min, mean, std (all stats on non-zero element)"
        print train.data.max(), train.data.min(), train.data.mean(
        ), train.data.std()
        assert scipy.sparse.issparse(
            train), "train is not sparse for %s dataset" % name
        assert scipy.sparse.issparse(
            valid), "valid is not sparse for %s dataset" % name
        assert scipy.sparse.issparse(
            test), "test is not sparse for %s dataset" % name
        assert train.shape[1] == test.shape[1] == valid.shape[1], \
            "shapes of sparse %s dataset do not match" % name
Example #12
def main():
    skip.skip_if_no_data()

    # setting
    data_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                             'data'))
    save_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'pylearn2/result'))
    yaml_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'pylearn2/yaml'))

    # set hyper parameter
    yaml = open("{0}/conv_sample.yaml".format(yaml_path), 'r').read()
    hyper_params = {
        'train_stop': 50,
        'valid_stop': 50050,
        'test_stop': 50,
        'batch_size': 50,
        'output_channels_h0': 2,
        'output_channels_h1': 2,
        'max_epochs': 10,
        'data_path': data_path,
        'save_path': save_path
    }

    yaml = yaml % (hyper_params)

    # train
    train = yaml_parse.load(yaml)
    train.main_loop()
Example #13
def test():
    skip_if_no_data()

    dirname = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')

    with open(os.path.join(dirname, 'sr_dataset.yaml'), 'r') as f:
        dataset = f.read()

    hyper_params = {'train_stop': 50}
    dataset = dataset % (hyper_params)

    with open(os.path.join(dirname, 'sr_model.yaml'), 'r') as f:
        model = f.read()

    with open(os.path.join(dirname, 'sr_algorithm.yaml'), 'r') as f:
        algorithm = f.read()

    hyper_params = {'batch_size': 10,
                    'valid_stop': 50050}
    algorithm = algorithm % (hyper_params)

    with open(os.path.join(dirname, 'sr_train.yaml'), 'r') as f:
        train = f.read()

    save_path = os.path.dirname(os.path.realpath(__file__))
    train = train % locals()

    train = yaml_parse.load(train)
    train.main_loop()

    try:
        os.remove("{}/softmax_regression.pkl".format(save_path))
        os.remove("{}/softmax_regression_best.pkl".format(save_path))
    except OSError:
        pass
Example #14
def test():
    skip_if_no_data()

    dirname = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')

    with open(os.path.join(dirname, 'sr_dataset.yaml'), 'r') as f:
        dataset = f.read()

    hyper_params = {'train_stop': 50}
    dataset = dataset % (hyper_params)

    with open(os.path.join(dirname, 'sr_model.yaml'), 'r') as f:
        model = f.read()

    with open(os.path.join(dirname, 'sr_algorithm.yaml'), 'r') as f:
        algorithm = f.read()

    hyper_params = {'batch_size': 10, 'valid_stop': 50050}
    algorithm = algorithm % (hyper_params)

    with open(os.path.join(dirname, 'sr_train.yaml'), 'r') as f:
        train = f.read()

    train = train % locals()

    train = yaml_parse.load(train)
    train.main_loop()
Example #15
def test_convolutional_network():

    skip.skip_if_no_data()
    yaml_file_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..'))
    save_path = os.path.dirname(os.path.realpath(__file__))

    yaml = open("{0}/conv.yaml".format(yaml_file_path), 'r').read()
    hyper_params = {
        'train_stop': 50,
        'valid_stop': 50050,
        'test_stop': 50,
        'batch_size': 50,
        'output_channels_h2': 4,
        'output_channels_h3': 4,
        'max_epochs': 1,
        'save_path': save_path
    }
    yaml = yaml % (hyper_params)
    train = yaml_parse.load(yaml)
    train.main_loop()

    try:
        os.remove("{}/convolutional_network_best.pkl".format(save_path))
    except OSError:
        pass
Example #16
def test():
    skip_if_no_data()

    dirname = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')

    with open(os.path.join(dirname, 'sr_dataset.yaml'), 'r') as f:
        dataset = f.read()

    hyper_params = {'train_stop': 50}
    dataset = dataset % (hyper_params)

    with open(os.path.join(dirname, 'sr_model.yaml'), 'r') as f:
        model = f.read()

    with open(os.path.join(dirname, 'sr_algorithm.yaml'), 'r') as f:
        algorithm = f.read()

    hyper_params = {'batch_size': 10,
                    'valid_stop': 50050}
    algorithm = algorithm % (hyper_params)

    with open(os.path.join(dirname, 'sr_train.yaml'), 'r') as f:
        train = f.read()

    train = train % locals()

    train = yaml_parse.load(train)
    train.main_loop()
Example #17
def CNN():
    """Test smaller version of convolutional_network.ipynb"""
    skip.skip_if_no_data()
    yaml_file_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..'))
    save_path = os.path.dirname(os.path.realpath(__file__))
    # Escape potential backslashes in Windows filenames, since
    # they will be processed when the YAML parser reads the
    # path as a string
    #save_path.replace('\\', r'\\')

    yaml = open("{0}/base.yaml".format(yaml_file_path), 'r').read()
    hyper_params = {
        'train_stop': 40000,
        'valid_stop': 50000,
        'test_stop': 10000,
        'batch_size': 100,
        'output_channels_h1': 64,
        'output_channels_h2': 128,
        'output_channels_h3': 600,
        'max_epochs': 500,
        'save_path': save_path
    }
    yaml = yaml % (hyper_params)
    train = yaml_parse.load(yaml)
    train.main_loop()
Example #18
def test_ule():
    skip_if_no_data()
    # Test loading of transfer data
    train, valid, test, transfer = utlc.load_ndarray_dataset("ule",
                                                             normalize=True,
                                                             transfer=True)
    assert train.shape[0] == transfer.shape[0]
Example #19
def test_convolutional_network():
    """Test smaller version of convolutional_network.ipynb"""
    skip.skip_if_no_data()
    yaml_file_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                  '..'))
    save_path = os.path.dirname(os.path.realpath(__file__))
    # Escape potential backslashes in Windows filenames, since
    # they will be processed when the YAML parser reads the
    # path as a string
    save_path = save_path.replace('\\', r'\\')

    yaml = open("{0}/conv.yaml".format(yaml_file_path), 'r').read()
    hyper_params = {'train_stop': 50,
                    'valid_stop': 50050,
                    'test_stop': 50,
                    'batch_size': 50,
                    'output_channels_h2': 4,
                    'output_channels_h3': 4,
                    'max_epochs': 1,
                    'save_path': save_path}
    yaml = yaml % (hyper_params)
    train = yaml_parse.load(yaml)
    train.main_loop()

    try:
        os.remove("{}/convolutional_network_best.pkl".format(save_path))
    except OSError:
        pass
Example #20
def CNN():
    """Test smaller version of convolutional_network.ipynb"""
    which_experiment = 'ADD3_10_S100'
    skip.skip_if_no_data()
    yaml_file_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..'))
    data_dir = string_utils.preprocess('${PYLEARN2_DATA_PATH}')
    save_path = os.path.join(data_dir, 'cifar10',
                             'experiment_' + string.lower(which_experiment))
    # Escape potential backslashes in Windows filenames, since
    # they will be processed when the YAML parser reads the
    # path as a string
    #save_path.replace('\\', r'\\')

    yaml = open("{0}/experiment_add3_10_s100.yaml".format(yaml_file_path),
                'r').read()
    hyper_params = {
        'batch_size': 64,
        'output_channels_h1': 64,
        'output_channels_h2': 128,
        'output_channels_h3': 600,
        'max_epochs': 100,
        'save_path': save_path
    }
    yaml = yaml % (hyper_params)
    train = yaml_parse.load(yaml)
    train.main_loop()
Example #21
def test_ule():
    skip_if_no_data()
    # Test loading of transfer data
    train, valid, test, transfer = utlc.load_ndarray_dataset("ule",
                                                             normalize=True,
                                                             transfer=True)
    assert train.shape[0] == transfer.shape[0]
Example #22
    def setUp(self):
        """
        Attempts to load train and test
        """
        skip_if_no_data()
        self.train = MNIST_rotated_background(which_set='train')
        self.test = MNIST_rotated_background(which_set='test')
Example #23
def test_FoveatedNORB():
    """
    This function tests the FoveatedNORB class. In addition to the shape and
    datatype of X and y member of the returned object, it also checks the
    scale of data while passing different parameters to the constructor.
    """
    skip_if_no_data()
    data = FoveatedNORB('train')
    datamin = data.X.min()
    datamax = data.X.max()
    assert data.X.shape == (24300, 8976)
    assert data.X.dtype == 'float32'
    assert data.y.shape == (24300, )
    assert data.y_labels == 5
    assert data.get_topological_view().shape == (24300, 96, 96, 2)

    data = FoveatedNORB('train', center=True)
    assert data.X.min() == datamin - 127.5
    assert data.X.max() == datamax - 127.5

    data = FoveatedNORB('train', center=True, scale=True)
    assert numpy.all(data.X <= 1.)
    assert numpy.all(data.X >= -1.)

    data = FoveatedNORB('train', scale=True)
    assert numpy.all(data.X <= 1.)
    assert numpy.all(data.X >= 0.)

    data = FoveatedNORB('test')
    assert data.X.shape == (24300, 8976)
    assert data.X.dtype == 'float32'
    assert data.y.shape == (24300, )
    assert data.y_labels == 5
    assert data.get_topological_view().shape == (24300, 96, 96, 2)
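FoveatedNORB is not imported in the snippet; it presumably comes from pylearn2.datasets.norb_small (a hedged sketch, since the snippet's own preamble is not shown):

# Assumed imports for the snippet above; path believed correct for pylearn2,
# but not verified against a specific revision.
import numpy
from pylearn2.datasets.norb_small import FoveatedNORB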
Example #24
    def setUp(self):
        skip_if_no_data()
        self.train_set = CIFAR100(which_set='train')
        self.test_set = CIFAR100(which_set='test')
        assert not np.any(np.isnan(self.train_set.X))
        assert not np.any(np.isinf(self.train_set.X))
        assert not np.any(np.isnan(self.test_set.X))
        assert not np.any(np.isinf(self.test_set.X))
Example #25
    def setUp(self):
        """Load the train and test sets; check for nan and inf."""
        skip_if_no_data()
        self.train_set = CIFAR100(which_set='train')
        self.test_set = CIFAR100(which_set='test')
        assert not np.any(np.isnan(self.train_set.X))
        assert not np.any(np.isinf(self.train_set.X))
        assert not np.any(np.isnan(self.test_set.X))
        assert not np.any(np.isinf(self.test_set.X))
Example #26
def train_dbm():

    skip.skip_if_no_data()

    yaml_file_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                  '../dbm_demo'))
    save_path = os.path.dirname(os.path.realpath(__file__))

    train(yaml_file_path, save_path)
Example #27
def test_cos_dataset():
    """Tests if the dataset generator yields the desired value."""
    skip_if_no_data()
    dataset = CosDataset()

    sample_batch = dataset.get_batch_design(batch_size=10000)
    assert sample_batch.shape == (10000, 2)
    assert sample_batch[:, 0].min() >= dataset.min_x
    assert sample_batch[:, 0].max() <= dataset.max_x
Example #28
def test_one_hot():
    skip_if_no_data()
    data = icml07.MNIST_rotated_background(which_set='train', one_hot=True,
                                           split=(100, 100, 100))
    assert data.y.shape[1] == 10   # MNIST has 10 classes

    data = icml07.Rectangles(which_set='train', one_hot=True,
                             split=(100, 100, 100))
    assert data.y.shape[1] == 2   # Two classes
Example #29
    def setUp(self):
        """
        Set up test for DenseMulticlassSVM.

        Imports DenseMulticlassSVM if available, skips the test otherwise.
        """
        global DenseMulticlassSVM
        skip_if_no_sklearn()
        skip_if_no_data()
        import pylearn2.models.svm
        DenseMulticlassSVM = pylearn2.models.svm.DenseMulticlassSVM
Example #30
def test_npy_npz():
    skip_if_no_data()
    arr = np.array([[3, 4, 5], [4, 5, 6]])
    np.save('test.npy', arr)
    np.savez('test.npz', arr)
    npy = NpyDataset(file='test.npy')
    npy._deferred_load()
    npz = NpzDataset(file='test.npz', key='arr_0')
    assert np.all(npy.X == npz.X)
    os.remove('test.npy')
    os.remove('test.npz')
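NpyDataset and NpzDataset here come from pylearn2's npy/npz dataset wrappers; a hedged import sketch (the snippet's own preamble is not shown):

# Assumed import for the snippet above; path believed correct for pylearn2.
from pylearn2.datasets.npy_npz import NpyDataset, NpzDataset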
Example #31
def test_sda():

    skip.skip_if_no_data()

    yaml_file_path = '.'
    save_path = '.'

    train_layer1(yaml_file_path, save_path)
    train_layer2(yaml_file_path, save_path)
    train_layer3(yaml_file_path, save_path)
    train_mlp(yaml_file_path, save_path)
Example #32
    def setUp(self):
        """
        Set up test for DenseMulticlassSVM.

        Imports DenseMulticlassSVM if available, skips the test otherwise.
        """
        global DenseMulticlassSVM
        skip_if_no_sklearn()
        skip_if_no_data()
        import pylearn2.models.svm
        DenseMulticlassSVM = pylearn2.models.svm.DenseMulticlassSVM
Example #33
def test_all_utlc():
    skip_if_no_data()
    # not testing rita, because it requires a lot of memory and is slow
    for name in ['avicenna', 'harry', 'ule']:
        print "Loading ", name
        train, valid, test = utlc.load_ndarray_dataset(name, normalize=True)
        print "dtype, max, min, mean, std"
        print train.dtype, train.max(), train.min(), train.mean(), train.std()
        assert isinstance(train, numpy.ndarray), "train is not an ndarray in %s dataset" % name
        assert isinstance(valid, numpy.ndarray), "valid is not an ndarray in %s dataset" % name
        assert isinstance(test, numpy.ndarray), "test is not an ndarray in %s dataset" % name
        assert train.shape[1] == test.shape[1] == valid.shape[1], \
            "shapes of datasets do not match for %s" % name
Example #34
def test_mnist():
    """
    Tests mnist.yaml by running it for only one epoch
    """

    skip_if_no_data()
    limited_epoch_train(os.path.join(yaml_file_path, 'mnist.yaml'))
    try:
        os.remove(os.path.join(save_path, 'mnist.pkl'))
        os.remove(os.path.join(save_path, 'mnist_best.pkl'))
    except OSError:
        pass
Example #35
def test_all_utlc():
    skip_if_no_data()
    # not testing rita, because it requires a lot of memory and is slow
    for name in ['avicenna', 'harry', 'ule']:
        print("Loading ", name)
        train, valid, test = utlc.load_ndarray_dataset(name, normalize=True)
        print("dtype, max, min, mean, std")
        print(train.dtype, train.max(), train.min(), train.mean(), train.std())
        assert isinstance(train, numpy.ndarray)
        assert isinstance(valid, numpy.ndarray)
        assert isinstance(test, numpy.ndarray)
        assert train.shape[1] == test.shape[1] == valid.shape[1]
Example #36
    def setUp(self):
        skip_if_no_data()
        self.train = TFD(which_set='train')
        self.test = TFD(which_set='test')
        valid = TFD(which_set='valid')
        unlabeled = TFD(which_set='unlabeled')
        full_train = TFD(which_set='full_train')
        large = TFD(which_set='test', image_size=96)
        fold1 = TFD(which_set='test', fold=1)
        fold2 = TFD(which_set='test', fold=2)
        fold3 = TFD(which_set='test', fold=3)
        fold4 = TFD(which_set='test', fold=4)
Example #37
def test_mnist():
    """
    Tests mnist.yaml by running it for only one epoch
    """

    skip_if_no_data()
    limited_epoch_train(os.path.join(yaml_file_path, 'mnist.yaml'))
    try:
        os.remove(os.path.join(save_path, 'mnist.pkl'))
        os.remove(os.path.join(save_path, 'mnist_best.pkl'))
    except OSError:
        pass
Example #38
    def setUp(self):
        skip_if_no_data()
        self.train = TFD(which_set='train')
        self.test = TFD(which_set='test')
        valid = TFD(which_set='valid')
        unlabeled = TFD(which_set='unlabeled')
        full_train = TFD(which_set='full_train')
        large = TFD(which_set='test', image_size=96)
        fold1 = TFD(which_set='test', fold=1)
        fold2 = TFD(which_set='test', fold=2)
        fold3 = TFD(which_set='test', fold=3)
        fold4 = TFD(which_set='test', fold=4)
Example #39
def test_sda():

    skip.skip_if_no_data()

    yaml_file_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                  '..'))
    save_path = os.path.dirname(os.path.realpath(__file__))

    train_layer1(yaml_file_path, save_path)
    train_layer2(yaml_file_path, save_path)
    train_layer3(yaml_file_path, save_path)
    train_mlp(yaml_file_path, save_path)
Example #40
def test_all_utlc():
    skip_if_no_data()
    # not testing rita, because it requires a lot of memory and is slow
    for name in ['avicenna', 'harry', 'ule']:
        print "Loading ", name
        train, valid, test = utlc.load_ndarray_dataset(name, normalize=True)
        print "dtype, max, min, mean, std"
        print train.dtype, train.max(), train.min(), train.mean(), train.std()
        assert isinstance(train, numpy.ndarray)
        assert isinstance(valid, numpy.ndarray)
        assert isinstance(test, numpy.ndarray)
        assert train.shape[1] == test.shape[1] == valid.shape[1]
Example #41
def test_show_examples():
    """
    Create a YAML file of the MNIST dataset and show examples
    """
    skip_if_no_matplotlib()
    skip_if_no_data()
    with open('temp.yaml', 'w') as f:
        f.write("""
!obj:pylearn2.datasets.mnist.MNIST {
        which_set: 'train'
}
""")
    show_examples('temp.yaml', 28, 28, out='garbage.png')
    os.remove('temp.yaml')
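show_examples and skip_if_no_matplotlib are also undeclared in this snippet; to the best of my knowledge they live at the paths below (treat the exact locations as assumptions):

# Assumed imports for the snippet above; paths not verified against a
# specific pylearn2 revision.
from pylearn2.scripts.show_examples import show_examples
from pylearn2.testing.skip import skip_if_no_data, skip_if_no_matplotlib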
Example #42
def test_part_2():
    skip_if_no_data()
    with open(os.path.join(YAML_FILE_PATH,
              'mlp_tutorial_part_2.yaml'), 'r') as f:
        train = f.read()
    hyper_params = {'train_stop': 50,
                    'valid_stop': 50050,
                    'dim_h0': 5,
                    'max_epochs': 1,
                    'save_path': SAVE_PATH}
    train = train % (hyper_params)
    train = yaml_parse.load(train)
    train.main_loop()
    cleaunup("mlp_best.pkl")
Example #43
def test_dbm():

    skip.skip_if_no_data()

    yaml_file_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                  '../dbm_demo'))
    save_path = os.path.dirname(os.path.realpath(__file__))

    train(yaml_file_path, save_path)

    try:
        os.remove("{}/dbm.pkl".format(save_path))
    except OSError:
        pass
Example #44
def test_part_2():
    skip_if_no_data()
    with open(os.path.join(pylearn2.__path__[0], 'scripts', 'tutorials',
              'mlp_tutorial_part_2.yaml'), 'r') as f:
        train = f.read()
    hyper_params = {'train_stop': 50,
                    'valid_stop': 50050,
                    'dim_h0': 5,
                    'max_epochs': 1}
    train = train % (hyper_params)
    print train
    train = yaml_parse.load(train)
    train.main_loop()
Example #45
def test_part_2():
    skip_if_no_data()
    with open(os.path.join(YAML_FILE_PATH,
              'mlp_tutorial_part_2.yaml'), 'r') as f:
        train = f.read()
    hyper_params = {'train_stop': 50000,
                    'valid_stop': 60000,
                    'dim_h0': 500,
                    'max_epochs': 10000,
                    'save_path': SAVE_PATH}
    train = train % (hyper_params)
    train = yaml_parse.load(train)
    train.main_loop()
    cleaunup("mlp_best.pkl")
Example #46
def test_mnist():
    """
    Tests mnist.yaml by running it for only one epoch
    """
    if config.mode == "DEBUG_MODE":
        yaml_file = 'mnist_fast'
    else:
        yaml_file = 'mnist'
    skip_if_no_data()
    limited_epoch_train(os.path.join(yaml_file_path, '%s.yaml' % yaml_file))
    try:
        os.remove(os.path.join(save_path, '%s.pkl' % yaml_file))
        os.remove(os.path.join(save_path, '%s_best.pkl' % yaml_file))
    except OSError:
        pass
Example #47
def test_adult():
    """
    Tests if it will work correctly for train and test set.
    """
    skip_if_no_data()
    adult_train = adult(which_set='train')
    assert (adult_train.X >= 0.).all()
    assert adult_train.y.dtype == bool
    assert adult_train.X.shape == (30162, 104)
    assert adult_train.y.shape == (30162, 1)

    adult_test = adult(which_set='test')
    assert (adult_test.X >= 0.).all()
    assert adult_test.y.dtype == bool
    assert adult_test.X.shape == (15060, 103)
    assert adult_test.y.shape == (15060, 1)
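The adult loader used above is a function rather than a dataset class; it presumably comes from pylearn2.datasets.adult (hedged import sketch):

# Assumed import for the snippet above; path believed correct for pylearn2.
from pylearn2.datasets.adult import adult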