Example #1
    def setUp(self):
        skip_if_no_h5py()
        import h5py
        skip_if_no_data()
        from pylearn2.datasets.mnist import MNIST

        # save MNIST data to HDF5
        train = MNIST(which_set='train', one_hot=1, start=0, stop=100)
        for name, dataset in [('train', train)]:
            with h5py.File("{}.h5".format(name), "w") as f:
                f.create_dataset('X', data=dataset.get_design_matrix())
                f.create_dataset('topo_view',
                                 data=dataset.get_topological_view())
                f.create_dataset('y', data=dataset.get_targets())

        # instantiate Train object
        self.train = yaml_parse.load(trainer_yaml)
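The `trainer_yaml` template consumed by `yaml_parse.load` is defined at module level in the original test file and is not shown in this snippet. For context, the file written above can also be opened directly with pylearn2's `HDF5Dataset`; the sketch below is an illustration only, and it assumes the constructor takes the HDF5 filename together with the in-file names of the design matrix and targets, mirroring the keys written by `setUp`.

# Illustrative sketch, not part of the original test: the argument names are
# assumed to mirror the HDF5 keys ('X', 'topo_view', 'y') written above.
from pylearn2.datasets.hdf5 import HDF5Dataset

train_h5 = HDF5Dataset(filename='train.h5', X='X', y='y')
print(train_h5.X.shape)  # design matrix stored under 'X'
print(train_h5.y.shape)  # targets stored under 'y'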
Example #2
def test_hdf5_convert_to_one_hot():
    """Train using an HDF5 dataset with one-hot target conversion."""
    skip_if_no_h5py()
    import h5py

    # save random data to HDF5
    handle, filename = tempfile.mkstemp()
    dataset = random_dense_design_matrix(np.random.RandomState(1),
                                         num_examples=10, dim=5, num_classes=3)
    with h5py.File(filename, 'w') as f:
        f.create_dataset('X', data=dataset.get_design_matrix())
        f.create_dataset('y', data=dataset.get_targets())

    # instantiate Train object
    trainer = yaml_parse.load(convert_to_one_hot_yaml % {'filename': filename})
    trainer.main_loop()

    # cleanup
    os.remove(filename)
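The `convert_to_one_hot_yaml` template is another module-level string that is not reproduced in this snippet; the `%` substitution only fills in the temporary filename. The fragment below is purely hypothetical and sketches the general shape of the dataset entry in such a template; the real template also specifies a model and a training algorithm, and the exact argument names are assumptions.

# Hypothetical illustration of the dataset portion of the template; the
# '%(filename)s' placeholder is what the test fills in at load time.
example_dataset_yaml = """
    dataset: !obj:pylearn2.datasets.hdf5.HDF5Dataset {
        filename: '%(filename)s',
        X: 'X',
        y: 'y',
    },
"""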
Example #3
def test_hdf5_topo_view():
    """Train using an HDF5 dataset with topo_view instead of X."""
    skip_if_no_h5py()
    import h5py

    # save random data to HDF5
    handle, filename = tempfile.mkstemp()
    dataset = random_one_hot_topological_dense_design_matrix(
        np.random.RandomState(1), num_examples=10, shape=(2, 2), channels=3,
        axes=('b', 0, 1, 'c'), num_classes=3)
    with h5py.File(filename, 'w') as f:
        f.create_dataset('topo_view', data=dataset.get_topological_view())
        f.create_dataset('y', data=dataset.get_targets())

    # instantiate Train object
    trainer = yaml_parse.load(topo_view_yaml % {'filename': filename})
    trainer.main_loop()

    # cleanup
    os.remove(filename)
Example #4
def test_hdf5_load_all():
    """Train using an HDF5 dataset with all data loaded into memory."""
    skip_if_no_h5py()
    import h5py

    # save random data to HDF5
    handle, filename = tempfile.mkstemp()
    dataset = random_one_hot_dense_design_matrix(np.random.RandomState(1),
                                                 num_examples=10,
                                                 dim=5,
                                                 num_classes=3)
    with h5py.File(filename, 'w') as f:
        f.create_dataset('X', data=dataset.get_design_matrix())
        f.create_dataset('y', data=dataset.get_targets())

    # instantiate Train object
    trainer = yaml_parse.load(load_all_yaml % {'filename': filename})
    trainer.main_loop()

    # cleanup
    os.remove(filename)
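As with the other examples, `load_all_yaml` is a module-level template that is not shown here. The docstring states that the data is loaded fully into memory; the sketch below is an assumption-laden illustration of how that option would look when constructing the dataset directly, taking `load_all` to be the flag that switches between reading everything into memory and accessing the HDF5 file lazily.

# Sketch only: 'load_all' is assumed to control whether the HDF5 datasets
# are read fully into memory (True) or accessed lazily on demand (False).
# 'filename' refers to the temporary file written by the test above.
from pylearn2.datasets.hdf5 import HDF5Dataset

in_memory = HDF5Dataset(filename=filename, X='X', y='y', load_all=True)
lazy = HDF5Dataset(filename=filename, X='X', y='y', load_all=False)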