Example #1
def yaml_file_execution(file_path):
    try:
        train = yaml_parse.load_path(file_path)
        train.algorithm.termination_criterion = EpochCounter(max_epochs=2)
        train.main_loop()
    except NoDataPathError:
        raise SkipTest("PYLEARN2_DATA_PATH environment variable not defined")
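
For context, a minimal sketch of how a helper like this is typically driven from a test module; the imports and the YAML file name below are assumptions for illustration, not part of the original example:

import os

from nose.plugins.skip import SkipTest
from pylearn2.config import yaml_parse
from pylearn2.datasets.exc import NoDataPathError
from pylearn2.termination_criteria import EpochCounter


def test_example_yaml():
    # Resolve the (hypothetical) YAML file relative to this test module.
    yaml_file_execution(os.path.join(os.path.dirname(__file__),
                                     'example.yaml'))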
Example #2
def yaml_file_execution(file_path):
    try:
        train = yaml_parse.load_path(file_path)
        train.algorithm.termination_criterion = EpochCounter(max_epochs=2)
        train.main_loop()
    except NoDataPathError:
        raise SkipTest("PYLEARN2_DATA_PATH environment variable not defined")
Example #3
def load_path(path, environ=None, **kwargs):
    """
    Convenience function for loading a YAML configuration from a file
    into a `PartialPlus` graph.

    Parameters
    ----------
    path : str
        The path to the file to load on disk.
    environ : dict, optional
        A dictionary used for ${FOO} substitutions in addition to
        environment variables. If a key appears both in `os.environ`
        and this dictionary, the value in this dictionary is used.

    Returns
    -------
    graph : Node
        A `PartialPlus` or `Literal` node representing the root
        node of the YAML hierarchy.

    Notes
    -----
    Other keyword arguments are passed on to `yaml.load`.
    """
    return proxy_to_partialplus(yaml_parse.load_path(path, instantiate=False,
                                                     **kwargs),
                                environ=environ)
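
A hedged usage sketch for this wrapper; the file name and the ${DATA_DIR} key are hypothetical:

# Suppose 'config.yaml' contains a line such as
#   data_path: ${DATA_DIR}/train.npy
# Per the docstring, keys passed via `environ` take precedence over os.environ.
graph = load_path('config.yaml', environ={'DATA_DIR': '/tmp/data'})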
Example #4
def load_path(path, environ=None, **kwargs):
    """
    Convenience function for loading a YAML configuration from a file
    into a `PartialPlus` graph.

    Parameters
    ----------
    path : str
        The path to the file to load on disk.
    environ : dict, optional
        A dictionary used for ${FOO} substitutions in addition to
        environment variables. If a key appears both in `os.environ`
        and this dictionary, the value in this dictionary is used.

    Returns
    -------
    graph : Node
        A `PartialPlus` or `Literal` node representing the root
        node of the YAML hierarchy.

    Notes
    -----
    Other keyword arguments are passed on to `yaml.load`.
    """
    return proxy_to_partialplus(yaml_parse.load_path(path,
                                                     instantiate=False,
                                                     **kwargs),
                                environ=environ)
Example #5
def test_load_path():
    fd, fname = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as f:
        f.write("a: 23")
    loaded = load_path(fname)
    assert_(loaded['a'] == 23)
    os.remove(fname)
Example #6
def load_train_file(config_file_path, environ=None):
    """
    Loads and parses a yaml file for a Train object.
    Publishes the relevant training environment variables

    Parameters
    ----------
    config_file_path : WRITEME

    Returns
    -------
    WRITEME
    """
    from pylearn2.config import yaml_parse

    suffix_to_strip = '.yaml'

    # publish environment variables related to file name
    if config_file_path.endswith(suffix_to_strip):
        config_file_full_stem = config_file_path[0:-len(suffix_to_strip)]
    else:
        config_file_full_stem = config_file_path

    for varname in ["PYLEARN2_TRAIN_FILE_FULL_STEM"]:
        os.environ[varname] = config_file_full_stem

    directory = config_file_path.split('/')[:-1]
    directory = '/'.join(directory)
    if directory != '':
        directory += '/'
    os.environ["PYLEARN2_TRAIN_DIR"] = directory
    os.environ["PYLEARN2_TRAIN_BASE_NAME"] = config_file_path.split('/')[-1]
    os.environ["PYLEARN2_TRAIN_FILE_STEM"] = config_file_full_stem.split('/')[-1]

    return yaml_parse.load_path(config_file_path, environ=environ)
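
To make the side effects concrete, here is what a call publishes for a hypothetical path; the values follow directly from the string manipulation above:

train = load_train_file('experiments/mlp.yaml')
# Afterwards:
#   os.environ['PYLEARN2_TRAIN_FILE_FULL_STEM'] == 'experiments/mlp'
#   os.environ['PYLEARN2_TRAIN_DIR']            == 'experiments/'
#   os.environ['PYLEARN2_TRAIN_BASE_NAME']      == 'mlp.yaml'
#   os.environ['PYLEARN2_TRAIN_FILE_STEM']      == 'mlp'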
Example #7
def load_train_file(config_file_path):
    """Loads and parses a yaml file for a Train object.
    Publishes the relevant training environment variables"""
    from pylearn2.config import yaml_parse

    suffix_to_strip = '.yaml'

    # publish environment variables related to file name
    if config_file_path.endswith(suffix_to_strip):
        config_file_full_stem = config_file_path[0:-len(suffix_to_strip)]
    else:
        config_file_full_stem = config_file_path

    for varname in ["PYLEARN2_TRAIN_FILE_NAME", #this one is deprecated
            "PYLEARN2_TRAIN_FILE_FULL_STEM"]: #this is the new, accepted name
        environ.putenv(varname, config_file_full_stem)

    directory = config_file_path.split('/')[:-1]
    directory = '/'.join(directory)
    if directory != '':
        directory += '/'
    environ.putenv("PYLEARN2_TRAIN_DIR", directory)
    environ.putenv("PYLEARN2_TRAIN_BASE_NAME", config_file_path.split('/')[-1] )
    environ.putenv("PYLEARN2_TRAIN_FILE_STEM", config_file_full_stem.split('/')[-1] )

    return yaml_parse.load_path(config_file_path)
Example #8
def test_IS_cost():
    """
    VAE trains properly with the importance sampling cost
    """
    yaml_src_path = os.path.join(os.path.dirname(__file__), "test_vae_cost_is_criterion.yaml")
    train_object = yaml_parse.load_path(yaml_src_path)
    train_object.main_loop()
Example #9
def test_IS_cost():
    """
    VAE trains properly with the importance sampling cost
    """
    train_object = yaml_parse.load_path('pylearn2/models/tests/'
                                        'test_vae_cost_is_criterion.yaml')
    train_object.main_loop()
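
Note that the hard-coded relative path in this variant only resolves when the tests run from the repository root; the os.path.join(os.path.dirname(__file__), ...) form in Example #8 works from any working directory.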
Example #10
def load_train_file(config_file_path):
    """Loads and parses a yaml file for a Train object.
    Publishes the relevant training environment variables"""
    from pylearn2.config import yaml_parse

    suffix_to_strip = '.yaml'

    # publish environment variables related to file name
    if config_file_path.endswith(suffix_to_strip):
        config_file_full_stem = config_file_path[0:-len(suffix_to_strip)]
    else:
        config_file_full_stem = config_file_path

    for varname in [
            "PYLEARN2_TRAIN_FILE_NAME",  #this one is deprecated
            "PYLEARN2_TRAIN_FILE_FULL_STEM"
    ]:  #this is the new, accepted name
        environ.putenv(varname, config_file_full_stem)

    environ.putenv("PYLEARN2_TRAIN_DIR",
                   '/'.join(config_file_path.split('/')[:-1]))
    environ.putenv("PYLEARN2_TRAIN_BASE_NAME", config_file_path.split('/')[-1])
    environ.putenv("PYLEARN2_TRAIN_FILE_STEM",
                   config_file_full_stem.split('/')[-1])

    return yaml_parse.load_path(config_file_path)
Example #11
def test_VAE_cost():
    """
    VAE trains properly with the VAE cost
    """
    train_object = yaml_parse.load_path('pylearn2/models/tests/'
                                        'test_vae_cost_vae_criterion.yaml')
    train_object.main_loop()
Example #12
def test_load_path():
    fd, fname = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as f:
        f.write("a: 23")
    loaded = load_path(fname)
    assert_(loaded['a'] == 23)
    os.remove(fname)
Example #13
 def test_load_from_yaml(self):
     """
     Load dataset from a yaml file.
     """
     imdset = yaml_parse.load_path(self.yaml_file)
     imdset = imdset['dataset']
     self.assertEqual(len(imdset.adjusters), 6)
Example #14
def test_IS_cost():
    """
    VAE trains properly with the importance sampling cost
    """
    yaml_src_path = os.path.join(os.path.dirname(__file__),
                                 'test_vae_cost_is_criterion.yaml')
    train_object = yaml_parse.load_path(yaml_src_path)
    train_object.main_loop()
Example #15
 def construct_model(self):
     filedir = os.path.join(os.path.dirname(__file__), 'mlps.yaml')
     layer_args = yaml_parse.load_path(filedir)[self.modelname]
     layers = []
     
     # adapt in case of 2d layer
     if (self.conv_class == ConvElemwise):
         self.adapt_for_2d_conv(layer_args)
     else:
         self.adapt_for_time_dim(layer_args)
     print layer_args
         
     for i, layer_arg in enumerate(layer_args):
         layer = self.construct_layer(layer_arg, i)
         layers.append(layer)
     input_space = self.create_input_space()
     mlp = MLP(input_space=input_space, layers=layers)
     return mlp
Example #16
def load_train_file(config_file_path, environ=None):
    """
    Loads and parses a yaml file for a Train object.
    Publishes the relevant training environment variables

    Parameters
    ----------
    config_file_path : str
        Path to a config file containing a YAML string describing a
        pylearn2.train.Train object
    environ : dict, optional
        A dictionary used for ${FOO} substitutions in addition to
        environment variables when parsing the YAML file. If a key appears
        both in `os.environ` and this dictionary, the value in this
        dictionary is used.


    Returns
    -------
    Object described by the YAML string stored in the config file
    """
    from pylearn2.config import yaml_parse

    suffix_to_strip = '.yaml'

    # Publish environment variables related to file name
    if config_file_path.endswith(suffix_to_strip):
        config_file_full_stem = config_file_path[0:-len(suffix_to_strip)]
    else:
        config_file_full_stem = config_file_path

    os.environ["PYLEARN2_TRAIN_FILE_FULL_STEM"] = config_file_full_stem

    directory = config_file_path.split('/')[:-1]
    directory = '/'.join(directory)
    if directory != '':
        directory += '/'
    os.environ["PYLEARN2_TRAIN_DIR"] = directory
    os.environ["PYLEARN2_TRAIN_BASE_NAME"] = config_file_path.split('/')[-1]
    os.environ["PYLEARN2_TRAIN_FILE_STEM"] = config_file_full_stem.split(
        '/')[-1]

    return yaml_parse.load_path(config_file_path, environ=environ)
Example #17
def load_train_file(config_file_path, environ=None):
    """
    Loads and parses a yaml file for a Train object.
    Publishes the relevant training environment variables

    Parameters
    ----------
    config_file_path : str
        Path to a config file containing a YAML string describing a
        pylearn2.train.Train object
    environ : dict, optional
        A dictionary used for ${FOO} substitutions in addition to
        environment variables when parsing the YAML file. If a key appears
        both in `os.environ` and this dictionary, the value in this
        dictionary is used.


    Returns
    -------
    Object described by the YAML string stored in the config file
    """
    from pylearn2.config import yaml_parse

    suffix_to_strip = '.yaml'

    # Publish environment variables related to file name
    if config_file_path.endswith(suffix_to_strip):
        config_file_full_stem = config_file_path[0:-len(suffix_to_strip)]
    else:
        config_file_full_stem = config_file_path

    os.environ["PYLEARN2_TRAIN_FILE_FULL_STEM"] = config_file_full_stem

    directory = config_file_path.split('/')[:-1]
    directory = '/'.join(directory)
    if directory != '':
        directory += '/'
    os.environ["PYLEARN2_TRAIN_DIR"] = directory
    os.environ["PYLEARN2_TRAIN_BASE_NAME"] = config_file_path.split('/')[-1]
    os.environ[
        "PYLEARN2_TRAIN_FILE_STEM"] = config_file_full_stem.split('/')[-1]

    return yaml_parse.load_path(config_file_path, environ=environ)
Example #18
    def load_yaml(self, fname):
        """
        Slot that loads a YAML file.
        """
        if not fname:
            return
        try:
            # publish environment variables relevant to this file
            serial.prepare_train_file(fname)

            # load the tree of Proxy objects
            environ = {}
            yaml_tree = yaml_parse.load_path(fname,
                                             instantiate=False,
                                             environ=environ)
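            # instantiate=False yields a tree of Proxy objects;
            # _instantiate then builds the real objects in a second pass.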
            yaml_tree = yaml_parse._instantiate(yaml_tree)
            self.show_object_tree(yaml_tree)
        except Exception as exc:
            logger.error('Loading yaml file failed', exc_info=True)
            QtGui.QMessageBox.warning(self, 'Exception', str(exc))
Example #19
            t6 = time.time()

            print(t6 - t1, t2 - t1, t3 - t2, t4 - t3, t5 - t4, t6 - t5)

        if self.chunk_size is not None:
            assert save_path.endswith('.npy')
            save_path_pieces = save_path.split('.npy')
            assert len(save_path_pieces) == 2
            assert save_path_pieces[1] == ''
            save_path = save_path_pieces[0] + '_' + chr(
                ord('A') + self.chunk_id) + '.npy'
        np.save(save_path, output)

        if nan > 0:
            warnings.warn(str(nan) + ' features were nan')


if __name__ == '__main__':
    assert len(sys.argv) == 2
    yaml_path = sys.argv[1]

    assert yaml_path.endswith('.yaml')
    val = yaml_path[0:-len('.yaml')]
    os.environ['FEATURE_EXTRACTOR_YAML_PATH'] = val
    os.putenv('FEATURE_EXTRACTOR_YAML_PATH', val)
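    # Assigning to os.environ already calls putenv() under the hood, so the
    # explicit os.putenv() call is redundant, though harmless.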

    extractor = yaml_parse.load_path(yaml_path)

    extractor()
Example #20
import os
import shutil

from pylearn2.config import yaml_parse
from pylearn2.utils import serial
from pylearn2.utils import shell


status, rc = shell.run_shell_command("qstat -u goodfell -t @hades")
assert rc == 0

results = open("results.dat", "r")
lines = results.readlines()
results.close()

params = yaml_parse.load_path('params.yaml')
added = 0
print 'Experiment numbers reported by this script start at 0.'
print 'Keep in mind that vim will refer to the first line of results.dat as line 1'
for expnum, line in enumerate(lines):
    elems = line.split(' ')
    assert elems[-1] == '\n'
    obj = elems[0]
    if obj == 'P':
        # print expnum, 'pending according to results.dat'
        expdir = '/RQexec/goodfell/experiment_7/%d' % expnum
        if not os.path.exists(expdir):
            print 'Experiment not yet configured for experiment', expnum
            continue
        cluster_info = expdir + '/cluster_info.txt'
        if not os.path.exists(cluster_info):
Example #21
__author__ = "Ian Goodfellow"

from pylearn2.config import yaml_parse
import sys

_, path = sys.argv

simulator = yaml_parse.load_path(path)

simulator.main_loop()
Example #22
import os
from pylearn2.utils.serial import mkdir
from pylearn2.config import yaml_parse
from pylearn2.utils import safe_zip
import shutil

results = open("results.dat", "r")
lines = results.readlines()
results.close()

params = yaml_parse.load_path('params.yaml')

validate = open('validate.yaml', 'r')
validate_template = validate.read()
validate.close()

for expnum, line in enumerate(lines):
    elems = line.split(' ')
    assert elems[-1] == '\n'
    obj = elems[0]
    if obj == 'P':
        expdir = '/RQexec/goodfell/experiment_6/%d' % expnum
        if os.path.exists(expdir):
            continue
        try:
            mkdir(expdir)

            config = {}
            for param, value in safe_zip(params, elems[2:-1]):
                if param['type'] == 'float':
                    value = float(value)
Example #23
    patch_rescale = False
elif rescale == 'global':
    global_rescale = True
    patch_rescale = False
elif rescale == 'individual':
    global_rescale = False
    patch_rescale = True
else:
    assert False

if path.endswith('.pkl'):
    from pylearn2.utils import serial
    obj = serial.load(path)
elif path.endswith('.yaml'):
    print 'Building dataset from yaml...'
    obj = yaml_parse.load_path(path)
    print '...done'
else:
    obj = yaml_parse.load(path)

rows = options.rows
cols = options.cols

if hasattr(obj, 'get_batch_topo'):
    #obj is a Dataset
    dataset = obj

    examples = dataset.get_batch_topo(rows * cols)
else:
    #obj is a Model
    model = obj
Example #24
def main(options, positional_args):
    assert len(positional_args) == 1

    path, = positional_args

    out = options.out
    rescale = options.rescale

    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        assert False

    if path.endswith('.pkl'):
        from pylearn2.utils import serial
        obj = serial.load(path)
    elif path.endswith('.yaml'):
        print 'Building dataset from yaml...'
        obj = yaml_parse.load_path(path)
        print '...done'
    else:
        obj = yaml_parse.load(path)

    rows = options.rows
    cols = options.cols

    if hasattr(obj,'get_batch_topo'):
        #obj is a Dataset
        dataset = obj

        examples = dataset.get_batch_topo(rows*cols)
    else:
        #obj is a Model
        model = obj
        from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
        theano_rng = RandomStreams(42)
        design_examples_var = model.random_design_matrix(batch_size = rows * cols, theano_rng = theano_rng)
        from theano import function
        print 'compiling sampling function'
        f = function([],design_examples_var)
        print 'sampling'
        design_examples = f()
        print 'loading dataset'
        dataset = yaml_parse.load(model.dataset_yaml_src)
        examples = dataset.get_topological_view(design_examples)

    norms = N.asarray( [
            N.sqrt(N.sum(N.square(examples[i,:])))
                        for i in xrange(examples.shape[0])
                        ])
    print 'norms of examples: '
    print '\tmin: ',norms.min()
    print '\tmean: ',norms.mean()
    print '\tmax: ',norms.max()

    print 'range of elements of examples',(examples.min(),examples.max())
    print 'dtype: ', examples.dtype

    examples = dataset.adjust_for_viewer(examples)

    if global_rescale:
        examples /= N.abs(examples).max()

    if len(examples.shape) != 4:
        print 'sorry, view_examples.py only supports image examples for now.'
        print 'this dataset has '+str(len(examples.shape)-2)+' topological dimensions'
        quit(-1)

    is_color = False
    assert examples.shape[3] == 2

    print examples.shape[1:3]

    pv = patch_viewer.PatchViewer( (rows, cols * 2), examples.shape[1:3], is_color = is_color)

    for i in xrange(rows*cols):
        # Load patches in backwards order for easier cross-eyed viewing
        # (Ian can't do the magic eye thing where you focus your eyes past the screen, must
        # focus eyes in front of screen)
        pv.add_patch(examples[i,:,:,1], activation = 0.0, rescale = patch_rescale)
        pv.add_patch(examples[i,:,:,0], activation = 0.0, rescale = patch_rescale)

    if out is None:
        pv.show()
    else:
        pv.save(out)
Example #25
def main(options, positional_args):
    assert len(positional_args) == 1

    path, = positional_args

    out = options.out
    rescale = options.rescale

    if rescale == "none":
        global_rescale = False
        patch_rescale = False
    elif rescale == "global":
        global_rescale = True
        patch_rescale = False
    elif rescale == "individual":
        global_rescale = False
        patch_rescale = True
    else:
        assert False

    if path.endswith(".pkl"):
        from pylearn2.utils import serial

        obj = serial.load(path)
    elif path.endswith(".yaml"):
        print "Building dataset from yaml..."
        obj = yaml_parse.load_path(path)
        print "...done"
    else:
        obj = yaml_parse.load(path)

    rows = options.rows
    cols = options.cols

    if hasattr(obj, "get_batch_topo"):
        # obj is a Dataset
        dataset = obj

        examples = dataset.get_batch_topo(rows * cols)
    else:
        # obj is a Model
        model = obj
        from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams

        theano_rng = RandomStreams(42)
        design_examples_var = model.random_design_matrix(batch_size=rows * cols, theano_rng=theano_rng)
        from theano import function

        print "compiling sampling function"
        f = function([], design_examples_var)
        print "sampling"
        design_examples = f()
        print "loading dataset"
        dataset = yaml_parse.load(model.dataset_yaml_src)
        examples = dataset.get_topological_view(design_examples)

    norms = N.asarray([N.sqrt(N.sum(N.square(examples[i, :]))) for i in xrange(examples.shape[0])])
    print "norms of examples: "
    print "\tmin: ", norms.min()
    print "\tmean: ", norms.mean()
    print "\tmax: ", norms.max()

    print "range of elements of examples", (examples.min(), examples.max())
    print "dtype: ", examples.dtype

    examples = dataset.adjust_for_viewer(examples)

    if global_rescale:
        examples /= N.abs(examples).max()

    if len(examples.shape) != 4:
        print "sorry, view_examples.py only supports image examples for now."
        print "this dataset has " + str(len(examples.shape) - 2) + " topological dimensions"
        quit(-1)
    #

    if examples.shape[3] == 1:
        is_color = False
    elif examples.shape[3] == 3:
        is_color = True
    else:
        print "got unknown image format with " + str(examples.shape[3]) + " channels"
        print "supported formats are 1 channel greyscale or three channel RGB"
        quit(-1)
    #

    print examples.shape[1:3]

    pv = patch_viewer.PatchViewer((rows, cols), examples.shape[1:3], is_color=is_color)

    for i in xrange(rows * cols):
        pv.add_patch(examples[i, :, :, :], activation=0.0, rescale=patch_rescale)
    #

    if out is None:
        pv.show()
    else:
        pv.save(out)
Example #26
    def __init__(self,
                 path='train.csv',
                 task='classification',
                 expect_labels=True,
                 expect_headers=True,
                 delimiter=',',
                 start=None,
                 stop=None,
                 start_fraction=None,
                 end_fraction=None,
                 yaml_src=None,
                 one_hot=True,
                 num_classes=4,
                 which_set=None):
        """
        .. todo::

            WRITEME
        """
        self.path = path
        self.task = task
        self.expect_labels = expect_labels
        self.expect_headers = expect_headers
        self.delimiter = delimiter
        self.start = start
        self.stop = stop
        self.start_fraction = start_fraction
        self.end_fraction = end_fraction

        self.view_converter = None

        if yaml_src is not None:
            self.yaml_src = yaml_parse.load_path(yaml_src)
        # self.yaml_src=yaml_parse.load_path("mlp.yaml")
        # eventually; triple-quoted yaml...
        self.one_hot = one_hot
        self.num_classes = num_classes

        if which_set is not None and which_set not in [
                'train', 'test', 'valid']:
            raise ValueError(
                'Unrecognized which_set value "%s". ' % (which_set,) +
                'Valid values are ["train", "test", "valid"].')
        else:
            self.which_set = which_set
            if which_set is not None and (self.start is not None or
                                          self.stop is not None):
                raise ValueError("Use start/stop or which_set,"
                                 " just not together.")

        if task not in ['classification', 'regression']:
            raise ValueError('task must be either "classification" or '
                             '"regression"; got ' + str(task))

        if start_fraction is not None:
            if end_fraction is not None:
                raise ValueError("Use start_fraction or end_fraction, "
                                 " not both.")
            if start_fraction <= 0:
                raise ValueError("start_fraction should be > 0")

            if start_fraction >= 1:
                raise ValueError("start_fraction should be < 1")

        if end_fraction is not None:
            if end_fraction <= 0:
                raise ValueError("end_fraction should be > 0")

            if end_fraction >= 1:
                raise ValueError("end_fraction should be < 1")

        if start is not None:
            if start_fraction is not None or end_fraction is not None:
                raise ValueError("Use start, start_fraction, or end_fraction,"
                                 " just not together.")

        if stop is not None:
            if start_fraction is not None or end_fraction is not None:
                raise ValueError("Use stop, start_fraction, or end_fraction,"
                                 " just not together.")

        # and go
        self.path = preprocess(self.path)
        X, y = self._load_data()

        # y=y.astype(int)
        # y=map(int, np.rint(y).astype(int))

        if self.task == 'regression':
            super(CSVDatasetPlus, self).__init__(X=X, y=y)
        else:
            # , y_labels=4 # y_labels=np.max(y)+1
            super(CSVDatasetPlus, self).__init__(
                X=X, y=y.astype(int), y_labels=self.num_classes)
Example #27
import numpy as np
import theano.tensor as T
from pylearn2.models.s3c import SufficientStatistics
from pylearn2.models.s3c import S3C
from theano import function
import time
import sys

from pylearn2.config import yaml_parse
from pylearn2.utils import serial

model_path, e_step_path, data_path, batch_size_str, prefix = sys.argv[1:]

print 'loading model'
model = serial.load(model_path)
model.make_pseudoparams()
assert isinstance(model, S3C)

print 'building e step'
e_step = yaml_parse.load_path(e_step_path)

print 'installing e step'
model.e_step = e_step
e_step.register_model(model)

print 'loading data'
data = np.load(data_path)
m, n = data.shape

print 'batch_size: ', batch_size_str
batch_size = int(batch_size_str)
assert m % batch_size == 0

print 'building energy functional expression'
V = T.matrix()
Example #28
def main(options, positional_args):
    """
    .. todo::

        WRITEME
    """
    assert len(positional_args) == 1

    path, = positional_args

    out = options.out
    rescale = options.rescale

    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        assert False

    if path.endswith('.pkl'):
        from pylearn2.utils import serial
        obj = serial.load(path)
    elif path.endswith('.yaml'):
        print 'Building dataset from yaml...'
        obj = yaml_parse.load_path(path)
        print '...done'
    else:
        obj = yaml_parse.load(path)

    rows = options.rows
    cols = options.cols

    if hasattr(obj,'get_batch_topo'):
        # obj is a Dataset
        dataset = obj

        examples = dataset.get_batch_topo(rows*cols)
    else:
        # obj is a Model
        model = obj
        from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
        theano_rng = RandomStreams(42)
        design_examples_var = model.random_design_matrix(batch_size =
                rows * cols, theano_rng = theano_rng)
        from theano import function
        print 'compiling sampling function'
        f = function([],design_examples_var)
        print 'sampling'
        design_examples = f()
        print 'loading dataset'
        dataset = yaml_parse.load(model.dataset_yaml_src)
        examples = dataset.get_topological_view(design_examples)

    norms = N.asarray( [
            N.sqrt(N.sum(N.square(examples[i,:])))
                        for i in xrange(examples.shape[0])
                        ])
    print 'norms of examples: '
    print '\tmin: ',norms.min()
    print '\tmean: ',norms.mean()
    print '\tmax: ',norms.max()

    print 'range of elements of examples', \
            (examples.min(),examples.max())
    print 'dtype: ', examples.dtype

    examples = dataset.adjust_for_viewer(examples)

    if global_rescale:
        examples /= N.abs(examples).max()

    if len(examples.shape) != 4:
        print 'sorry, view_examples.py only supports image examples' + \
                'for now.'
        print 'this dataset has ' + \
                str(len(examples.shape)-2)+' topological dimensions'
        quit(-1)

    if examples.shape[3] == 1:
        is_color = False
    elif examples.shape[3] == 3:
        is_color = True
    else:
        print 'got unknown image format with ' + str(examples.shape[3]) + \
                ' channels'
        print 'supported formats are 1 channel greyscale or three channel RGB'
        quit(-1)

    print examples.shape[1:3]

    pv = patch_viewer.PatchViewer((rows, cols), examples.shape[1:3],
            is_color = is_color)

    for i in xrange(rows*cols):
        pv.add_patch(examples[i,:,:,:], activation=0.0, rescale=patch_rescale)

    if out is None:
        pv.show()
    else:
        pv.save(out)
Example #29
#!/usr/bin/env python
import numpy as N
import sys
from pylearn2.gui import patch_viewer
from pylearn2.config import yaml_parse

assert len(sys.argv) == 2
path = sys.argv[1]

if path.endswith('.pkl'):
    from pylearn2.utils import serial
    dataset = serial.load(path)
elif path.endswith('.yaml'):
    dataset = yaml_parse.load_path(path)
else:
    dataset = yaml_parse.load(path)

rows = 20
cols = 20

examples = dataset.get_batch_topo(rows*cols)

norms = N.asarray( [
        N.sqrt(N.sum(N.square(examples[i,:])))
                    for i in xrange(examples.shape[0])
                    ])
print 'norms of examples: '
print '\tmin: ',norms.min()
print '\tmean: ',norms.mean()
print '\tmax: ',norms.max()
Example #30
from pylearn2.config import yaml_parse
import sys

dataset = yaml_parse.load_path(sys.argv[1])

var = dataset.X.var(axis=0)

print (var.min(), var.mean(), var.max())

mn = dataset.X.mean(axis=0)

print (mn.min(), mn.mean(), mn.max())
Example #31
            print((t6-t1, t2-t1, t3-t2, t4-t3, t5-t4, t6-t5))

        if self.chunk_size is not None:
            assert save_path.endswith('.npy')
            save_path_pieces = save_path.split('.npy')
            assert len(save_path_pieces) == 2
            assert save_path_pieces[1] == ''
            save_path = save_path_pieces[0] + '_' + chr(ord('A')+self.chunk_id)+'.npy'
        np.save(save_path,output)


        if nan > 0:
            warnings.warn(str(nan)+' features were nan')

if __name__ == '__main__':
    assert len(sys.argv) == 2
    yaml_path = sys.argv[1]

    assert yaml_path.endswith('.yaml')
    val = yaml_path[0:-len('.yaml')]
    os.environ['FEATURE_EXTRACTOR_YAML_PATH'] = val
    os.putenv('FEATURE_EXTRACTOR_YAML_PATH',val)
    val = val.split('/')[-1]
    os.environ['FEATURE_EXTRACTOR_YAML_NAME'] = val
    os.putenv('FEATURE_EXTRACTOR_YAML_NAME', val)
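    # Assigning to os.environ already calls putenv() under the hood, so the
    # explicit os.putenv() calls are redundant, though harmless.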


    extractor = yaml_parse.load_path(yaml_path)

    extractor()
Example #32
def main(options, positional_args):
    """
    .. todo::

        WRITEME
    """
    assert len(positional_args) == 1

    path, = positional_args

    out = options.out
    rescale = options.rescale

    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        assert False

    if path.endswith('.pkl'):
        from pylearn2.utils import serial
        obj = serial.load(path)
    elif path.endswith('.yaml'):
        print 'Building dataset from yaml...'
        obj = yaml_parse.load_path(path)
        print '...done'
    else:
        obj = yaml_parse.load(path)

    rows = options.rows
    cols = options.cols

    if hasattr(obj, 'get_batch_topo'):
        #obj is a Dataset
        dataset = obj

        examples = dataset.get_batch_topo(rows * cols)
    else:
        #obj is a Model
        model = obj
        from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
        theano_rng = RandomStreams(42)
        design_examples_var = model.random_design_matrix(batch_size=rows *
                                                         cols,
                                                         theano_rng=theano_rng)
        from theano import function
        print 'compiling sampling function'
        f = function([], design_examples_var)
        print 'sampling'
        design_examples = f()
        print 'loading dataset'
        dataset = yaml_parse.load(model.dataset_yaml_src)
        examples = dataset.get_topological_view(design_examples)

    norms = N.asarray([
        N.sqrt(N.sum(N.square(examples[i, :])))
        for i in xrange(examples.shape[0])
    ])
    print 'norms of examples: '
    print '\tmin: ', norms.min()
    print '\tmean: ', norms.mean()
    print '\tmax: ', norms.max()

    print 'range of elements of examples', (examples.min(), examples.max())
    print 'dtype: ', examples.dtype

    examples = dataset.adjust_for_viewer(examples)

    if global_rescale:
        examples /= N.abs(examples).max()

    if len(examples.shape) != 4:
        print 'sorry, view_examples.py only supports image examples for now.'
        print 'this dataset has ' + str(len(examples.shape) -
                                        2) + ' topological dimensions'
        quit(-1)

    is_color = False
    assert examples.shape[3] == 2

    print examples.shape[1:3]

    pv = patch_viewer.PatchViewer((rows, cols * 2),
                                  examples.shape[1:3],
                                  is_color=is_color)

    for i in xrange(rows * cols):
        # Load patches in backwards order for easier cross-eyed viewing
        # (Ian can't do the magic eye thing where you focus your eyes past the screen, must
        # focus eyes in front of screen)
        pv.add_patch(examples[i, :, :, 1],
                     activation=0.0,
                     rescale=patch_rescale)
        pv.add_patch(examples[i, :, :, 0],
                     activation=0.0,
                     rescale=patch_rescale)

    if out is None:
        pv.show()
    else:
        pv.save(out)
Example #33
def main(options, positional_args):
    assert len(positional_args) == 1

    path, = positional_args

    out = options.out
    rescale = options.rescale

    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        assert False

    if path.endswith('.pkl'):
        from pylearn2.utils import serial
        obj = serial.load(path)
    elif path.endswith('.yaml'):
        print 'Building dataset from yaml...'
        obj = yaml_parse.load_path(path)
        print '...done'
    else:
        obj = yaml_parse.load(path)

    rows = options.rows
    cols = options.cols

    if hasattr(obj,'get_batch_topo'):
        #obj is a Dataset
        dataset = obj

        examples = dataset.get_batch_topo(rows*cols)
    else:
        #obj is a Model
        model = obj
        from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
        theano_rng = RandomStreams(42)
        design_examples_var = model.random_design_matrix(batch_size = rows * cols, theano_rng = theano_rng)
        from theano import function
        print 'compiling sampling function'
        f = function([],design_examples_var)
        print 'sampling'
        design_examples = f()
        print 'loading dataset'
        dataset = yaml_parse.load(model.dataset_yaml_src)
        examples = dataset.get_topological_view(design_examples)

    norms = N.asarray( [
            N.sqrt(N.sum(N.square(examples[i,:])))
                        for i in xrange(examples.shape[0])
                        ])
    print 'norms of examples: '
    print '\tmin: ',norms.min()
    print '\tmean: ',norms.mean()
    print '\tmax: ',norms.max()

    print 'range of elements of examples',(examples.min(),examples.max())
    print 'dtype: ', examples.dtype

    examples = dataset.adjust_for_viewer(examples)

    if global_rescale:
        examples /= N.abs(examples).max()

    if len(examples.shape) != 4:
        print 'sorry, view_examples.py only supports image examples for now.'
        print 'this dataset has '+str(len(examples.shape)-2)+' topological dimensions'
        quit(-1)
    #

    if examples.shape[3] == 1:
        is_color = False
    elif examples.shape[3] == 3:
        is_color = True
    else:
        print 'got unknown image format with '+str(examples.shape[3])+' channels'
        print 'supported formats are 1 channel greyscale or three channel RGB'
        quit(-1)
    #

    print examples.shape[1:3]

    pv = patch_viewer.PatchViewer( (rows, cols), examples.shape[1:3], is_color = is_color)

    for i in xrange(rows*cols):
        pv.add_patch(examples[i,:,:,:], activation = 0.0, rescale = patch_rescale)
    #

    if out is None:
        pv.show()
    else:
        pv.save(out)
Example #34
import gc
import numpy as np
import sys

from pylearn2.config import yaml_parse
from pylearn2.utils import serial

_, config_path = sys.argv
model = yaml_parse.load_path(config_path)

f = model.dump_func()

model.strip_down()
stripped_model_path = config_path.replace('.yaml', '_stripped.pkl')
serial.save(stripped_model_path, model)

srcs = {
        'train' : """!obj:pylearn2.datasets.norb_small.FoveatedNORB {
        which_set: "train",
        scale: 1,
        one_hot: 1
    }""",
        'test' : """!obj:pylearn2.datasets.norb_small.FoveatedNORB {
        which_set: "test",
        scale: 1,
        one_hot: 1
    }"""
        }

for which_set in srcs:
    gc.collect()
Example #35
#!/usr/bin/env python
import numpy as N
import sys
from pylearn2.gui import patch_viewer
from pylearn2.config import yaml_parse

assert len(sys.argv) == 2
path = sys.argv[1]

if path.endswith('.pkl'):
    from pylearn2.utils import serial
    dataset = serial.load(path)
elif path.endswith('.yaml'):
    dataset = yaml_parse.load_path(path)
else:
    dataset = yaml_parse.load(path)

rows = 20
cols = 20

examples = dataset.get_batch_topo(rows * cols)

norms = N.asarray([
    N.sqrt(N.sum(N.square(examples[i, :]))) for i in xrange(examples.shape[0])
])
print 'norms of examples: '
print '\tmin: ', norms.min()
print '\tmean: ', norms.mean()
print '\tmax: ', norms.max()

print 'range of elements of examples', (examples.min(), examples.max())
Example #36
import gc
import numpy as np
import sys

from pylearn2.config import yaml_parse
from pylearn2.utils import serial

_, config_path = sys.argv
model = yaml_parse.load_path(config_path)

f = model.dump_func()

model.strip_down()
stripped_model_path = config_path.replace('.yaml', '_stripped.pkl')
serial.save(stripped_model_path, model)

srcs = {
    'train':
    """!obj:pylearn2.datasets.norb_small.FoveatedNORB {
        which_set: "train",
        scale: 1,
        one_hot: 1
    }""",
    'test':
    """!obj:pylearn2.datasets.norb_small.FoveatedNORB {
        which_set: "test",
        scale: 1,
        one_hot: 1
    }"""
}
Example #37
import numpy as np
import theano.tensor as T
from pylearn2.models.s3c import SufficientStatistics
from pylearn2.models.s3c import S3C
from theano import function
import time
import sys

from pylearn2.config import yaml_parse
from pylearn2.utils import serial

model_path, e_step_path, data_path, batch_size_str, prefix = sys.argv[1:]

print 'loading model'
model = serial.load(model_path)
model.make_pseudoparams()
assert isinstance(model, S3C)

print 'building e step'
e_step = yaml_parse.load_path(e_step_path)

print 'installing e step'
model.e_step = e_step
e_step.register_model(model)

print 'loading data'
data = np.load(data_path)
m,n = data.shape

print 'batch_size: ',batch_size_str
batch_size = int(batch_size_str)
assert m % batch_size == 0

print 'building energy functional expression'
V = T.matrix()
Example #38
def show_examples(path, rows, cols, rescale='global', out=None):
    """
    .. todo::

        WRITEME

    Parameters
    ----------
    path : string
        The pickle or YAML file to show examples of
    rows : int
        WRITEME
    cols : int
        WRITEME
    rescale : {'none', 'global', 'individual'}
        Default is 'global', WRITEME
    out : string, optional
        WRITEME
    """

    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        assert False

    if path.endswith('.pkl'):
        from pylearn2.utils import serial
        obj = serial.load(path)
    elif path.endswith('.yaml'):
        print 'Building dataset from yaml...'
        obj = yaml_parse.load_path(path)
        print '...done'
    else:
        obj = yaml_parse.load(path)

    if hasattr(obj, 'get_batch_topo'):
        # obj is a Dataset
        dataset = obj

        examples = dataset.get_batch_topo(rows * cols)
    else:
        # obj is a Model
        model = obj
        from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
        theano_rng = RandomStreams(42)
        design_examples_var = model.random_design_matrix(batch_size=rows *
                                                         cols,
                                                         theano_rng=theano_rng)
        from theano import function
        print 'compiling sampling function'
        f = function([], design_examples_var)
        print 'sampling'
        design_examples = f()
        print 'loading dataset'
        dataset = yaml_parse.load(model.dataset_yaml_src)
        examples = dataset.get_topological_view(design_examples)

    norms = np.asarray([
        np.sqrt(np.sum(np.square(examples[i, :])))
        for i in xrange(examples.shape[0])
    ])
    print 'norms of examples: '
    print '\tmin: ', norms.min()
    print '\tmean: ', norms.mean()
    print '\tmax: ', norms.max()

    print 'range of elements of examples', (examples.min(), examples.max())
    print 'dtype: ', examples.dtype

    examples = dataset.adjust_for_viewer(examples)

    if global_rescale:
        examples /= np.abs(examples).max()

    if len(examples.shape) != 4:
        print 'sorry, view_examples.py only supports image examples for now.'
        print 'this dataset has ' + str(len(examples.shape) - 2),
        print 'topological dimensions'
        quit(-1)

    if examples.shape[3] == 1:
        is_color = False
    elif examples.shape[3] == 3:
        is_color = True
    else:
        print 'got unknown image format with ' + str(examples.shape[3]),
        print 'channels'
        print 'supported formats are 1 channel greyscale or three channel RGB'
        quit(-1)

    print examples.shape[1:3]

    pv = patch_viewer.PatchViewer((rows, cols),
                                  examples.shape[1:3],
                                  is_color=is_color)

    for i in xrange(rows * cols):
        pv.add_patch(examples[i, :, :, :],
                     activation=0.0,
                     rescale=patch_rescale)

    if out is None:
        pv.show()
    else:
        pv.save(out)
Example #39
def show_examples(path, rows, cols, rescale='global', out=None):
    """
    .. todo::

        WRITEME

    Parameters
    ----------
    path : string
        The pickle or YAML file to show examples of
    rows : int
        WRITEME
    cols : int
        WRITEME
    rescale : {'none', 'global', 'individual'}
        Default is 'global', WRITEME
    out : string, optional
        WRITEME
    """

    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        assert False

    if path.endswith('.pkl'):
        from pylearn2.utils import serial
        obj = serial.load(path)
    elif path.endswith('.yaml'):
        print('Building dataset from yaml...')
        obj = yaml_parse.load_path(path)
        print('...done')
    else:
        obj = yaml_parse.load(path)

    if hasattr(obj, 'get_batch_topo'):
        # obj is a Dataset
        dataset = obj

        examples = dataset.get_batch_topo(rows*cols)
    else:
        # obj is a Model
        model = obj
        from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
        theano_rng = RandomStreams(42)
        design_examples_var = model.random_design_matrix(
            batch_size=rows * cols, theano_rng=theano_rng
        )
        from theano import function
        print('compiling sampling function')
        f = function([], design_examples_var)
        print('sampling')
        design_examples = f()
        print('loading dataset')
        dataset = yaml_parse.load(model.dataset_yaml_src)
        examples = dataset.get_topological_view(design_examples)

    norms = np.asarray([np.sqrt(np.sum(np.square(examples[i, :])))
                        for i in xrange(examples.shape[0])])
    print('norms of examples: ')
    print('\tmin: ', norms.min())
    print('\tmean: ', norms.mean())
    print('\tmax: ', norms.max())

    print('range of elements of examples', (examples.min(), examples.max()))
    print('dtype: ', examples.dtype)

    examples = dataset.adjust_for_viewer(examples)

    if global_rescale:
        examples /= np.abs(examples).max()

    if len(examples.shape) != 4:
        print('sorry, view_examples.py only supports image examples for now.')
        print('this dataset has ' + str(len(examples.shape) - 2), end=' ')
        print('topological dimensions')
        quit(-1)

    if examples.shape[3] == 1:
        is_color = False
    elif examples.shape[3] == 3:
        is_color = True
    else:
        print('got unknown image format with', str(examples.shape[3]), end=' ')
        print('channels')
        print('supported formats are 1 channel greyscale or three channel RGB')
        quit(-1)

    print(examples.shape[1:3])

    pv = patch_viewer.PatchViewer((rows, cols), examples.shape[1:3],
                                  is_color=is_color)

    for i in xrange(rows*cols):
        pv.add_patch(examples[i, :, :, :], activation=0.0,
                     rescale=patch_rescale)

    if out is None:
        pv.show()
    else:
        pv.save(out)
Example #40
import sys

ignore, N = sys.argv
N = int(N)

dataset_desc_path = 'cifar100_patches.yaml'

from pylearn2.config import yaml_parse
dataset = yaml_parse.load_path(dataset_desc_path)

from pylearn2.models.s3c import S3C, E_Step, Grad_M_Step, E_Step_Scan

NUM_EXAMPLES = 100
OVERKILL = 50

X = dataset.get_batch_design(NUM_EXAMPLES)

D = X.shape[1]

import numpy as np
rng = np.random.RandomState([7, 12, 43])
init_bias_hid = rng.uniform(-5., 0., (N, ))
init_mu = rng.uniform(-5., 5., (N, ))
init_alpha = rng.uniform(.1, 15., (N, ))
init_B = rng.uniform(3.5, 15., (D, ))

from pylearn2.utils.call_check import checked_call


def package_call(to_call, **kwargs):
    return checked_call(to_call, kwargs)
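
A hedged sketch of what this wrapper buys you, assuming checked_call(f, kwargs) simply invokes f(**kwargs) after validating the keyword names; the toy function is hypothetical:

def scale(x, factor=2.0):
    # Toy stand-in for a real constructor such as S3C.
    return x * factor

assert package_call(scale, x=3.0) == 6.0
# package_call(scale, y=3.0) raises an informative error naming the
# unexpected keyword, rather than a bare TypeError.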
Example #41
from pylearn2.config import yaml_parse

if __name__ == "__main__":
    clf = yaml_parse.load_path("conv.yaml")
    clf.main_loop()
Example #42
import sys

ignore, N  = sys.argv
N = int(N)

dataset_desc_path = 'cifar100_patches.yaml'

from pylearn2.config import yaml_parse
dataset = yaml_parse.load_path(dataset_desc_path)


from pylearn2.models.s3c import S3C, E_Step, Grad_M_Step, E_Step_Scan

NUM_EXAMPLES = 100
OVERKILL = 50

X = dataset.get_batch_design(NUM_EXAMPLES)

D = X.shape[1]

import numpy as np
rng = np.random.RandomState([7,12,43])
init_bias_hid = rng.uniform(-5.,0.,(N,))
init_mu = rng.uniform(-5.,5.,(N,))
init_alpha = rng.uniform(.1,15.,(N,))
init_B = rng.uniform(3.5,15.,(D,))

from pylearn2.utils.call_check import checked_call

def package_call(to_call, ** kwargs):
    return checked_call(to_call, kwargs)
Example #43
    patch_rescale = False
elif rescale == 'global':
    global_rescale = True
    patch_rescale = False
elif rescale == 'individual':
    global_rescale = False
    patch_rescale = True
else:
    assert False

if path.endswith('.pkl'):
    from pylearn2.utils import serial
    obj = serial.load(path)
elif path.endswith('.yaml'):
    print 'Building dataset from yaml...'
    obj = yaml_parse.load_path(path)
    print '...done'
else:
    obj = yaml_parse.load(path)

rows = options.rows
cols = options.cols

if hasattr(obj,'get_batch_topo'):
    #obj is a Dataset
    dataset = obj

    examples = dataset.get_batch_topo(rows*cols)
else:
    #obj is a Model
    model = obj