def get_dataless_dataset(model):
    """
    Load the dataset a model was trained on, skipping the actual data.

    Useful when only the dataset's metadata is needed, e.g. to format
    views of the model's weights.

    Parameters
    ----------
    model : Model
        Model whose `dataset_yaml_src` attribute describes the dataset.

    Returns
    -------
    dataset : Dataset
        The data-less dataset described above.
    """
    global yaml_parse
    global control

    # Deferred imports, cached in module-level globals, keep this module
    # cheap to import.
    if yaml_parse is None:
        from pylearn2.config import yaml_parse
    if control is None:
        from pylearn2.datasets import control

    # Disable data loading while the YAML is parsed; always restore the
    # previous setting, even if parsing raises.
    control.push_load_data(False)
    try:
        dataset = yaml_parse.load(model.dataset_yaml_src)
    finally:
        control.pop_load_data()
    return dataset
def show_negative_chains(model_path):
    """
    Display the negative chains stored in a pickled model.

    Parameters
    ----------
    model_path: str
        The path to the model pickle file
    """
    model = serial.load(model_path)

    # Parse the dataset description without pulling the actual data in,
    # restoring the load_data flag afterwards.
    try:
        control.push_load_data(False)
        dataset = yaml_parse.load(model.dataset_yaml_src)
    finally:
        control.pop_load_data()

    try:
        layer_to_chains = model.layer_to_chains
    except AttributeError:
        print("This model doesn't have negative chains.")
        quit(-1)

    chains = get_vis_chains(layer_to_chains, model, dataset)
    num_chains = chains.shape[0]
    return create_patch_viewer(get_grid_shape(num_chains), chains, num_chains)
def setup():
    """
    Create pickle file with a simple model.

    Builds a small two-layer DBM on a 100-example slice of MNIST and
    dumps it to 'dbm.pkl'.
    """
    # tearDown is guaranteed to run pop_load_data, so it is not done here.
    control.push_load_data(False)
    with open('dbm.pkl', 'wb') as pkl_file:
        dataset = MNIST(which_set='train', start=0, stop=100, binarize=True)

        visible = BinaryVector(nvis=784, bias_from_marginals=dataset)
        first_hidden = BinaryVectorMaxPool(layer_name='h1',
                                           pool_size=1,
                                           irange=.05,
                                           init_bias=-2.,
                                           detector_layer_dim=50)
        second_hidden = BinaryVectorMaxPool(layer_name='h2',
                                            pool_size=1,
                                            irange=.05,
                                            init_bias=-2.,
                                            detector_layer_dim=10)
        model = DBM(batch_size=20,
                    niter=2,
                    visible_layer=visible,
                    hidden_layers=[first_hidden, second_hidden])
        model.dataset_yaml_src = """
!obj:pylearn2.datasets.binarizer.Binarizer {
    raw: !obj:pylearn2.datasets.mnist.MNIST {
        which_set: "train",
        start: 0,
        stop: 100
    }
}
"""
        model.layer_to_chains = model.make_layer_to_state(1)
        cPickle.dump(model, pkl_file, protocol=cPickle.HIGHEST_PROTOCOL)
def get_dataless_dataset(model):
    """
    Loads the dataset that model was trained on, without loading data.

    This is useful if you just need the dataset's metadata, like for
    formatting views of the model's weights.

    Parameters
    ----------
    model : Model
        Model whose `dataset_yaml_src` attribute describes the dataset.

    Returns
    -------
    dataset : Dataset
        The data-less dataset as described above.
    """
    global yaml_parse

    # Deferred import cached in a module-level global.
    if yaml_parse is None:
        from pylearn2.config import yaml_parse

    # Disable data loading while the YAML is parsed; restore the previous
    # setting even if parsing raises.
    control.push_load_data(False)
    try:
        rval = yaml_parse.load(model.dataset_yaml_src)
    finally:
        control.pop_load_data()
    return rval
def get_weights_report(model_path=None, model=None, rescale='individual',
                       border=False, norm_sort=False, dataset=None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters
    ----------
    model_path : str
        Filepath of the model to make the report on.
    rescale : str
        A string specifying how to rescale the filter images:
        'individual' (default): scale each filter so that it
            uses as much as possible of the dynamic range
            of the display under the constraint that 0
            is gray and no value gets clipped
        'global' : scale the whole ensemble of weights
        'none' : don't rescale
    dataset : pylearn2.datasets.dataset.Dataset
        Dataset object to do view conversion for displaying the weights. If
        not provided one will be loaded from the model's dataset_yaml_src.

    Returns
    -------
    WRITEME
    """
    # Exactly one of model_path / model must be supplied.
    if model is None:
        print 'making weights report'
        print 'loading model'
        model = serial.load(model_path)
        print 'loading done'
    else:
        assert model_path is None
        assert model is not None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale=' + rescale +
                         ", must be 'none', 'global', or 'individual'")

    # If the first MLP layer wraps a pretrained model, report on the
    # wrapped model instead.
    if hasattr(model, 'layers'):
        if isinstance(model.layers[0], mlp_models.PretrainedLayer):
            model = model.layers[0].layer_content

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Candidate weight matrices are the 2D arrays in the dict.
        keys = [key for key in model \
                if hasattr(model[key], 'ndim') and model[key].ndim == 2]
        if len(keys) > 2:
            key = None
            while key not in keys:
                logger.info('Which is the weights?')
                for key in keys:
                    logger.info('\t{0}'.format(key))
                # NOTE(review): on Python 2, input() eval()s what the user
                # types; raw_input() is presumably intended here — confirm.
                key = input()
        else:
            key, = keys
        weights = model[key]

        norms = np.sqrt(np.square(weights).sum(axis=1))
        print 'min norm: ', norms.min()
        print 'mean norm: ', norms.mean()
        print 'max norm: ', norms.max()

        # Weight matrices whose column count is divisible by 3 are
        # displayed as color patches.
        return patch_viewer.make_viewer(weights,
                                        is_color=weights.shape[1] % 3 == 0)

    weights_view = None
    W = None

    try:
        weights_view = model.get_weights_topo()
        h = weights_view.shape[0]
    except NotImplementedError:
        # No topological weights view; fall back to raw weights plus a
        # dataset-supplied view conversion.
        if dataset is None:
            print 'loading dataset...'
            control.push_load_data(False)
            dataset = yaml_parse.load(model.dataset_yaml_src)
            control.pop_load_data()
            print '...done'

        try:
            W = model.get_weights()
        # (Definition truncated here in this view of the file.)
def get_weights_report(model_path=None, model=None, rescale='individual',
                       border=False, norm_sort=False, dataset=None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters:
        model_path: the filepath of the model to make the report on.
        rescale: a string specifying how to rescale the filter images
            'individual' (default): scale each filter so that it
                uses as much as possible of the dynamic range
                of the display under the constraint that 0
                is gray and no value gets clipped
            'global' : scale the whole ensemble of weights
            'none' : don't rescale
        dataset: a Dataset object to do view conversion for displaying the
            weights. if not provided one will be loaded from the model's
            dataset_yaml_src
    """
    # Exactly one of model_path / model must be supplied.
    if model is None:
        print 'making weights report'
        print 'loading model'
        model = serial.load(model_path)
        print 'loading done'
    else:
        assert model_path is None
        assert model is not None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale=' + rescale +
                         ", must be 'none', 'global', or 'individual'")

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Assumes exactly one remaining entry is the weight matrix.
        weights, = model.values()

        norms = np.sqrt(np.square(weights).sum(axis=1))
        print 'min norm: ', norms.min()
        print 'mean norm: ', norms.mean()
        print 'max norm: ', norms.max()

        return patch_viewer.make_viewer(weights,
                                        is_color=weights.shape[1] % 3 == 0)

    weights_view = None
    W = None

    # NOTE(review): this catches all exceptions, not just
    # NotImplementedError as later revisions of this function do, so a
    # genuine bug in get_weights_topo is silently routed to the fallback.
    try:
        weights_view = model.get_weights_topo()
        h = weights_view.shape[0]
    except Exception, e:
        if dataset is None:
            print 'loading dataset...'
            control.push_load_data(False)
            dataset = yaml_parse.load(model.dataset_yaml_src)
            control.pop_load_data()
            print '...done'

        # Try the various historical weight-storage interfaces in turn;
        # the last match wins.
        if hasattr(model, 'get_weights'):
            W = model.get_weights()
        if 'weightsShared' in dir(model):
            W = model.weightsShared.get_value()
        if 'W' in dir(model):
            if hasattr(model.W, '__array__'):
                warnings.warn(
                    'model.W is an ndarray; I can figure out how to display this but that seems like a sign of a bad bug'
                )
                W = model.W
            else:
                W = model.W.get_value()
        has_D = False
        if 'D' in dir(model):
            has_D = True
            D = model.D
        if 'enc_weights_shared' in dir(model):
            W = model.enc_weights_shared.get_value()
        if W is None:
            raise AttributeError(
                'model does not have a variable with a name like "W", "weights", etc that pylearn2 recognizes'
            )
        # (Definition truncated here in this view of the file.)
def get_weights_report(model_path=None, model=None, rescale='individual',
                       border=False, norm_sort=False, dataset=None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters
    ----------
    model_path : str
        Filepath of the model to make the report on.
    rescale : str
        A string specifying how to rescale the filter images:
          - 'individual' (default) : scale each filter so that it
                uses as much as possible of the dynamic range
                of the display under the constraint that 0
                is gray and no value gets clipped
          - 'global' : scale the whole ensemble of weights
          - 'none' : don't rescale
    dataset : pylearn2.datasets.dataset.Dataset
        Dataset object to do view conversion for displaying the weights. If
        not provided one will be loaded from the model's dataset_yaml_src.

    Returns
    -------
    WRITEME
    """
    # Exactly one of model_path / model must be supplied.
    if model is None:
        logger.info('making weights report')
        logger.info('loading model')
        model = serial.load(model_path)
        logger.info('loading done')
    else:
        assert model_path is None
        assert model is not None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale=' + rescale +
                         ", must be 'none', 'global', or 'individual'")

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Candidate weight matrices are the 2D arrays in the dict; ask the
        # user to disambiguate when there are several.
        keys = [key for key in model \
                if hasattr(model[key], 'ndim') and model[key].ndim == 2]
        if len(keys) > 2:
            key = None
            while key not in keys:
                logger.info('Which is the weights?')
                for key in keys:
                    logger.info('\t{0}'.format(key))
                # NOTE(review): on Python 2 input() eval()s the reply;
                # other revisions of this function use raw_input — confirm.
                key = input()
        else:
            key, = keys
        weights = model[key]

        norms = np.sqrt(np.square(weights).sum(axis=1))
        logger.info('min norm: {0}'.format(norms.min()))
        logger.info('mean norm: {0}'.format(norms.mean()))
        logger.info('max norm: {0}'.format(norms.max()))

        return patch_viewer.make_viewer(weights,
                                        is_color=weights.shape[1] % 3 == 0)

    weights_view = None
    W = None

    try:
        weights_view = model.get_weights_topo()
        h = weights_view.shape[0]
    except NotImplementedError:
        # No topological view: fall back to raw weights plus a
        # dataset-supplied view conversion.
        if dataset is None:
            logger.info('loading dataset...')
            control.push_load_data(False)
            dataset = yaml_parse.load(model.dataset_yaml_src)
            control.pop_load_data()
            logger.info('...done')

        try:
            W = model.get_weights()
        except AttributeError as e:
            reraise_as(AttributeError("""
Encountered an AttributeError while trying to call get_weights on a model.
This probably means you need to implement get_weights for this model class,
but look at the original exception to be sure.
If this is an older model class, it may have weights stored as weightsShared,
etc.
Original exception: """+str(e)))

    if W is None and weights_view is None:
        raise ValueError("model doesn't support any weights interfaces")

    if weights_view is None:
        # get_weights_format tells us whether rows ('h') or columns ('v')
        # of W correspond to hidden units; normalize to rows.
        weights_format = model.get_weights_format()
        assert hasattr(weights_format, '__iter__')
        assert len(weights_format) == 2
        assert weights_format[0] in ['v', 'h']
        assert weights_format[1] in ['v', 'h']
        assert weights_format[0] != weights_format[1]

        if weights_format[0] == 'v':
            W = W.T
        h = W.shape[0]

        if norm_sort:
            # Small epsilon keeps the sqrt finite for all-zero filters.
            norms = np.sqrt(1e-8 + np.square(W).sum(axis=1))
            norm_prop = norms / norms.max()

        weights_view = dataset.get_weights_view(W)
        assert weights_view.shape[0] == h

    # Grid dimensions: model-specified if available, else roughly square.
    try:
        hr, hc = model.get_weights_view_shape()
    except NotImplementedError:
        hr = int(np.ceil(np.sqrt(h)))
        hc = hr
        if 'hidShape' in dir(model):
            hr, hc = model.hidShape

    pv = patch_viewer.PatchViewer(grid_shape=(hr, hc),
                                  patch_shape=weights_view.shape[1:3],
                                  is_color=weights_view.shape[-1] == 3)

    if global_rescale:
        weights_view /= np.abs(weights_view).max()

    if norm_sort:
        logger.info('sorting weights by decreasing norm')
        idx = sorted(range(h), key=lambda l: -norm_prop[l])
    else:
        idx = range(h)

    # border=True marks each patch with activation level 0 (drawn border).
    if border:
        act = 0
    else:
        act = None

    for i in range(0, h):
        patch = weights_view[idx[i], ...]
        pv.add_patch(patch, rescale=patch_rescale, activation=act)

    abs_weights = np.abs(weights_view)
    logger.info('smallest enc weight magnitude: {0}'.format(abs_weights.min()))
    logger.info('mean enc weight magnitude: {0}'.format(abs_weights.mean()))
    logger.info('max enc weight magnitude: {0}'.format(abs_weights.max()))

    if W is not None:
        norms = np.sqrt(np.square(W).sum(axis=1))
        assert norms.shape == (h,)
        logger.info('min norm: {0}'.format(norms.min()))
        logger.info('mean norm: {0}'.format(norms.mean()))
        logger.info('max norm: {0}'.format(norms.max()))

    return pv
def get_weights_report(model_path=None, model=None, rescale='individual',
                       border=False, norm_sort=False, dataset=None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters
    ----------
    model_path : str
        Filepath of the model to make the report on.
    rescale : str
        A string specifying how to rescale the filter images:
          - 'individual' (default) : scale each filter so that it
                uses as much as possible of the dynamic range
                of the display under the constraint that 0
                is gray and no value gets clipped
          - 'global' : scale the whole ensemble of weights
          - 'none' : don't rescale
    dataset : pylearn2.datasets.dataset.Dataset
        Dataset object to do view conversion for displaying the weights. If
        not provided one will be loaded from the model's dataset_yaml_src.

    Returns
    -------
    WRITEME
    """
    # NOTE(review): these look like leftover debug prints — confirm
    # whether they should be removed or routed through logger.
    print type(dataset)
    print type(model)

    # Exactly one of model_path / model must be supplied.
    if model is None:
        logger.info('making weights report')
        logger.info('loading model')
        model = serial.load(model_path)
        logger.info('loading done')
    else:
        assert model_path is None
        assert model is not None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale=' + rescale +
                         ", must be 'none', 'global', or 'individual'")

    print "model type: " + str(type(model))

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Candidate weight matrices are the 2D arrays in the dict.
        keys = [key for key in model \
                if hasattr(model[key], 'ndim') and model[key].ndim == 2]
        if len(keys) > 2:
            key = None
            while key not in keys:
                logger.info('Which is the weights?')
                for key in keys:
                    logger.info('\t{0}'.format(key))
                key = raw_input()
        else:
            key, = keys
        weights = model[key]

        norms = np.sqrt(np.square(weights).sum(axis=1))
        logger.info('min norm: {0}'.format(norms.min()))
        logger.info('mean norm: {0}'.format(norms.mean()))
        logger.info('max norm: {0}'.format(norms.max()))

        return patch_viewer.make_viewer(weights,
                                        is_color=weights.shape[1] % 3 == 0)

    weights_view = None
    W = None

    try:
        weights_view = model.get_weights_topo()
        h = weights_view.shape[0]
        print "h:" + str(h)
    except NotImplementedError:
        if dataset is None:
            logger.info('loading dataset...')
            control.push_load_data(False)
            # In this variant dataset_yaml_src parses to a filename which
            # is then deserialized, rather than to the dataset itself.
            dataset_filename = yaml_parse.load(model.dataset_yaml_src)
            dataset = serial.load(dataset_filename)
            control.pop_load_data()
            logger.info('...done')

        try:
            W = model.get_weights()
        except AttributeError, e:
            raise AttributeError("""
Encountered an AttributeError while trying to call get_weights on a model.
This probably means you need to implement get_weights for this model class,
but look at the original exception to be sure.
If this is an older model class, it may have weights stored as weightsShared,
etc.
Original exception: """+str(e))
        # (Definition truncated here in this view of the file.)
# Script: visualize the negative chains of a pickled DBM-style model.
# Usage: <script> <model.pkl>
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "Ian Goodfellow"

import sys

from pylearn2.utils import serial
from pylearn2.datasets import control
from pylearn2.config import yaml_parse
import numpy as np

# Expect exactly one command-line argument: the model pickle path.
ignore, model_path = sys.argv

model = serial.load(model_path)

# Parse the dataset YAML without loading the actual data.
# NOTE(review): there is no matching pop_load_data (other code in this
# project wraps this pair in try/finally) — confirm whether the flag is
# meant to stay pushed for the rest of the script.
control.push_load_data(False)
dataset = yaml_parse.load(model.dataset_yaml_src)

try:
    layer_to_chains = model.layer_to_chains
except AttributeError:
    print "This model doesn't have negative chains."
    quit(-1)

# Negative-chain state of the visible layer, as a numpy array.
vis_chains = layer_to_chains[model.visible_layer]
vis_chains = vis_chains.get_value()
m = vis_chains.shape[0]

# Roughly square display grid for the m chains.
r = int(np.sqrt(m))
c = m // r
# (Script continues beyond this view of the file.)
def get_weights_report(model_path = None, model = None, rescale = 'individual',
                       border = False, norm_sort = False, dataset = None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters:
        model_path: the filepath of the model to make the report on.
        rescale: a string specifying how to rescale the filter images
            'individual' (default): scale each filter so that it
                uses as much as possible of the dynamic range
                of the display under the constraint that 0
                is gray and no value gets clipped
            'global' : scale the whole ensemble of weights
            'none' : don't rescale
        dataset: a Dataset object to do view conversion for displaying the
            weights. if not provided one will be loaded from the model's
            dataset_yaml_src
    """
    # Exactly one of model_path / model must be supplied.
    if model is None:
        print 'making weights report'
        print 'loading model'
        model = serial.load(model_path)
        print 'loading done'
    else:
        assert model_path is None
        assert model is not None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale='+rescale+", must be 'none', 'global', or 'individual'")

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Assumes exactly one remaining entry is the weight matrix.
        weights ,= model.values()

        norms = np.sqrt(np.square(weights).sum(axis=1))
        print 'min norm: ', norms.min()
        print 'mean norm: ', norms.mean()
        print 'max norm: ', norms.max()

        return patch_viewer.make_viewer(weights,
                                        is_color = weights.shape[1] % 3 == 0)

    weights_view = None
    W = None

    # NOTE(review): catches all exceptions, not just NotImplementedError
    # as later revisions do, so real bugs in get_weights_topo are hidden.
    try:
        weights_view = model.get_weights_topo()
        h = weights_view.shape[0]
    except Exception, e:
        if dataset is None:
            print 'loading dataset...'
            control.push_load_data(False)
            dataset = yaml_parse.load(model.dataset_yaml_src)
            control.pop_load_data()
            print '...done'

        # Try the various historical weight-storage interfaces in turn;
        # the last match wins.
        if hasattr(model, 'get_weights'):
            W = model.get_weights()
        if 'weightsShared' in dir(model):
            W = model.weightsShared.get_value()
        if 'W' in dir(model):
            if hasattr(model.W, '__array__'):
                warnings.warn('model.W is an ndarray; I can figure out how to display this but that seems like a sign of a bad bug')
                W = model.W
            else:
                W = model.W.get_value()
        has_D = False
        if 'D' in dir(model):
            has_D = True
            D = model.D
        if 'enc_weights_shared' in dir(model):
            W = model.enc_weights_shared.get_value()
        if W is None:
            raise AttributeError('model does not have a variable with a name like "W", "weights", etc that pylearn2 recognizes')
        # (Definition truncated here in this view of the file.)
def get_weights_report(model_path = None, model = None, rescale = 'individual',
                       border = False, norm_sort = False, dataset = None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters:
        model_path: the filepath of the model to make the report on.
        rescale: a string specifying how to rescale the filter images
            'individual' (default): scale each filter so that it
                uses as much as possible of the dynamic range
                of the display under the constraint that 0
                is gray and no value gets clipped
            'global' : scale the whole ensemble of weights
            'none' : don't rescale
        dataset: a Dataset object to do view conversion for displaying the
            weights. if not provided one will be loaded from the model's
            dataset_yaml_src
    """
    # Exactly one of model_path / model must be supplied.
    if model is None:
        print 'making weights report'
        print 'loading model'
        model = serial.load(model_path)
        print 'loading done'
    else:
        assert model_path is None
        assert model is not None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale='+rescale+", must be 'none', 'global', or 'individual'")

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Assumes exactly one remaining entry is the weight matrix.
        weights ,= model.values()

        norms = np.sqrt(np.square(weights).sum(axis=1))
        print 'min norm: ', norms.min()
        print 'mean norm: ', norms.mean()
        print 'max norm: ', norms.max()

        return patch_viewer.make_viewer(weights,
                                        is_color = weights.shape[1] % 3 == 0)

    weights_view = None
    W = None

    try:
        weights_view = model.get_weights_topo()
        h = weights_view.shape[0]
    except NotImplementedError:
        # No topological view: fall back to raw weights plus a
        # dataset-supplied view conversion.
        if dataset is None:
            print 'loading dataset...'
            control.push_load_data(False)
            dataset = yaml_parse.load(model.dataset_yaml_src)
            control.pop_load_data()
            print '...done'

        try:
            W = model.get_weights()
        except AttributeError, e:
            raise AttributeError("""
Encountered an AttributeError while trying to call get_weights on a model.
This probably means you need to implement get_weights for this model class,
but look at the original exception to be sure.
If this is an older model class, it may have weights stored as weightsShared,
etc.
Original exception: """+str(e))
        # (Definition truncated here in this view of the file.)
# (Fragment: relies on model, G_sample, nh, reps, np, control, yaml_parse,
# patch_viewer and patch_rescale defined earlier, outside this view.)

# Mean-field inference of H given the sampled G layer.
H_prob = model.dbm.inference_procedure.infer_H_hat_one_sided(
    other_H_hat = G_sample,
    W = model.dbm.W[0], b = model.dbm.bias_vis)

from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
# Fixed seed for reproducible samples.
theano_rng = RandomStreams(42)

# Binarize the H probabilities into a sample.
H_sample = theano_rng.binomial(size = H_prob.shape, n = 1, p = H_prob,
                               dtype = H_prob.dtype)

# nh distinct H configurations, each repeated `reps` times.
design_examples_var = model.s3c.random_design_matrix(
    batch_size = nh * reps, theano_rng = theano_rng, H_sample = H_sample)

from theano import function
print 'compiling sampling function'
f = function([], design_examples_var)
print 'sampling'
design_examples = f()

print 'loading dataset'
# Dataset is needed only for its view conversion; skip loading the data.
control.push_load_data(False)
dataset = yaml_parse.load(model.dataset_yaml_src)

examples = dataset.get_topological_view(design_examples)
# Normalize into [-1, 1] for display.
examples /= np.abs(examples).max()

# Grid layout: one row per H configuration, one column per repetition.
cols = reps
rows = nh

assert len(examples.shape) == 4
is_color = examples.shape[3] == 3

pv = patch_viewer.PatchViewer((rows, cols), examples.shape[1:3],
                              is_color = is_color)

for i in xrange(min(examples.shape[0], rows * cols)):
    pv.add_patch(examples[i, :, :, :], activation = 0.0,
                 rescale = patch_rescale)
def get_weights_report(model_path = None, model = None, rescale = 'individual',
                       border = False, norm_sort = False, dataset = None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters:
        model_path: the filepath of the model to make the report on.
        rescale: a string specifying how to rescale the filter images
            'individual' (default): scale each filter so that it
                uses as much as possible of the dynamic range
                of the display under the constraint that 0
                is gray and no value gets clipped
            'global' : scale the whole ensemble of weights
            'none' : don't rescale
        dataset: a Dataset object to do view conversion for displaying the
            weights. if not provided one will be loaded from the model's
            dataset_yaml_src
    """
    # Exactly one of model_path / model must be supplied.
    if model is None:
        print 'making weights report'
        print 'loading model'
        model = serial.load(model_path)
        print 'loading done'
    else:
        assert model_path is None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale='+rescale+", must be 'none', 'global', or 'individual'")

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Assumes exactly one remaining entry is the weight matrix.
        weights ,= model.values()
        return patch_viewer.make_viewer(weights,
                                        is_color = weights.shape[1] % 3 == 0)

    if dataset is None:
        print 'loading dataset...'
        control.push_load_data(False)
        dataset = yaml_parse.load(model.dataset_yaml_src)
        control.pop_load_data()
        print '...done'

    # Try the various historical weight-storage interfaces in turn;
    # the last match wins.
    W = None
    if hasattr(model, 'get_weights'):
        W = model.get_weights()
    if 'weightsShared' in dir(model):
        W = model.weightsShared.get_value()
    if 'W' in dir(model):
        if hasattr(model.W, '__array__'):
            warnings.warn('model.W is an ndarray; I can figure out how to display this but that seems like a sign of a bad bug')
            W = model.W
        else:
            W = model.W.get_value()
    has_D = False
    if 'D' in dir(model):
        has_D = True
        D = model.D
    if 'enc_weights_shared' in dir(model):
        W = model.enc_weights_shared.get_value()

    if W is None:
        raise AttributeError('model does not have a variable with a name like "W", "weights", etc that pylearn2 recognizes')

    if len(W.shape) == 2:
        # weights_format says whether rows ('h') or columns ('v') of W
        # correspond to hidden units; normalize to rows.
        # NOTE(review): if the model exposes neither get_weights_format
        # nor weights_format, the assert below raises NameError — confirm
        # whether that is an accepted precondition.
        if hasattr(model, 'get_weights_format'):
            weights_format = model.get_weights_format()
        if hasattr(model, 'weights_format'):
            weights_format = model.weights_format
        assert hasattr(weights_format, '__iter__')
        assert len(weights_format) == 2
        assert weights_format[0] in ['v', 'h']
        assert weights_format[1] in ['v', 'h']
        assert weights_format[0] != weights_format[1]

        if weights_format[0] == 'v':
            W = W.T
        h = W.shape[0]

        if norm_sort:
            # Small epsilon keeps the sqrt finite for all-zero filters.
            norms = np.sqrt(1e-8 + np.square(W).sum(axis=1))
            norm_prop = norms / norms.max()

        # Grid dimensions: roughly square, unless the model specifies them.
        hr = int(np.ceil(np.sqrt(h)))
        hc = hr
        if 'hidShape' in dir(model):
            hr, hc = model.hidShape

        pv = patch_viewer.PatchViewer(grid_shape=(hr, hc),
                patch_shape=dataset.weights_view_shape()[0:2],
                is_color = dataset.weights_view_shape()[2] == 3)

        weights_view = dataset.get_weights_view(W)
        assert weights_view.shape[0] == h
        #print 'weights_view shape '+str(weights_view.shape)

        if global_rescale:
            weights_view /= np.abs(weights_view).max()

        if norm_sort:
            print 'sorting weights by decreasing norm'
            idx = sorted( range(h), key = lambda l : - norm_prop[l] )
        else:
            idx = range(h)

        # border=True marks each patch with activation level 0.
        if border:
            act = 0
        else:
            act = None

        for i in range(0, h):
            patch = weights_view[idx[i], ...]
            pv.add_patch( patch, rescale = patch_rescale, activation = act)
    else:
        # Video (spatiotemporal) weights: display encoder and, when
        # distinct, decoder filters as short clips.
        e = model.weights
        d = model.dec_weights_shared.value
        h = e.shape[0]

        if len(e.shape) == 8:
            raise Exception("get_weights_report doesn't support tiled convolution yet, use the show_weights8 app")
        if e.shape[4] != 1:
            raise Exception('weights shape: '+str(e.shape))
        shape = e.shape[1:3]
        dur = e.shape[3]

        # Show the decoder only when it is a different object from the
        # encoder (i.e. weights are not tied).
        show_dec = id(e) != id(d)

        pv = patch_viewer.PatchViewer( grid_shape = ((1+show_dec)*h, dur),
                                       patch_shape=shape)
        for i in range(0, h):
            pv.addVid( e[i,:,:,:,0], rescale = rescale)
            if show_dec:
                pv.addVid( d[i,:,:,:,0], rescale = rescale)

    print 'smallest enc weight magnitude: '+str(np.abs(W).min())
    print 'mean enc weight magnitude: '+str(np.abs(W).mean())
    print 'max enc weight magnitude: '+str(np.abs(W).max())

    norms = np.sqrt(np.square(W).sum(axis=1))
    assert norms.shape == (h,)
    print 'min norm: ', norms.min()
    print 'mean norm: ', norms.mean()
    print 'max norm: ', norms.max()

    return pv
def get_weights_report(model_path=None, model=None, rescale='individual',
                       border=False, norm_sort=False, dataset=None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters:
        model_path: the filepath of the model to make the report on.
        rescale: a string specifying how to rescale the filter images
            'individual' (default): scale each filter so that it
                uses as much as possible of the dynamic range
                of the display under the constraint that 0
                is gray and no value gets clipped
            'global' : scale the whole ensemble of weights
            'none' : don't rescale
        dataset: a Dataset object to do view conversion for displaying the
            weights. if not provided one will be loaded from the model's
            dataset_yaml_src
    """
    # Exactly one of model_path / model must be supplied.
    if model is None:
        print 'making weights report'
        print 'loading model'
        model = serial.load(model_path)
        print 'loading done'
    else:
        assert model_path is None
        assert model is not None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale=' + rescale +
                         ", must be 'none', 'global', or 'individual'")

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Assumes exactly one remaining entry is the weight matrix.
        weights, = model.values()

        norms = np.sqrt(np.square(weights).sum(axis=1))
        print 'min norm: ', norms.min()
        print 'mean norm: ', norms.mean()
        print 'max norm: ', norms.max()

        return patch_viewer.make_viewer(weights,
                                        is_color=weights.shape[1] % 3 == 0)

    weights_view = None
    W = None

    try:
        weights_view = model.get_weights_topo()
        h = weights_view.shape[0]
    except NotImplementedError:
        # No topological view: fall back to raw weights plus a
        # dataset-supplied view conversion.
        if dataset is None:
            print 'loading dataset...'
            control.push_load_data(False)
            dataset = yaml_parse.load(model.dataset_yaml_src)
            control.pop_load_data()
            print '...done'

        try:
            W = model.get_weights()
        except AttributeError, e:
            raise AttributeError("""
Encountered an AttributeError while trying to call get_weights on a model.
This probably means you need to implement get_weights for this model class,
but look at the original exception to be sure.
If this is an older model class, it may have weights stored as weightsShared,
etc.
Original exception: """ + str(e))
        # (Definition truncated here in this view of the file.)
def get_weights_report(model_path=None, model=None, rescale='individual',
                       border=False, norm_sort=False, dataset=None):
    """
    Returns a PatchViewer displaying a grid of filter weights

    Parameters
    ----------
    model_path : str
        Filepath of the model to make the report on.
    rescale : str
        A string specifying how to rescale the filter images:
          - 'individual' (default) : scale each filter so that it
                uses as much as possible of the dynamic range
                of the display under the constraint that 0
                is gray and no value gets clipped
          - 'global' : scale the whole ensemble of weights
          - 'none' : don't rescale
    dataset : pylearn2.datasets.dataset.Dataset
        Dataset object to do view conversion for displaying the weights. If
        not provided one will be loaded from the model's dataset_yaml_src.

    Returns
    -------
    WRITEME
    """
    # Exactly one of model_path / model must be supplied.
    if model is None:
        logger.info('making weights report')
        logger.info('loading model')
        model = serial.load(model_path)
        logger.info('loading done')
    else:
        assert model_path is None
        assert model is not None

    # Translate the rescale mode into the two flags used downstream.
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        raise ValueError('rescale=' + rescale +
                         ", must be 'none', 'global', or 'individual'")

    if isinstance(model, dict):
        #assume this was a saved matlab dictionary
        del model['__version__']
        del model['__header__']
        del model['__globals__']
        # Candidate weight matrices are the 2D arrays in the dict; ask the
        # user to disambiguate when there are several.
        keys = [key for key in model \
                if hasattr(model[key], 'ndim') and model[key].ndim == 2]
        if len(keys) > 2:
            key = None
            while key not in keys:
                logger.info('Which is the weights?')
                for key in keys:
                    logger.info('\t{0}'.format(key))
                key = raw_input()
        else:
            key, = keys
        weights = model[key]

        norms = np.sqrt(np.square(weights).sum(axis=1))
        logger.info('min norm: {0}'.format(norms.min()))
        logger.info('mean norm: {0}'.format(norms.mean()))
        logger.info('max norm: {0}'.format(norms.max()))

        return patch_viewer.make_viewer(weights,
                                        is_color=weights.shape[1] % 3 == 0)

    weights_view = None
    W = None

    try:
        weights_view = model.get_weights_topo()
        h = weights_view.shape[0]
    except NotImplementedError:
        # No topological view: fall back to raw weights plus a
        # dataset-supplied view conversion.
        if dataset is None:
            logger.info('loading dataset...')
            control.push_load_data(False)
            dataset = yaml_parse.load(model.dataset_yaml_src)
            control.pop_load_data()
            logger.info('...done')

        try:
            W = model.get_weights()
        except AttributeError, e:
            raise AttributeError("""
Encountered an AttributeError while trying to call get_weights on a model.
This probably means you need to implement get_weights for this model class,
but look at the original exception to be sure.
If this is an older model class, it may have weights stored as weightsShared,
etc.
Original exception: """+str(e))
        # (Definition truncated here in this view of the file.)