Example #1
File: sgd.py  Project: nitbix/pylearn2
    def _setup_monitor(self):
        """
        Set up monitor to model the objective value, learning rate,
        momentum (if applicable), and extra channels defined by
        the cost.

        This method must be called after `learning_rule.get_updates`,
        since it may have an effect on `learning_rule.add_channels_to_monitor`
        (that is currently the case for `learning_rule.RMSProp`).
        """
        if bool(self.monitoring_dataset):
            if self.monitoring_batch_size is None and self.monitoring_batches is None:
                self.monitoring_batch_size = self.batch_size
                self.monitoring_batches = self.batches_per_iter
            self.monitor.setup(
                dataset=self.monitoring_dataset,
                cost=self.cost,
                batch_size=self.monitoring_batch_size,
                num_batches=self.monitoring_batches,
                extra_costs=self.monitoring_costs,
                mode=self.monitor_iteration_mode,
            )
            dataset_name = first_key(self.monitoring_dataset)
            monitoring_dataset = self.monitoring_dataset[dataset_name]
            # TODO: have Monitor support non-data-dependent channels
            self.monitor.add_channel(
                name="learning_rate",
                ipt=None,
                val=self.learning_rate,
                data_specs=(NullSpace(), ""),
                dataset=monitoring_dataset,
            )

            if self.learning_rule:
                self.learning_rule.add_channels_to_monitor(self.monitor, monitoring_dataset)
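In the example above, `first_key` is only used to pick one named entry out of the `monitoring_dataset` dictionary so that data-independent channels such as `learning_rate` have a dataset to attach to. Below is a minimal sketch of the helper's assumed behaviour (returning the first key in iteration order; this is not pylearn2's actual implementation) together with that lookup pattern:

# Minimal sketch of the dictionary helpers these examples rely on; an
# assumption, not pylearn2's actual implementation.
def first_key(d):
    """Return the first key of a dict (insertion order on Python 3.7+)."""
    return next(iter(d))


def first_value(d):
    """Return the value stored under the first key."""
    return next(iter(d.values()))


# Hypothetical name -> dataset mapping, mirroring monitoring_dataset above.
monitoring_dataset = {'train': 'train_set', 'valid': 'valid_set'}
dataset_name = first_key(monitoring_dataset)        # 'train'
monitoring_set = monitoring_dataset[dataset_name]   # 'train_set'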
Example #2
    def _setup_monitor(self):
        """
        Set up monitor to model the objective value, learning rate,
        momentum (if applicable), and extra channels defined by
        the cost.

        This method must be called after `learning_rule.get_updates`,
        since it may have an effect on `learning_rule.add_channels_to_monitor`
        (that is currently the case for `learning_rule.RMSProp`).
        """
        if bool(self.monitoring_dataset):
            if (self.monitoring_batch_size is None
                    and self.monitoring_batches is None):
                self.monitoring_batch_size = self.batch_size
                self.monitoring_batches = self.batches_per_iter
            self.monitor.setup(dataset=self.monitoring_dataset,
                               cost=self.cost,
                               batch_size=self.monitoring_batch_size,
                               num_batches=self.monitoring_batches,
                               extra_costs=self.monitoring_costs,
                               mode=self.monitor_iteration_mode)
            dataset_name = first_key(self.monitoring_dataset)
            monitoring_dataset = self.monitoring_dataset[dataset_name]
            # TODO: have Monitor support non-data-dependent channels
            self.monitor.add_channel(name='learning_rate',
                                     ipt=None,
                                     val=self.learning_rate,
                                     data_specs=(NullSpace(), ''),
                                     dataset=monitoring_dataset)

            if self.learning_rule:
                self.learning_rule.add_channels_to_monitor(
                    self.monitor, monitoring_dataset)
Example #3
def test_unpickle_key():
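    # The `!pkl` tag makes the YAML loader unpickle the file, so the tuple
    # itself becomes the dictionary key checked below.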
    fd, fname = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as f:
        d = ('a', 1)
        cPickle.dump(d, f)
    loaded = load("{!pkl: '%s': 50}" % fname)
    assert_(first_key(loaded) == d)
    assert_(first_value(loaded) == 50)
    os.remove(fname)
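The test relies on pylearn2's YAML `load` resolving the `!pkl` tag to the unpickled object, so the tuple becomes the single dictionary key. Here is a standalone sketch of the same round trip, substituting plain `pickle` for the YAML loader purely to illustrate what `first_key` and `first_value` return:

import os
import pickle
import tempfile

# Pickle a tuple to a temporary file, read it back, and use it as a dict key.
fd, fname = tempfile.mkstemp()
with os.fdopen(fd, 'wb') as f:
    pickle.dump(('a', 1), f)
with open(fname, 'rb') as f:
    key = pickle.load(f)
os.remove(fname)

loaded = {key: 50}
assert next(iter(loaded)) == ('a', 1)        # what first_key(loaded) returns
assert next(iter(loaded.values())) == 50     # what first_value(loaded) returns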
Example #4
def summarize(path):
    """
    Summarize the model

    Parameters
    ----------
    path : str
        The path to the pickled model to summarize
    """
    model = serial.load(path)
    for param in model.get_params():
        name = param.name
        if name is None:
            name = '<anon>'
        v = param.get_value()
        print(name + ': ' + str((v.min(), v.mean(), v.max())), end='')
        print(str(v.shape))
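        # If the parameter mixes signs, also report statistics of its
        # absolute values.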
        if np.sign(v.min()) != np.sign(v.max()):
            v = np.abs(v)
            print('abs(' + name + '): ' + str((v.min(), v.mean(), v.max())))
        if v.ndim == 2:
            row_norms = np.sqrt(np.square(v).sum(axis=1))
            print(name + " row norms:", end='')
            print((row_norms.min(), row_norms.mean(), row_norms.max()))
            col_norms = np.sqrt(np.square(v).sum(axis=0))
            print(name + " col norms:", end='')
            print((col_norms.min(), col_norms.mean(), col_norms.max()))

    if hasattr(model, 'monitor'):
        print('trained on', model.monitor.get_examples_seen(), 'examples')
        print('which corresponds to ', end='')
        print(model.monitor.get_batches_seen(), 'batches')
        key = first_key(model.monitor.channels)
        hour = float(model.monitor.channels[key].time_record[-1]) / 3600.
        print('Trained for {0} hours'.format(hour))
        try:
            print(model.monitor.get_epochs_seen(), 'epochs')
        except Exception:
            pass
        if hasattr(model.monitor, 'training_succeeded'):
            if model.monitor.training_succeeded:
                print('Training succeeded')
            else:
                print('Training was not yet completed ' +
                      'at the time of this save.')
        else:
            print('This pickle file is damaged, or was made before the ' +
                  'Monitor tracked whether training completed.')
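The per-parameter statistics printed above are plain NumPy reductions. A small standalone sketch of the row/column norm computation on a toy matrix (hypothetical values, unrelated to any real model):

import numpy as np

# Toy 2-D parameter; in summarize() `v` would be param.get_value().
v = np.array([[3.0, 4.0],
              [0.0, 1.0]])

# Per-row and per-column L2 norms, computed exactly as in summarize().
row_norms = np.sqrt(np.square(v).sum(axis=1))   # [5.0, 1.0]
col_norms = np.sqrt(np.square(v).sum(axis=0))   # [3.0, sqrt(17)]

print('row norms:', row_norms.min(), row_norms.mean(), row_norms.max())
print('col norms:', col_norms.min(), col_norms.mean(), col_norms.max())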