Example #1
    def test_round_split(self):
        # Excerpt from a dataset test class; assumes the module-level imports
        # math.floor plus neon's UniformRandom dataset and CPU backend.
        split = 10  # validation share, in percent
        batch_size = 32
        ntrain, ntest, nin, nout = 100, 10, 10, 5
        data = UniformRandom(ntrain, ntest, nin, nout, validation_pct=split)
        data.backend = CPU(rng_seed=0)
        data.backend.batch_size = batch_size
        data.load()
        split /= 100.0
        nb_batches = ntrain // batch_size
        # The split is applied to whole batches, rounding down on both sides.
        expected_nb_train = floor((1.0 - split) * nb_batches)
        expected_nb_valid = floor(split * nb_batches)
        assert expected_nb_train == len(data.inputs['train'])
        assert expected_nb_train == len(data.targets['train'])
        assert expected_nb_valid == len(data.inputs['validation'])
        assert expected_nb_valid == len(data.targets['validation'])
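
With these values the rounding is easy to verify by hand: 100 // 32 gives 3
whole batches, so floor(0.9 * 3) = 2 training batches and floor(0.1 * 3) = 0
validation batches. A minimal standalone sketch of the same arithmetic (plain
Python; split_batches is an illustrative helper, not part of neon):

    from math import floor

    def split_batches(ntrain, batch_size, validation_pct):
        # Mirrors the expectation in the test above: the split is taken over
        # whole batches, rounding down on both sides.
        split = validation_pct / 100.0
        nb_batches = ntrain // batch_size
        return floor((1.0 - split) * nb_batches), floor(split * nb_batches)

    assert split_batches(100, 32, 10) == (2, 0)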
Example #2
    def run(self):
        """
        Actually carry out each of the experiment steps.
        """
        if not (hasattr(self.model, 'fprop') and hasattr(self.model, 'bprop')):
            logger.error('Config file not compatible.')
            return

        self.eps = 1e-4  # finite-difference step for the gradient checks
        self.weights = []
        self.trainable_layers = []
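        # Record the index of every layer with trainable parameters and keep
        # a copy of its weights.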
        for ind, layer in enumerate(self.model.layers):
            if not (hasattr(layer, 'weights') and hasattr(layer, 'updates')):
                continue
            self.weights.append(layer.backend.copy(layer.weights))
            self.trainable_layers.append(ind)

        # If no dataset was configured, synthesize a random one sized to the
        # model's input and output dimensions.
        if not hasattr(self, 'dataset'):
            if isinstance(self.model, MLP):
                datashape = (self.model.data_layer.nout,
                             self.model.cost_layer.nin)
            else:
                datashape = (self.model.layers[0].nin,
                             self.model.layers[-1].nout)
            self.dataset = UniformRandom(self.model.batch_size,
                                         self.model.batch_size, datashape[0],
                                         datashape[1])
            self.dataset.backend = self.model.backend
            self.dataset.set_distributed_batch_size(self.model)
            self.dataset.load()
        ds = self.dataset

        # Run one full fprop/bprop/update pass, snapshot the resulting state,
        # run a second identical pass, then restore the snapshot so the layer
        # checks below start from the saved state.
        if isinstance(self.model, MLP):
            self.model.data_layer.dataset = ds
            self.model.data_layer.use_set('train')
            self.model.fprop()
            self.model.bprop()
            self.model.update(0)

            self.save_state()
            self.model.data_layer.reset_counter()
            self.model.fprop()
            self.model.bprop()
            self.model.update(0)
            self.load_state()
        else:
            inputs = ds.get_batch(ds.get_inputs(train=True)['train'], 0)
            targets = ds.get_batch(ds.get_targets(train=True)['train'], 0)

            self.model.fprop(inputs)
            self.model.bprop(targets, inputs)
            self.model.update(0)

            self.save_state()
            self.model.fprop(inputs)
            self.model.bprop(targets, inputs)
            self.model.update(0)
            self.load_state()

        for ind in self.trainable_layers[::-1]:
            layer = self.model.layers[ind]
            if isinstance(self.model, MLP):
                result = self.check_layerb(layer)
            else:
                result = self.check_layer(layer, inputs, targets)
            if result is False:
                break
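
The per-layer check_layer / check_layerb calls are not shown in this excerpt.
A checker of this kind typically perturbs each weight by +/- eps and compares
the centered finite difference of the loss against the analytic gradient from
bprop. A minimal self-contained sketch of that idea, assuming nothing beyond
NumPy (check_gradient and both callbacks are illustrative names, not neon
API):

    import numpy as np

    def check_gradient(loss_fn, grad_fn, weights, eps=1e-4, tol=1e-6):
        # Compare the analytic gradient against centered finite differences.
        analytic = grad_fn(weights)
        numeric = np.zeros_like(weights)
        flat = weights.ravel()          # view: writes reach weights directly
        for i in range(flat.size):
            orig = flat[i]
            flat[i] = orig + eps        # nudge one weight up ...
            loss_plus = loss_fn(weights)
            flat[i] = orig - eps        # ... and down
            loss_minus = loss_fn(weights)
            flat[i] = orig              # restore the original value
            numeric.ravel()[i] = (loss_plus - loss_minus) / (2 * eps)
        return float(np.max(np.abs(analytic - numeric))) < tol

    # Toy check: the loss 0.5 * ||w||^2 has gradient w.
    w = np.random.randn(3, 2)
    assert check_gradient(lambda w: 0.5 * np.sum(w * w), lambda w: w.copy(), w)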