Example #1
    def __init__(self, model, batch_size):

        # Call the constructor of the superclass; it is expected to store
        # the model (self.rbm) and the batch size (self.batch_size).
        super(PCD, self).__init__(model=model, batch_size=batch_size)

        # Persistent Gibbs sampler keeping one Markov chain per batch element.
        self.sampler = RBM_SAMPLER.PersistentGibbsSampler(
            self.rbm, self.batch_size)
    def test_Persistent_Gibbs_sampler(self):
        sys.stdout.write('RBM Sampler -> Performing PersistentGibbsSampler test ... ')
        sys.stdout.flush()
        numx.random.seed(42)
        sampler = Sampler.PersistentGibbsSampler(self.bbrbm, 1)
        probCD1, probCD2, probCS1, probCS2, probCS3, probCS4, sumProbs = self.execute_sampler(
            sampler, self.num_samples)
        assert numx.all(numx.abs(1.0 / 4.0 - probCD1) < self.epsilon)
        assert numx.all(numx.abs(1.0 / 4.0 - probCD2) < self.epsilon)
        assert numx.all(numx.abs(1.0 / 8.0 - probCS1) < self.epsilon)
        assert numx.all(numx.abs(1.0 / 8.0 - probCS2) < self.epsilon)
        assert numx.all(numx.abs(1.0 / 8.0 - probCS3) < self.epsilon)
        assert numx.all(numx.abs(1.0 / 8.0 - probCS4) < self.epsilon)
        assert numx.all(numx.abs(1.0 - sumProbs) < self.epsilon)
        print('successfully passed!')
        sys.stdout.flush()
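To make explicit what "persistent" means in the test above: the sampler keeps its Markov chain state between calls instead of reinitializing it from the data each time. The sketch below is an illustrative re-implementation in plain numpy, not the library's PersistentGibbsSampler; the ToyPersistentGibbs class and all names inside it are assumptions made only for this illustration.

import numpy as np

class ToyPersistentGibbs(object):
    """Keeps one visible-state Markov chain alive between sample() calls."""

    def __init__(self, weights, bias_vis, bias_hid, seed=42):
        self.rng = np.random.RandomState(seed)
        self.w = weights
        self.bv = bias_vis
        self.bh = bias_hid
        # Persistent chain state: initialized once, then reused on every call.
        self.chain_v = self.rng.randint(0, 2, size=(1, weights.shape[0])).astype(float)

    @staticmethod
    def _sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))

    def sample(self):
        """Advance the persistent chain by one full Gibbs step and return v."""
        p_h = self._sigmoid(self.chain_v.dot(self.w) + self.bh)
        h = (self.rng.rand(*p_h.shape) < p_h).astype(float)
        p_v = self._sigmoid(h.dot(self.w.T) + self.bv)
        self.chain_v = (self.rng.rand(*p_v.shape) < p_v).astype(float)
        return self.chain_v

# Repeated calls continue the same chain instead of restarting from the data.
toy = ToyPersistentGibbs(0.01 * np.random.randn(4, 3), np.zeros(4), np.zeros(3))
samples = np.vstack([toy.sample() for _ in range(10)])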
    def __init__(self, model, num_chains, data=None):
        """ The constructor initializes the PCD trainer with a given model and data.

        :param model: The model to sample from.
        :type model: Valid model class.

        :param num_chains: The number of chains that should be used.
                           .. Note:: You should use the data's batch size!
        :type num_chains: int

        :param data: Data for initialization; only has an effect if the centered gradient is used.
        :type data: numpy array [num. samples x input dim]
        """
        # Call the constructor of the CD superclass.
        super(PCD, self).__init__(model, data)
        self.sampler = sampler.PersistentGibbsSampler(model, num_chains)
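A hedged usage sketch for this constructor follows. Only the PCD(model, num_chains, data=None) signature and the note about matching the batch size come from the docstring above; the data shape and the commented-out model construction are placeholders, not part of the documented API.

import numpy as np

batch_size = 100
train_data = np.random.randint(0, 2, size=(1000, 16)).astype(float)  # placeholder binary data

# model = ...  # construct a valid RBM model here; its construction is not shown above

# Per the docstring note, use the data's batch size as the number of persistent
# chains, and pass the data so the centered gradient can make use of it.
# trainer = PCD(model, num_chains=batch_size, data=train_data)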