Example #1
from sklearn.base import BaseEstimator, TransformerMixin

# RBM, Trainer, and chunks are assumed to be supplied by the RBM library
# this snippet builds on; they are not defined in the example itself.


class PyRBMFeatureExtractor(BaseEstimator, TransformerMixin):
    def __init__(self,
                 n_hidden=500,
                 binary=True,
                 learning_rate=0.1,
                 momentum=0.2,
                 l2_weight=0.001,
                 sparsity=0.1,
                 n_training_epochs=10,
                 scale=0.001,
                 reconstruction=True,
                 n_gibbs=10,
                 batch_size=20):

        self.n_hidden = n_hidden
        self.binary = binary
        self.momentum = momentum
        self.learning_rate = learning_rate
        self.l2_weight = l2_weight
        self.sparsity = sparsity
        self.batch_size = batch_size
        self.scale = scale
        self.n_training_epochs = n_training_epochs
        self.reconstruction = reconstruction
        self.n_gibbs = n_gibbs

        super(PyRBMFeatureExtractor, self).__init__()

    def fit(self, X, y=None):
        n_features = X.shape[1]

        # One visible unit per input feature, n_hidden latent units.
        self.rbm = RBM(n_features, self.n_hidden, self.binary, self.scale)

        trainer = Trainer(self.rbm,
                          l2=self.l2_weight,
                          momentum=self.momentum,
                          target_sparsity=self.sparsity)

        # Mini-batch training for a fixed number of epochs.
        for i in range(self.n_training_epochs):
            for j, batch in enumerate(chunks(X, self.batch_size)):
                # log.debug("Training rbm on epoch %s batch %s" % (i, j))
                trainer.learn(batch, learning_rate=self.learning_rate)

        return self

    def transform(self, X, y=None):
        # Either reconstruct the input after n_gibbs sampling steps, or
        # return the expected hidden-unit activations as features.
        if self.reconstruction:
            return self.rbm.reconstruct(X, self.n_gibbs)
        else:
            return self.rbm.hidden_expectation(X)
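A minimal usage sketch, not part of the original example: it assumes the class above is importable and that the RBM library behind RBM, Trainer, and chunks is installed; the toy data and the LogisticRegression classifier are illustrative assumptions showing how the transformer slots into a scikit-learn Pipeline.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline

# Toy binary inputs and labels, purely for illustration.
X = (np.random.rand(200, 64) > 0.5).astype(float)
y = np.random.randint(0, 2, size=200)

pipeline = Pipeline([
    ("rbm_features", PyRBMFeatureExtractor(n_hidden=100,
                                            n_training_epochs=5,
                                            reconstruction=False)),
    ("clf", LogisticRegression()),
])

pipeline.fit(X, y)            # trains the RBM, then the classifier on its features
print(pipeline.predict(X[:5]))

With reconstruction=False the transformer feeds the hidden-unit expectations to the classifier; with the default reconstruction=True it would pass Gibbs-sampled reconstructions instead.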
Example #2
    def fit(self, X, y=None):
        n_features = X.shape[1]

        self.rbm = RBM(n_features, self.n_hidden, self.binary, self.scale)

        trainer = Trainer(self.rbm,
                          l2=self.l2_weight,
                          momentum=self.momentum,
                          target_sparsity=self.sparsity)

        for i in range(self.n_training_epochs):
            for j, batch in enumerate(chunks(X, self.batch_size)):
                #log.debug("Training rbm on epoch %s batch %s" % (i, j))
                trainer.learn(batch, learning_rate=self.learning_rate)

        return self
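The chunks helper used in fit is not shown in any of the examples; a plausible minimal sketch, an assumption rather than the original implementation, is a generator that slices the training data into mini-batches:

def chunks(X, batch_size):
    # Yield successive mini-batches of rows from X; the final batch may be
    # smaller than batch_size. This is an assumed stand-in for the helper
    # used above, not the original implementation.
    for start in range(0, X.shape[0], batch_size):
        yield X[start:start + batch_size]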