    def __init__(self, n_visible, n_hidden):
        super(GaussianBinaryRBM, self).__init__()
        # data shape
        self.n_visible = n_visible
        self.n_hidden = n_hidden
        # units
        self.v = units.GaussianUnits(self, name='v')  # visibles
        self.h = units.BinaryUnits(self, name='h')  # hiddens
        # parameters
        parameters.FixedBiasParameters(self, self.v.precision_units)
        self.W = parameters.ProdParameters(
            self, [self.v, self.h],
            theano.shared(value=self._initial_W(), name='W'),
            name='W')  # weights
        self.bv = parameters.BiasParameters(
            self, self.v,
            theano.shared(value=self._initial_bv(), name='bv'),
            name='bv')  # visible bias
        self.bh = parameters.BiasParameters(
            self, self.h,
            theano.shared(value=self._initial_bh(), name='bh'),
            name='bh')  # hidden bias
    def __init__(self, n_visible, n_hidden):
        super(TruncExpBinaryRBM, self).__init__()
        # data shape
        self.n_visible = n_visible
        self.n_hidden = n_hidden
        # units
        self.v = units.TruncatedExponentialUnits(self, name='v')  # visibles
        self.h = units.BinaryUnits(self, name='h')  # hiddens
        # parameters
        self.W = parameters.ProdParameters(
            self, [self.v, self.h],
            theano.shared(value=self._initial_W(), name='W'),
            name='W')  # weights
        self.bv = parameters.BiasParameters(
            self, self.v,
            theano.shared(value=self._initial_bv(), name='bv'),
            name='bv')  # visible bias
        self.bh = parameters.BiasParameters(
            self, self.h,
            theano.shared(value=self._initial_bh(), name='bh'),
            name='bh')  # hidden bias
    def __init__(self, n_visible, n_hidden):
        super(LearntPrecisionGaussianBinaryRBM, self).__init__()
        # data shape
        self.n_visible = n_visible
        self.n_hidden = n_hidden
        # units
        self.v = units.LearntPrecisionGaussianUnits(self, name='v')  # visibles
        self.h = units.BinaryUnits(self, name='h')  # hiddens
        # parameters
        self.Wm = parameters.ProdParameters(
            self, [self.v, self.h],
            theano.shared(value=self._initial_W(), name='Wm'),
            name='Wm')  # mean weights
        self.Wp = parameters.ProdParameters(
            self, [self.v.precision_units, self.h],
            # precision weights start out small and negative
            theano.shared(value=-np.abs(self._initial_W()) / 1000, name='Wp'),
            name='Wp')  # precision weights
        self.bvm = parameters.BiasParameters(
            self, self.v,
            theano.shared(value=self._initial_bias(self.n_visible), name='bvm'),
            name='bvm')  # visible bias
        self.bvp = parameters.BiasParameters(
            self, self.v.precision_units,
            theano.shared(value=self._initial_bias(self.n_visible), name='bvp'),
            name='bvp')  # precision bias
        self.bh = parameters.BiasParameters(
            self, self.h,
            theano.shared(value=self._initial_bias(self.n_hidden), name='bh'),
            name='bh')  # hidden bias
    def __init__(self, n_visible, n_hidden_mean, n_hidden_precision):
        super(LearntPrecisionSeparateGaussianBinaryRBM, self).__init__()
        # data shape
        self.n_visible = n_visible
        self.n_hidden_mean = n_hidden_mean
        self.n_hidden_precision = n_hidden_precision
        # units
        self.v = units.LearntPrecisionGaussianUnits(self, name='v')  # visibles
        self.hm = units.BinaryUnits(self, name='hm')  # hiddens for mean
        self.hp = units.BinaryUnits(self, name='hp')  # hiddens for precision
        # parameters
        self.Wm = parameters.ProdParameters(
            self, [self.v, self.hm],
            theano.shared(value=self._initial_W(self.n_visible,
                                                self.n_hidden_mean),
                          name='Wm'),
            name='Wm')  # mean weights
        self.Wp = parameters.ProdParameters(
            self, [self.v.precision_units, self.hp],
            # precision weights start out small and negative
            theano.shared(value=-np.abs(self._initial_W(
                self.n_visible, self.n_hidden_precision)) / 1000,
                          name='Wp'),
            name='Wp')  # precision weights
        self.bvm = parameters.BiasParameters(
            self, self.v,
            theano.shared(value=self._initial_bias(self.n_visible),
                          name='bvm'),
            name='bvm')  # visible bias
        self.bvp = parameters.BiasParameters(
            self, self.v.precision_units,
            theano.shared(value=self._initial_bias(self.n_visible),
                          name='bvp'),
            name='bvp')  # precision bias
        self.bhm = parameters.BiasParameters(
            self, self.hm,
            theano.shared(value=self._initial_bias(self.n_hidden_mean),
                          name='bhm'),
            name='bhm')  # hidden bias for mean
        self.bhp = parameters.BiasParameters(
            self, self.hp,
            theano.shared(value=self._initial_bias(self.n_hidden_precision) + 1.0,
                          name='bhp'),
            name='bhp')  # hidden bias for precision
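The constructors above all follow the same pattern: declare the Units, then attach the Parameters (weights and biases) that tie them into the energy function. Below is a minimal usage sketch; the module path (morb.rbms) and the layer sizes are assumptions, and the CD call simply mirrors the stats.cd_stats call shown in the last example of this listing.

# Minimal usage sketch -- module path and sizes are assumptions.
import theano.tensor as T
from morb import rbms, stats  # assumes the classes above live in morb.rbms

rbm = rbms.GaussianBinaryRBM(n_visible=784, n_hidden=100)  # hypothetical sizes
initial_vmap = {rbm.v: T.matrix('v')}

# CD statistics, mirroring the stats.cd_stats call in the last example below
s = stats.cd_stats(rbm, initial_vmap,
                   visible_units=[rbm.v],
                   hidden_units=[rbm.h])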
Example 5
    def __init__(self, n_visible, n_hidden, n_factors):
        super(FactoredBinaryBinaryRBM, self).__init__()
        # data shape
        self.n_visible = n_visible
        self.n_hidden = n_hidden
        self.n_factors = n_factors
        # units
        self.v = units.BinaryUnits(self, name='v')  # visibles
        self.h = units.BinaryUnits(self, name='h')  # hiddens
        # parameters
        Wv = theano.shared(value=self._initial_W(self.n_visible,
                                                 self.n_factors),
                           name='Wv')
        Wh = theano.shared(value=self._initial_W(self.n_hidden,
                                                 self.n_factors),
                           name='Wh')
        self.F = factors.Factor(self, name='F')  # factor
        self.Wv = parameters.ProdParameters(self.F, [self.v, self.F],
                                            Wv,
                                            name='Wv')
        self.Wh = parameters.ProdParameters(self.F, [self.h, self.F],
                                            Wh,
                                            name='Wh')
        self.F.initialize()

        self.bv = parameters.BiasParameters(self,
                                            self.v,
                                            theano.shared(
                                                value=self._initial_bv(),
                                                name='bv'),
                                            name='bv')  # visible bias
        self.bh = parameters.BiasParameters(self,
                                            self.h,
                                            theano.shared(
                                                value=self._initial_bh(),
                                                name='bh'),
                                            name='bh')  # hidden bias
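One way to read the factored parameterization above: the factor F couples the visibles and hiddens through Wv and Wh, so the effective visible-hidden interaction is constrained to rank at most n_factors. A small numpy illustration (sizes arbitrary, not taken from the example):

import numpy as np

n_visible, n_hidden, n_factors = 6, 4, 2  # arbitrary illustrative sizes
Wv = np.random.randn(n_visible, n_factors)
Wh = np.random.randn(n_hidden, n_factors)

# the factored bilinear term v^T (Wv Wh^T) h uses this effective weight matrix,
# whose rank is at most n_factors
W_effective = Wv.dot(Wh.T)  # shape (n_visible, n_hidden)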
Example 6
# This example shows how the FIOTRBM model from "Facial Expression Transfer with
# Input-Output Temporal Restricted Boltzmann Machines" by Zeiler et al. (NIPS
# 2011) can be recreated in Morb.

rbm = base.RBM()
rbm.v = units.GaussianUnits(rbm)  # output (visibles)
rbm.h = units.BinaryUnits(rbm)  # latent (hiddens)
rbm.s = units.Units(rbm)  # input (context)
rbm.vp = units.Units(rbm)  # output history (context)

initial_A = ...
initial_B = ...
initial_bv = ...
initial_bh = ...
initial_Wv = ...
initial_Wh = ...
initial_Ws = ...

parameters.FixedBiasParameters(
    rbm, rbm.v.precision_units)  # add precision term to the energy function
rbm.A = parameters.ProdParameters(
    rbm, [rbm.vp, rbm.v],
    initial_A)  # weights from past output to current output
rbm.B = parameters.ProdParameters(
    rbm, [rbm.vp, rbm.h], initial_B)  # weights from past output to hiddens
rbm.bv = parameters.BiasParameters(rbm, rbm.v, initial_bv)  # visible bias
rbm.bh = parameters.BiasParameters(rbm, rbm.h, initial_bh)  # hidden bias
rbm.W = parameters.ThirdOrderFactoredParameters(
    rbm, [rbm.v, rbm.h, rbm.s],
    [initial_Wv, initial_Wh, initial_Ws])  # factored third order weights
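The initial_A, initial_B, ... values are left elided in the original example. For orientation only, here is a hedged sketch of plausible shapes, assuming Theano shared variables and hypothetical layer sizes; the names, dimensions, and the shared_normal helper are illustrative and not taken from the paper or from Morb.

import numpy as np
import theano

# hypothetical dimensions, purely for illustration
n_visible, n_hidden, n_context, n_history, n_factors = 100, 200, 50, 300, 50

def shared_normal(shape, scale=0.01, name=None):
    # small Gaussian-initialised Theano shared variable
    return theano.shared(np.random.normal(0, scale, shape)
                         .astype(theano.config.floatX), name=name)

initial_A = shared_normal((n_history, n_visible), name='A')  # past output -> output
initial_B = shared_normal((n_history, n_hidden), name='B')   # past output -> hiddens
initial_bv = theano.shared(np.zeros(n_visible, dtype=theano.config.floatX), name='bv')
initial_bh = theano.shared(np.zeros(n_hidden, dtype=theano.config.floatX), name='bh')
initial_Wv = shared_normal((n_visible, n_factors), name='Wv')
initial_Wh = shared_normal((n_hidden, n_factors), name='Wh')
initial_Ws = shared_normal((n_context, n_factors), name='Ws')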
Example 7
# The opening of this example (imports, sizes, and the start of the weight
# initialization) was truncated; the lines below are a reconstruction, and the
# sizes, distribution, and scale are assumptions.
import numpy as np
import theano
import theano.tensor as T
import morb.base
from morb import units, parameters, stats

n_visible, n_hidden, n_context = 100, 100, 10  # hypothetical sizes
initial_W = np.asarray(np.random.normal(0, 0.01,
                                        size=(n_visible, n_hidden, n_context)),
                       dtype=theano.config.floatX)
initial_bv = np.zeros(n_visible, dtype=theano.config.floatX)
initial_bh = np.zeros(n_hidden, dtype=theano.config.floatX)

rbm = morb.base.RBM()
rbm.v = units.BinaryUnits(rbm, name='v')  # visibles
rbm.h = units.BinaryUnits(rbm, name='h')  # hiddens
rbm.x = units.Units(rbm, name='x')  # context

rbm.W = parameters.ThirdOrderParameters(rbm, [rbm.v, rbm.h, rbm.x],
                                        theano.shared(value=initial_W,
                                                      name='W'),
                                        name='W')  # weights
rbm.bv = parameters.BiasParameters(rbm,
                                   rbm.v,
                                   theano.shared(value=initial_bv, name='bv'),
                                   name='bv')  # visible bias
rbm.bh = parameters.BiasParameters(rbm,
                                   rbm.h,
                                   theano.shared(value=initial_bh, name='bh'),
                                   name='bh')  # hidden bias

initial_vmap = {rbm.v: T.matrix('v'), rbm.x: T.matrix('x')}

# try to calculate weight updates using CD-1 stats
print ">> Constructing contrastive divergence updaters..."
s = stats.cd_stats(rbm,
                   initial_vmap,
                   visible_units=[rbm.v],
                   hidden_units=[rbm.h],
                   context_units=[rbm.x],