    def __init__(self,
                 vis_dim,
                 hid_dim,
                 w=None,
                 l=None,
                 vis_b=None,
                 hid_b=None):
        # Parameters of a semi-restricted Boltzmann machine ('semibm'):
        # visible-hidden weights w, lateral visible-visible weights l, and
        # one bias vector per layer. Parameters passed in are reused;
        # missing ones are created (Xavier init for weights, zeros for
        # biases).
        self.vis_dim = vis_dim
        self.hid_dim = hid_dim
        if w is not None:
            self.w = w
        else:
            self.w = tfe.Variable(tf_xavier_init(self.vis_dim,
                                                 self.hid_dim,
                                                 const=4.0),
                                  name='semibm.w')
        if l is not None:
            self.l = l
        else:
            self.l = tfe.Variable(tf_xavier_init(self.vis_dim,
                                                 self.vis_dim,
                                                 const=4.0),
                                  name='semibm.l')
        if hid_b is not None:
            self.hid_b = hid_b
        else:
            self.hid_b = tfe.Variable(tf.zeros([self.hid_dim]),
                                      dtype=tf.float32,
                                      name='semibm.hid_b')
        if vis_b is not None:
            self.vis_b = vis_b
        else:
            self.vis_b = tfe.Variable(tf.zeros([self.vis_dim]),
                                      dtype=tf.float32,
                                      name='semibm.vis_b')
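
Every constructor on this page calls a tf_xavier_init helper that is not shown
here. A minimal sketch of what such a helper typically looks like follows (an
assumption, not the repository's actual definition): Glorot/Xavier uniform
initialization with an extra scaling constant, which is why the sigmoid-based
models above pass const=4.0.

import numpy as np
import tensorflow as tf


def tf_xavier_init(fan_in, fan_out, const=1.0, dtype=tf.float32):
    # Glorot/Xavier uniform bound, scaled by const (4.0 is the usual choice
    # for sigmoid units, 1.0 for ReLU).
    k = const * np.sqrt(6.0 / (fan_in + fan_out))
    return tf.random_uniform((fan_in, fan_out), minval=-k, maxval=k,
                             dtype=dtype)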
    def __init__(self,
                 dims,
                 name,
                 activation_fn=tf.nn.relu,
                 temp=.1,
                 hard=False):
        self.dims = dims
        self.ws, self.bs = [], []

        # Xavier scaling constant: 1.0 for ReLU, 4.0 otherwise (the usual
        # choice for sigmoid-style units).
        if activation_fn == tf.nn.relu:
            const = 1.0
        else:
            const = 4.0

        assert len(self.dims) > 1
        for i, (d1, d2) in enumerate(zip(dims[:-1], dims[1:]), start=1):
            self.ws.append(
                tfe.Variable(tf_xavier_init(d1, d2, const=const),
                             name='enc.' + name + '.w.' + str(i)))
            self.bs.append(
                tfe.Variable(tf.zeros([d2]),
                             dtype=tf.float32,
                             name='enc.' + name + '.b.' + str(i)))

        self.activation_fn = activation_fn
        self.temp = temp
        self.hard = hard
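
The zip(dims[:-1], dims[1:]) idiom used in this loop (and in the decoder
constructors below) pairs consecutive entries of the dimension list, so each
pair becomes one fully connected layer. A small standalone illustration with
made-up sizes:

dims = [784, 256, 64]                          # illustrative sizes only
layer_shapes = list(zip(dims[:-1], dims[1:]))
print(layer_shapes)                            # [(784, 256), (256, 64)]
# With name='q', the loop above would create weight variables named
# 'enc.q.w.1' (784x256) and 'enc.q.w.2' (256x64), plus matching biases.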
    def __init__(self,
                 dim_z2h,
                 dim_h2v,
                 activation_fn=tf.nn.relu,
                 temp=.1,
                 hard=False,
                 train_pi=True):
        self.prior_h = NADE(dim_z2h[1], dim_z2h[0], temp)
        self.dim_h2v = dim_h2v
        self.w_h2v, self.b_h2v = [], []
        if activation_fn == tf.nn.relu:
            const = 1.0
        else:
            const = 4.0

        assert len(self.dim_h2v) > 1
        for i, (d1, d2) in enumerate(zip(dim_h2v[:-1], dim_h2v[1:]),
                                     start=1):
            self.w_h2v.append(
                tfe.Variable(tf_xavier_init(d1, d2, const=const),
                             name='sbn.w_h2v.' + str(i)))
            self.b_h2v.append(
                tfe.Variable(tf.zeros([d2]),
                             dtype=tf.float32,
                             name='sbn.b_h2v.' + str(i)))

        self.activation_fn = activation_fn
        self.temp = temp
        self.hard = hard
    def __init__(self,
                 vis_dim,
                 hid_dim1,
                 hid_dim2,
                 w1=None,
                 w2=None,
                 vis_b=None,
                 hid_b1=None,
                 hid_b2=None):
        # Parameters of a two-layer deep Boltzmann machine ('dbm'):
        # visible-to-first-hidden weights w1, first-to-second-hidden weights
        # w2, and one bias vector per layer. Parameters passed in are
        # reused; missing ones are created here.
        self.vis_dim = vis_dim
        self.hid_dim1 = hid_dim1
        self.hid_dim2 = hid_dim2
        if w1 is not None:
            self.w1 = w1
        else:
            self.w1 = tfe.Variable(tf_xavier_init(self.vis_dim,
                                                  self.hid_dim1,
                                                  const=4.0),
                                   name='dbm.w1')
        if w2 is not None:
            self.w2 = w2
        else:
            self.w2 = tfe.Variable(tf_xavier_init(self.hid_dim1,
                                                  self.hid_dim2,
                                                  const=4.0),
                                   name='dbm.w2')
        if hid_b1 is not None:
            self.hid_b1 = hid_b1
        else:
            self.hid_b1 = tfe.Variable(tf.zeros([self.hid_dim1]),
                                       dtype=tf.float32,
                                       name='dbm.hid_b1')
        if hid_b2 is not None:
            self.hid_b2 = hid_b2
        else:
            self.hid_b2 = tfe.Variable(tf.zeros([self.hid_dim2]),
                                       dtype=tf.float32,
                                       name='dbm.hid_b2')
        if vis_b is not None:
            self.vis_b = vis_b
        else:
            self.vis_b = tfe.Variable(tf.zeros([self.vis_dim]),
                                      dtype=tf.float32,
                                      name='dbm.vis_b')
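
Only the parameter setup of the DBM is shown above. As a hedged illustration
(standard two-layer deep Boltzmann machine algebra, not code from this
repository; the function name is made up), the weights and biases would enter
the factorized conditional for the first hidden layer roughly like this:

import tensorflow as tf


def sample_h1_given_v_h2(dbm, v, h2):
    # p(h1 = 1 | v, h2) = sigmoid(v @ w1 + h2 @ w2^T + hid_b1): the first
    # hidden layer receives input from both the visible layer and the
    # second hidden layer.
    logits = (tf.matmul(v, dbm.w1)
              + tf.matmul(h2, dbm.w2, transpose_b=True)
              + dbm.hid_b1)
    probs = tf.sigmoid(logits)
    return tf.cast(tf.random_uniform(tf.shape(probs)) < probs, tf.float32)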
    def __init__(self,
                 dim_z2h,
                 dim_h2v,
                 activation_fn=tf.nn.relu,
                 temp=.1,
                 hard=False,
                 has_h=True):
        self.dim_z2h = dim_z2h
        self.w_z2h, self.b_z2h = [], []

        if activation_fn == tf.nn.relu:
            const = 1.0
        else:
            const = 4.0

        if has_h:
            assert len(self.dim_z2h) > 1
            for i, (d1, d2) in enumerate(zip(dim_z2h[:-1], dim_z2h[1:]),
                                         start=1):
                self.w_z2h.append(
                    tfe.Variable(tf_xavier_init(d1, d2, const=const),
                                 name='sbn.w_z2h.' + str(i)))
                self.b_z2h.append(
                    tfe.Variable(tf.zeros([d2]),
                                 dtype=tf.float32,
                                 name='sbn.b_z2h.' + str(i)))

        self.dim_h2v = dim_h2v
        self.w_h2v, self.b_h2v = [], []
        assert len(self.dim_h2v) > 1
        for i, (d1, d2) in enumerate(zip(dim_h2v[:-1], dim_h2v[1:]),
                                     start=1):
            self.w_h2v.append(
                tfe.Variable(tf_xavier_init(d1, d2, const=const),
                             name='sbn.w_h2v.' + str(i)))
            self.b_h2v.append(
                tfe.Variable(tf.zeros([d2]),
                             dtype=tf.float32,
                             name='sbn.b_h2v.' + str(i)))

        self.activation_fn = activation_fn
        self.temp = temp
        self.hard = hard
        self.has_h = has_h  # Set to False to train a 2-layer SBN for debugging; otherwise always True.
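
The snippet above only builds the parameters; its forward pass is not shown.
A hedged sketch of how the w_h2v/b_h2v stack would usually be applied (an
assumed helper, not the source's own method): the hidden layers go through
activation_fn and the final layer returns logits for the visible units.

import tensorflow as tf


def h_to_v_logits(sbn, h):
    # Assumed decode path through the h->v stack built in the constructor.
    x = h
    for w, b in zip(sbn.w_h2v[:-1], sbn.b_h2v[:-1]):
        x = sbn.activation_fn(tf.matmul(x, w) + b)
    return tf.matmul(x, sbn.w_h2v[-1]) + sbn.b_h2v[-1]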
    def __init__(self, dims, name, activation_fn=tf.nn.tanh):
        self.dims = dims
        self.ws, self.bs = [], []

        self.activation_fn = activation_fn
        if activation_fn == tf.nn.relu:
            const = 1.0
        else:
            const = 4.0

        assert len(self.dims) > 1
        for i, (d1, d2) in enumerate(zip(dims[:-2], dims[1:-1]), start=1):
            self.ws.append(
                tfe.Variable(tf_xavier_init(d1, d2, const=const),
                             name='enc.' + name + '.w.' + str(i)))
            self.bs.append(
                tfe.Variable(tf.zeros([d2]),
                             dtype=tf.float32,
                             name='enc.' + name + '.b.' + str(i)))

        # Two output heads on the last hidden layer: mean and log-variance
        # (this constructor builds a Gaussian, VAE-style encoder).
        self.ws.append(
            tfe.Variable(tf_xavier_init(dims[-2], dims[-1], const=const),
                         name='enc.' + name + '.w.out.mu'))
        self.bs.append(
            tfe.Variable(tf.zeros([dims[-1]]),
                         dtype=tf.float32,
                         name='enc.' + name + '.b.out.mu'))
        self.ws.append(
            tfe.Variable(tf_xavier_init(dims[-2], dims[-1], const=const),
                         name='enc.' + name + '.w.out.logvar'))
        self.bs.append(
            tfe.Variable(tf.zeros([dims[-1]]),
                         dtype=tf.float32,
                         name='enc.' + name + '.b.out.logvar'))
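
This last constructor builds shared hidden layers plus separate mean and
log-variance output heads, i.e. a Gaussian (VAE-style) encoder. Its forward
pass is not included in the snippet; a hedged sketch of how these weights are
typically combined with the reparameterization trick follows (an assumption,
not the source's method; the encode name is made up).

import tensorflow as tf


def encode(enc, x):
    h = x
    for w, b in zip(enc.ws[:-2], enc.bs[:-2]):        # shared hidden layers
        h = enc.activation_fn(tf.matmul(h, w) + b)
    mu = tf.matmul(h, enc.ws[-2]) + enc.bs[-2]        # '...w.out.mu' head
    logvar = tf.matmul(h, enc.ws[-1]) + enc.bs[-1]    # '...w.out.logvar' head
    eps = tf.random_normal(tf.shape(mu))              # z = mu + sigma * eps
    return mu + tf.exp(0.5 * logvar) * eps, mu, logvar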