def decode(self, z):
    """Map latent codes z to the parameters of the likelihood p(x|z).

    Args:
        z: tensor of latent samples, first axis is the batch.

    Returns:
        For a Gaussian likelihood: (mean, log_var) tensors.
        For a Bernoulli likelihood: per-pixel probabilities pi.

    Raises:
        ValueError: if ``self.TYPE_PX`` is not a recognized likelihood type.
    """
    if self.TYPE_PX == 'Gaussian':
        mean, log_var = dgm._forward_pass_Gauss(z, self.Pz_x, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
        return mean, log_var
    elif self.TYPE_PX == 'Bernoulli':
        pi = dgm._forward_pass_Bernoulli(z, self.Pz_x, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
        return pi
    else:
        # Previously an unrecognized TYPE_PX fell through and returned
        # None silently; fail loudly instead.
        raise ValueError('Unknown TYPE_PX: {}'.format(self.TYPE_PX))
def _sample_xy(self, n_samples=int(1e3)):
    """Draw joint samples (x, y) from the generative model.

    Restores the latest checkpoint, samples z ~ N(0, I) and y from a
    uniform class prior, then decodes x from p(x|z, y).

    Args:
        n_samples: number of joint samples to draw.

    Returns:
        (x, y): x is the array evaluated from the decoder graph; y is the
        one-hot numpy array of sampled labels.
    """
    saver = tf.train.Saver()
    with tf.Session() as session:
        ckpt = tf.train.get_checkpoint_state(self.ckpt_dir)
        saver.restore(session, ckpt.model_checkpoint_path)
        # Inference mode (e.g. for batchnorm) while sampling.
        self.phase = False
        z_ = np.random.normal(size=(n_samples, self.Z_DIM)).astype('float32')
        # Uniform class prior. Use explicit float division: the original
        # `1 / self.NUM_CLASSES` truncates to 0 under Python 2 semantics,
        # which would make np.random.multinomial sample from all-zero
        # probabilities.
        p = np.ones(self.NUM_CLASSES) * (1.0 / self.NUM_CLASSES)
        y_ = np.random.multinomial(1, p, size=n_samples).astype('float32')
        h = tf.concat([z_, y_], axis=1)
        if self.TYPE_PX == 'Gaussian':
            mean, logvar = dgm._forward_pass_Gauss(h, self.Pzy_x, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
            # Reparameterized draw: x = mean + sigma * eps.
            eps = tf.random_normal([n_samples, self.X_DIM], dtype=tf.float32)
            x_ = mean + tf.sqrt(tf.exp(logvar)) * eps
        else:
            x_ = dgm._forward_pass_Bernoulli(h, self.Pzy_x, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
        # session.run on the tensor directly instead of a 1-element fetch list.
        x = session.run(x_)
        return x, y_
def _sample_Z(self, x, y, n_samples):
    """Reparameterized sample from q(z|x, y).

    Args:
        x: input batch.
        y: label batch (concatenated with x before the forward pass).
        n_samples: unused here; kept for interface compatibility.

    Returns:
        (mean, log_var, z) where z = mean + sigma * eps, eps ~ N(0, I).
    """
    joint = tf.concat([x, y], axis=1)
    mu, lv = dgm._forward_pass_Gauss(joint, self.Qxy_z, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
    noise = tf.random_normal([tf.shape(x)[0], self.Z_DIM], dtype=tf.float32)
    sample = mu + tf.sqrt(tf.exp(lv)) * noise
    return mu, lv, sample
def _compute_logpx(self, x, a, z, y=None):
    """Compute per-example log-likelihood log p(x|z, a).

    Args:
        x: observed data batch.
        a: auxiliary latent batch.
        z: latent batch (concatenated with a as decoder input).
        y: unused; kept for interface compatibility with callers.

    Returns:
        A tensor of per-example log-likelihoods.

    Raises:
        ValueError: if ``self.TYPE_PX`` is not a recognized likelihood type.
    """
    h = tf.concat([a, z], axis=1)
    if self.TYPE_PX == 'Gaussian':
        mean, log_var = dgm._forward_pass_Gauss(h, self.Pza_x, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
        return dgm._gauss_logp(x, mean, log_var)
    elif self.TYPE_PX == 'Bernoulli':
        logits = dgm._forward_pass_Cat_logits(h, self.Pza_x, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
        # Negative sigmoid cross-entropy summed over features equals the
        # Bernoulli log-likelihood of x under the predicted logits.
        return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(labels=x, logits=logits), axis=1)
    else:
        # Previously an unrecognized TYPE_PX fell through and returned
        # None silently; fail loudly instead.
        raise ValueError('Unknown TYPE_PX: {}'.format(self.TYPE_PX))
def encode(self, x):
    """Deterministically encode x: predict a hard label, then return the
    mean of q(z|x, y).

    Args:
        x: input batch.

    Returns:
        The posterior mean of z (the log-variance is discarded).
    """
    class_probs = dgm._forward_pass_Cat(x, self.Qx_y, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
    # Hard assignment: one-hot of the most probable class.
    hard_labels = tf.one_hot(tf.argmax(class_probs, axis=1), self.NUM_CLASSES)
    joint = tf.concat([x, hard_labels], axis=1)
    z_mean, _ = dgm._forward_pass_Gauss(joint, self.Qxy_z, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
    return z_mean
def _sample_a(self, x, n_samples):
    """Reparameterized sample from the auxiliary posterior q(a|x).

    Args:
        x: input batch.
        n_samples: unused here; kept for interface compatibility.

    Returns:
        (mean, log_var, a) where a = mean + sigma * eps, eps ~ N(0, I).
    """
    mu, lv = dgm._forward_pass_Gauss(x, self.Qx_a, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
    noise = tf.random_normal([tf.shape(x)[0], self.A_DIM], 0, 1, dtype=tf.float32)
    sample = mu + tf.sqrt(tf.exp(lv)) * noise
    return mu, lv, sample
def _labeled_loss(self, x, y):
    """Per-example labeled ELBO terms for an (x, y) pair.

    Combines reconstruction terms log p(x|a,z) and log p(y|x,z,a) with the
    beta-weighted difference between prior and posterior log-densities of
    the latents z and a.

    Args:
        x: labeled input batch.
        y: corresponding label batch.

    Returns:
        A per-example tensor: log p(x) + log p(y)
        + beta * (log p(z) + log p(a) - log q(z) - log q(a)).
    """
    a_mu, a_lv, a = self._sample_a(x, self.Z_SAMPLES)
    z_mu, z_lv, z = self._sample_Z(x, y, a, self.Z_SAMPLES)
    # Conditional prior p(a|z).
    prior_a_mu, prior_a_lv = dgm._forward_pass_Gauss(z, self.Pz_a, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
    log_px = self._compute_logpx(x, a, z)
    log_py = self._compute_logpy(y, x, z, a)
    # Standard-normal prior on z: zero mean, log-variance log(1) = 0.
    log_pz = dgm._gauss_logp(z, tf.zeros_like(z), tf.log(tf.ones_like(z)))
    log_pa = dgm._gauss_logp(a, prior_a_mu, prior_a_lv)
    log_qz = dgm._gauss_logp(z, z_mu, z_lv)
    log_qa = dgm._gauss_logp(a, a_mu, a_lv)
    kl_terms = log_pz + log_pa - log_qz - log_qa
    return log_px + log_py + self.beta * kl_terms
def _sample_xy(self, n_samples=int(1e3)):
    """Sample x from the prior via p(x|z), then predict y from p(y|x, z).

    Restores the latest checkpoint before building and running the
    sampling graph.

    Args:
        n_samples: number of samples to draw.

    Returns:
        (x, y): decoded samples and their predicted class distributions.
    """
    saver = tf.train.Saver()
    with tf.Session() as session:
        ckpt = tf.train.get_checkpoint_state(self.ckpt_dir)
        saver.restore(session, ckpt.model_checkpoint_path)
        # Inference mode (e.g. for batchnorm) while sampling.
        self.phase = False
        z_ = np.random.normal(size=(n_samples, self.Z_DIM)).astype('float32')
        if self.TYPE_PX == 'Gaussian':
            # NOTE(review): only the decoder mean is used here (no noise
            # is added) — presumably intentional for clean samples.
            x_ = dgm._forward_pass_Gauss(z_, self.Pz_x, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)[0]
        else:
            x_ = dgm._forward_pass_Bernoulli(z_, self.Pz_x, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
        h = tf.concat([x_, z_], axis=1)
        y_ = dgm._forward_pass_Cat(h, self.Pzx_y, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
        samples = session.run([x_, y_])
        return samples[0], samples[1]
def encode(self, x):
    """Encode x as the mean of the approximate posterior q(z|x).

    Args:
        x: input batch.

    Returns:
        The posterior mean of z (the log-variance is discarded).
    """
    posterior_mean, _ = dgm._forward_pass_Gauss(x, self.Qx_z, self.NUM_HIDDEN, self.NONLINEARITY, self.batchnorm, self.phase)
    return posterior_mean