def sample(self, z, flat=True):
    """Invert the multi-scale flow: map a flat latent vector back to an image.

    Args:
        z: 2D tensor [batch, total_latent_dim]; the per-level latents are
            concatenated along the feature axis, deepest level first.
        flat: unused; kept for interface compatibility.

    Returns:
        4D tensor reconstructed by running every flow block in reverse.
    """
    batch = -1  # infer the batch dimension at graph-build time

    def _take_level(z_flat, shape):
        # Slice the leading prod(shape) features off z_flat and reshape them
        # into a [batch, h, w, c] tensor; return the reshaped part plus the
        # remaining flat tail.
        n = shape[0] * shape[1] * shape[2]
        return tf.reshape(z_flat[:, :n], [batch] + list(shape)), z_flat[:, n:]

    # Start from the deepest level's latent.
    x, z = _take_level(z, self.z_shape_list[-1])
    for level in reversed(range(1, self.L)):
        for step in reversed(range(self.K)):
            x = self.blocks[level][step](x, inverse=True)
        x = ops.unsqueeze2d(x)
        # Re-attach the latent that was split off at this level.
        x_concat, z = _take_level(z, self.z_shape_list[level - 1])
        x = tf.concat([x_concat, x], axis=-1)
    # Final (shallowest) level, then undo the initial squeeze.
    for step in reversed(range(self.K)):
        x = self.blocks[0][step](x, inverse=True)
    return ops.unsqueeze2d(x)
def split2d_reverse(z, eps, hps=None, name=None):
    """Inverse of a split2d layer: unsqueeze z and re-attach the factored-out half.

    Args:
        z: 4D tensor holding the kept half of the split.
        eps: noise tensor forwarded to the split prior's sampler.
        hps: hyper-parameters forwarded to split2d_prior.
        name: variable-scope name for this layer.

    Returns:
        4D tensor with the sampled half concatenated on the channel axis.
    """
    with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
        kept = unsqueeze2d(z)
        prior_dist = split2d_prior(kept, hps=hps)
        restored = prior_dist.sample(eps=eps)
        return tf.concat([kept, restored], 3)
def f_decode(self, y, eps):
    """Decode class labels plus per-level noise tensors into images.

    Args:
        y: integer class-label tensor.
        eps: list of noise tensors; the last entry feeds the top prior and the
            rest feed the decoder levels.

    Returns:
        4D tensor of post-processed samples.
    """
    with tf.variable_scope('model', reuse=tf.AUTO_REUSE):
        onehot = tf.cast(tf.one_hot(y, self.cfg.n_y, 1, 0), 'float32')
        _, draw, _ = prior("prior", onehot, self.cfg)
        h = draw(eps=eps[-1])
        h = self.decoder(h, eps=eps[:-1])
        h = ops.unsqueeze2d(h, 2)  # 8x8x12 -> 16x16x3
        return self.postprocess(h)
def sample(self, y, temp=1.0, eps=None, post_process=True):
    """Draw samples from the model conditioned on class labels.

    Args:
        y: integer class-label tensor.
        temp: sampling temperature for the top prior.
        eps: optional list of per-level noise tensors; defaults to a list of
            None entries, one per level.
        post_process: when True, run the result through self.postprocess.

    Returns:
        4D tensor of generated samples.
    """
    # NOTE(review): the whole eps list is handed to the decoder here, unlike
    # f_decode which passes eps[:-1] — presumably this decoder consumes all
    # entries itself; confirm against its signature.
    eps = [None] * self.cfg.n_levels if eps is None else eps
    with tf.variable_scope('model', reuse=tf.AUTO_REUSE):
        onehot = tf.cast(tf.one_hot(y, self.cfg.n_y, 1, 0), 'float32')
        _, draw, _ = prior("prior", onehot, self.cfg)
        h = draw(temp=temp)
        h = self.decoder(h, eps)
        h = ops.unsqueeze2d(h, 2)  # 8x8x12 -> 16x16x3
        return self.postprocess(h) if post_process else h
def decode(self, labels=None, condition=None, epsilon=None):
    """Run the flow in the generative direction to produce image samples.

    Args:
        labels: Class label, could be none (falls back to self.y); 1D tensors
            are one-hot encoded, 2D tensors are used as-is.
        condition: 2D or 4D tensor, condition for dynamic linear transformation.
        epsilon: None or list. If specified, it should be a list with
            `num_levels` elements; shorter lists are left-padded with None.

    Returns:
        x: 4D tensor, generated samples (uint8, also cached in self.gen_x).
    """
    with tf.variable_scope("prior", reuse=tf.AUTO_REUSE):
        # Resolve the label argument into a one-hot tensor.
        if labels is None:
            y_onehot = self.y
        elif len(labels.shape) == 1:
            y_onehot = tf.one_hot(labels, depth=self.num_classes, dtype=tf.float32)
        elif len(labels.shape) == 2:
            y_onehot = labels
        _, sample, get_eps = prior(y_onehot, self.hps)
        # Left-pad a short epsilon list so its entries align with the last levels.
        if epsilon is not None:
            eps = epsilon if len(
                epsilon) == self.hps.num_levels else [None] * (
                    self.hps.num_levels - 1) + epsilon
        else:
            eps = [None] * self.hps.num_levels
        # Last eps entry drives the top prior; the rest feed the codec below.
        z = sample(eps=eps[-1])
        objective = tf.zeros(tf.shape(z)[0])
        if self.hps.conditioning and condition is None:
            condition = y_onehot
        # with tf.variable_scope("cond_preprocess", reuse=tf.AUTO_REUSE):
        #     condition = tf.layers.dense(condition, units=10, use_bias=False)
        # Reverse pass through the flow levels.
        z, objective = codec(z, cond=condition, hps=self.hps, reverse=True,
                             objective=objective, eps=eps[:-1],
                             reuse=tf.AUTO_REUSE)
    with tf.variable_scope("postprocess"):
        x = unsqueeze2d(z)
        # Quantize back to [0, 255] pixel values.
        x = tf.clip_by_value(
            tf.floor((x + .5) * self.num_bins) * (256. / self.num_bins),
            0, 255)
        self.gen_x = tf.cast(x, 'uint8')
    return self.gen_x
def split2d_reverse(name, z, eps, eps_std, cfg):
    """Inverse split2d: unsqueeze z and restore the split-off channels.

    Args:
        name: variable-scope name for this layer.
        z: 4D tensor holding the kept half of the split.
        eps: pre-sampled noise for the split prior, or None.
        eps_std: per-sample noise scale, or None.
        cfg: configuration forwarded to split2d_prior.

    Returns:
        4D tensor with both halves concatenated on the channel axis.
    """
    with tf.variable_scope(name):
        kept = ops.unsqueeze2d(z)
        prior_dist = split2d_prior(kept, cfg)
        if eps is not None:
            # Caller already drew the noise — use it directly.
            restored = prior_dist.sample_eps(eps)
        elif eps_std is not None:
            # Scale the prior's own noise per sample.
            scale = tf.reshape(eps_std, [-1, 1, 1, 1])
            restored = prior_dist.sample_eps(prior_dist.eps * scale)
        else:
            # Plain sampling at unit temperature.
            restored = prior_dist.sample(1.0)
        return tf.concat([kept, restored], 3)
def decode(self, labels=None, condition=None, epsilon=None):
    """Run the generative direction of the flow and return image samples.

    Args:
        labels: Class label, could be none (falls back to self.y); 1D label
            indices are one-hot encoded, 2D tensors are used as-is.
        condition: 2D or 4D tensor, condition for dynamic linear
            transformation; defaults to the one-hot labels when
            hps.conditioning is set.
        epsilon: None or list. If specified, it should be a list with
            `num_levels` elements; shorter lists are left-padded with None.

    Returns:
        x: 4D tensor, generated samples (uint8, also cached in self.gen_x).
    """
    with tf.variable_scope("prior", reuse=tf.AUTO_REUSE):
        # Turn whatever label form was supplied into a one-hot tensor.
        if labels is None:
            y_onehot = self.y
        elif len(labels.shape) == 1:
            y_onehot = tf.one_hot(labels, depth=self.num_classes, dtype=tf.float32)
        elif len(labels.shape) == 2:
            y_onehot = labels
        _, sample, get_eps = prior(y_onehot, self.hps)
        if epsilon is None:
            eps = [None] * self.hps.num_levels
        elif len(epsilon) == self.hps.num_levels:
            eps = epsilon
        else:
            # Left-pad short lists so entries line up with the last levels.
            eps = [None] * (self.hps.num_levels - 1) + epsilon
        # The final eps entry drives the top prior; the rest feed the codec.
        z = sample(eps=eps[-1])
        objective = tf.zeros(tf.shape(z)[0])
        if self.hps.conditioning and condition is None:
            condition = y_onehot
        # with tf.variable_scope("cond_preprocess", reuse=tf.AUTO_REUSE):
        #     condition = tf.layers.dense(condition, units=10, use_bias=False)
        z, objective = codec(z, cond=condition, hps=self.hps, reverse=True,
                             objective=objective, eps=eps[:-1],
                             reuse=tf.AUTO_REUSE)
    with tf.variable_scope("postprocess"):
        x = unsqueeze2d(z)
        # Quantize back into the [0, 255] pixel range.
        quantized = tf.floor((x + .5) * self.num_bins) * (256. / self.num_bins)
        x = tf.clip_by_value(quantized, 0, 255)
        self.gen_x = tf.cast(x, 'uint8')
    return self.gen_x