def qz_xy(x, y, n_z, n_particles):
    """Variational net q(z|x, y): 2-layer ReLU MLP over [x, y] producing a
    diagonal Gaussian with n_particles samples of z."""
    with zs.BayesianNet() as variational:
        features = tf.to_float(tf.concat([x, y], 1))
        hidden = tf.layers.dense(features, 500, activation=tf.nn.relu)
        hidden = tf.layers.dense(hidden, 500, activation=tf.nn.relu)
        # Mean and log-std heads share the trunk above.
        z_mu = tf.layers.dense(hidden, n_z)
        z_log_sigma = tf.layers.dense(hidden, n_z)
        zs.Normal('z', z_mu, logstd=z_log_sigma,
                  n_samples=n_particles, group_ndims=1)
    return variational
def labeled_proposal(x, y, z_dim, n_particles):
    """Proposal q(z|x, y) for labeled data; z is drawn without
    reparameterization (e.g. for score-function gradient estimators)."""
    bn = zs.BayesianNet()
    # NOTE(review): assumes qz_xy returns the (mean, logstd) pair for z —
    # confirm against the qz_xy variant actually imported here.
    z_mean, z_logstd = qz_xy(x, y, z_dim)
    bn.normal("z", z_mean, logstd=z_logstd,
              n_samples=n_particles, group_ndims=1,
              is_reparameterized=False)
    return bn
def q_net(observed, x_dim, z_dim, n_z_per_x):
    """Inference net q(z|x): declares x as an empirical int node and draws
    n_z_per_x Gaussian samples of z per input."""
    with zs.BayesianNet(observed=observed) as variational:
        x = zs.Empirical('x', tf.int32, (None, x_dim))
        h = tf.layers.dense(tf.to_float(x), 500, activation=tf.nn.relu)
        h = tf.layers.dense(h, 500, activation=tf.nn.relu)
        mu = tf.layers.dense(h, z_dim)
        log_sigma = tf.layers.dense(h, z_dim)
        zs.Normal('z', mu, logstd=log_sigma, group_ndims=1,
                  n_samples=n_z_per_x)
    return variational
def build_q_net(x, y, z_dim, y_dim):
    """Conditional encoder q(z|x, y) over the concatenation of x and y.

    `y_dim` is unused here but kept so the caller-facing signature is
    unchanged.
    """
    bn = zs.BayesianNet()
    xy = tf.concat(axis=1, values=[x, y])
    hidden = tf.layers.dense(xy, 500, activation=tf.nn.relu)
    hidden = tf.layers.dense(hidden, 500, activation=tf.nn.relu)
    mean = tf.layers.dense(hidden, z_dim)
    logstd = tf.layers.dense(hidden, z_dim)
    bn.normal("z", mean, logstd=logstd, group_ndims=1)
    return bn
def labeled_proposal(x, y, n_z, n_particles):
    """Non-reparameterized Gaussian proposal over z for labeled (x, y)."""
    with zs.BayesianNet() as proposal:
        # NOTE(review): assumes qz_xy yields the (mean, logstd) pair for z —
        # confirm against the qz_xy variant actually in scope here.
        z_mean, z_logstd = qz_xy(x, y, n_z)
        zs.Normal('z', z_mean, logstd=z_logstd,
                  n_samples=n_particles, group_event_ndims=1,
                  is_reparameterized=False)
    return proposal
def test_session_run_issue_49(self):
    """Regression test: sampling a node built inside a BayesianNet context
    must be runnable in a session (https://github.com/thu-ml/zhusuan/issues/49)."""
    with zs.BayesianNet(observed={}) as model:
        zero = tf.zeros([1, 2])
        x = zs.Normal('x', mean=zero, logstd=zero, group_ndims=1)
    with self.test_session(use_gpu=True) as sess:
        sess.run(tf.global_variables_initializer())
        _ = sess.run(x)
def vae(observed, x_dim, z_dim, n, n_particles=1):
    """Generative model p(x, z): standard-normal prior on z, MLP decoder,
    Bernoulli likelihood with n_particles samples of x."""
    with zs.BayesianNet(observed=observed) as model:
        prior_mean = tf.zeros([n, z_dim])
        z = zs.Normal('z', prior_mean, std=1., group_ndims=1)
        h = tf.layers.dense(z, 500, activation=tf.nn.relu)
        h = tf.layers.dense(h, 500, activation=tf.nn.relu)
        x_logits = tf.layers.dense(h, x_dim)
        # Expose the Bernoulli mean as a named node for downstream use.
        zs.Implicit("x_mean", tf.sigmoid(x_logits), group_ndims=1)
        zs.Bernoulli('x', x_logits, group_ndims=1, n_samples=n_particles)
    return model
def lntm(observed, D, K, V, eta_mean, eta_logstd):
    """Logistic-normal topic model: per-document eta (shared prior broadcast
    over D docs) and topic-word matrix beta."""
    with zs.BayesianNet(observed=observed) as model:
        # Broadcast the shared eta prior parameters across all D documents.
        eta_mu = tf.tile(tf.expand_dims(eta_mean, 0), [D, 1])
        eta_ls = tf.tile(tf.expand_dims(eta_logstd, 0), [D, 1])
        zs.Normal('eta', eta_mu, logstd=eta_ls, group_event_ndims=1)
        # log_delta is a free name — presumably a module-level constant
        # scaling the beta prior's log-std; verify at module scope.
        zs.Normal('beta', tf.zeros([K, V]),
                  logstd=tf.ones([K, V]) * log_delta,
                  group_event_ndims=1)
    return model
def build_gen(x_dim, z_dim, n, n_particles=1):
    """Decoder p(x|z): standard-normal z, 2-layer ReLU MLP, Bernoulli pixels."""
    bn = zs.BayesianNet()
    z = bn.normal("z", tf.zeros([n, z_dim]), std=1.,
                  group_ndims=1, n_samples=n_particles)
    hidden = tf.layers.dense(z, 500, activation=tf.nn.relu)
    hidden = tf.layers.dense(hidden, 500, activation=tf.nn.relu)
    logits = tf.layers.dense(hidden, x_dim)
    # Named deterministic node for the Bernoulli mean.
    bn.deterministic("x_mean", tf.sigmoid(logits))
    bn.bernoulli("x", logits, group_ndims=1)
    return bn
def build_q_net(x, y, z_dim, n_z_per_x):
    """Conditional encoder q(z|x, y).

    Concatenates x and y, runs a 2-layer ReLU MLP, and declares a diagonal
    Gaussian over z with n_z_per_x samples per input.

    Fix: removed a leftover debug ``print(x.shape, y.shape)`` that spammed
    stdout on every graph build.
    """
    bn = zs.BayesianNet()
    cat_x_y = tf.concat([x, y], axis=1)
    # Cast once up front; x/y may be integer-typed (e.g. binarized pixels).
    h = tf.layers.dense(tf.cast(cat_x_y, tf.float32), 500,
                        activation=tf.nn.relu)
    h = tf.layers.dense(h, 500, activation=tf.nn.relu)
    z_mean = tf.layers.dense(h, z_dim)
    z_logstd = tf.layers.dense(h, z_dim)
    bn.normal("z", z_mean, logstd=z_logstd, group_ndims=1,
              n_samples=n_z_per_x)
    return bn
def build_gen(y, x_dim, z_dim, n):
    """Conditional decoder p(x|z, y): standard-normal z, MLP over [z, y],
    Bernoulli x emitted as float32.

    Fix: renamed the local ``input`` (shadowed the ``input`` builtin) and
    dropped unused local bindings for the x_mean/x nodes.
    """
    bn = zs.BayesianNet()
    z = bn.normal("z", tf.zeros([n, z_dim]), std=1., group_ndims=1)
    z_y = tf.concat([z, y], axis=1)
    h = tf.layers.dense(z_y, 500, activation=tf.nn.relu)
    h = tf.layers.dense(h, 500, activation=tf.nn.relu)
    x_logits = tf.layers.dense(h, x_dim)
    bn.deterministic("x_mean", tf.sigmoid(x_logits))
    bn.bernoulli("x", x_logits, group_ndims=1, dtype=tf.float32)
    return bn
def vae(observed, n, n_x, n_z):
    """Generative model: standard-normal z, fully-connected decoder,
    Bernoulli x; also returns the pixel logits for reconstruction views."""
    with zs.BayesianNet(observed=observed) as model:
        mu = tf.zeros([n, n_z])
        log_sigma = tf.zeros([n, n_z])
        z = zs.Normal('z', mu, logstd=log_sigma, group_event_ndims=1)
        h = layers.fully_connected(z, 500)
        h = layers.fully_connected(h, 500)
        x_logits = layers.fully_connected(h, n_x, activation_fn=None)
        zs.Bernoulli('x', x_logits, group_event_ndims=1)
    return model, x_logits
def mean_field_variational(n_particles):
    """Factorized Gaussian q(z1)q(z2) with trainable scalar mean/log-std per
    factor; returns the net plus the variable lists for the optimizer."""
    with zs.BayesianNet() as variational:
        z_mean, z_logstd = [], []
        for i in (0, 1):
            z_mean.append(tf.Variable(-2.))
            z_logstd.append(tf.Variable(-5.))
            # Nodes are named 'z1' and 'z2' to match the model side.
            _ = zs.Normal('z' + str(i + 1), z_mean[i],
                          logstd=z_logstd[i], n_samples=n_particles)
    return variational, z_mean, z_logstd
def p_Y_Xw(observed, X, drop_rate, n_basis, net_sizes, n_samples, task):
    """Push X through a dense net sized by net_sizes; softmax the output for
    classification. drop_rate/n_basis/n_samples are unused here but kept for
    the caller-facing signature."""
    with zs.BayesianNet(observed=observed) as model:
        f = tf.expand_dims(X, 1)
        n_layers = len(net_sizes) - 1
        for i in range(n_layers):
            f = tf.layers.dense(f, net_sizes[i + 1])
            # ReLU after every layer except the last (linear output head).
            if i < n_layers - 1:
                f = tf.nn.relu(f)
        f = tf.squeeze(f, [1])
        if task == "classification":
            f = tf.nn.softmax(f)
    return model, f, None
def q_net(x, y, z_dim):
    """Encoder q(z|x, y): diagonal Gaussian from an MLP over [x, y]."""
    with zs.BayesianNet() as variational:
        joint = tf.to_float(tf.concat([x, y], axis=1))
        h = tf.layers.dense(joint, 500, activation=tf.nn.relu)
        h = tf.layers.dense(h, 500, activation=tf.nn.relu)
        mu = tf.layers.dense(h, z_dim)
        log_sigma = tf.layers.dense(h, z_dim)
        zs.Normal('z', mu, logstd=log_sigma, group_ndims=1)
    return variational
def vae(observed, x_dim, z_dim, n, y):
    """Conditional VAE generative model p(x|z, y); returns the net and the
    sigmoid mean node for reconstruction/visualization."""
    with zs.BayesianNet(observed=observed) as model:
        z = zs.Normal('z', tf.zeros([n, z_dim]), std=1., group_ndims=1)
        zy = tf.concat([z, tf.cast(y, tf.float32)], axis=1)
        h = tf.layers.dense(zy, 500, activation=tf.nn.relu)
        h = tf.layers.dense(h, 500, activation=tf.nn.relu)
        logits = tf.layers.dense(h, x_dim)
        x_mean = zs.Implicit("x_mean", tf.sigmoid(logits), group_ndims=1)
        zs.Bernoulli('x', logits, group_ndims=1)
    return model, x_mean
def q_net(x, z_dim):
    """Encoder q(z|x): two fully-connected layers feeding a diagonal
    Gaussian over z."""
    with zs.BayesianNet() as variational:
        h = layers.fully_connected(tf.to_float(x), 500)
        h = layers.fully_connected(h, 500)
        mu = layers.fully_connected(h, z_dim, activation_fn=None)
        log_sigma = layers.fully_connected(h, z_dim, activation_fn=None)
        zs.Normal('z', mu, logstd=log_sigma, group_ndims=1)
    return variational
def q_net(x, n_h, n_particles):
    """Layer-wise Bernoulli proposal q(h1, h2, h3 | x) for a deep sigmoid
    belief net; only the first layer is multi-sampled (n_particles)."""
    with zs.BayesianNet() as proposal:
        logits1 = layers.fully_connected(tf.to_float(x), n_h,
                                         activation_fn=None)
        h1 = zs.Bernoulli('h1', logits1, n_samples=n_particles,
                          group_ndims=1, dtype=tf.float32)
        logits2 = layers.fully_connected(h1, n_h, activation_fn=None)
        h2 = zs.Bernoulli('h2', logits2, group_ndims=1, dtype=tf.float32)
        logits3 = layers.fully_connected(h2, n_h, activation_fn=None)
        zs.Bernoulli('h3', logits3, group_ndims=1, dtype=tf.float32)
    return proposal
def build_gen(x_dim, z_dim, y, n, n_particles=1):
    """Conditional decoder p(x|z, y), tiling y along the particle axis.

    The reshape fixes y's trailing dimension at 10 (one-hot labels) —
    assumes 10 classes.
    """
    bn = zs.BayesianNet()
    z = bn.normal("z", tf.zeros([n, z_dim]), std=1.,
                  group_ndims=1, n_samples=n_particles)
    # [batch, 10] -> [n_particles, batch, 10] so y lines up with z.
    y = tf.reshape(y, [1, tf.shape(y)[0], 10])
    y = tf.tile(y, [n_particles, 1, 1])
    h = tf.layers.dense(tf.concat([y, z], axis=2), 500,
                        activation=tf.nn.relu)
    h = tf.layers.dense(h, 500, activation=tf.nn.relu)
    logits = tf.layers.dense(h, x_dim)
    bn.deterministic("x_mean", tf.sigmoid(logits))
    bn.bernoulli("x", logits, group_ndims=1)
    return bn
def q_net(x, z_dim, n_particles):
    """Encoder q(z|x) (contrib-layers variant) drawing n_particles samples."""
    with zs.BayesianNet() as variational:
        h = layers.fully_connected(tf.to_float(x), 500)
        h = layers.fully_connected(h, 500)
        mu = layers.fully_connected(h, z_dim, activation_fn=None)
        log_sigma = layers.fully_connected(h, z_dim, activation_fn=None)
        zs.Normal('z', mu, logstd=log_sigma, group_ndims=1,
                  n_samples=n_particles)
    return variational
def q_net(x, z_dim, n_particles):
    """Encoder q(z|x) (tf.layers variant) drawing n_particles samples."""
    with zs.BayesianNet() as variational:
        hidden = tf.layers.dense(tf.to_float(x), 500, activation=tf.nn.relu)
        hidden = tf.layers.dense(hidden, 500, activation=tf.nn.relu)
        mu = tf.layers.dense(hidden, z_dim)
        log_sigma = tf.layers.dense(hidden, z_dim)
        zs.Normal('z', mu, logstd=log_sigma, group_ndims=1,
                  n_samples=n_particles)
    return variational
def q_net(observed, x, n_z, n_particles):
    """Encoder q(z|x) built over an observed dict, drawing n_particles
    samples of z."""
    with zs.BayesianNet(observed=observed) as variational:
        hidden = tf.layers.dense(tf.to_float(x), 500, activation=tf.nn.relu)
        hidden = tf.layers.dense(hidden, 500, activation=tf.nn.relu)
        mu = tf.layers.dense(hidden, n_z)
        log_sigma = tf.layers.dense(hidden, n_z)
        zs.Normal('z', mu, logstd=log_sigma, n_samples=n_particles,
                  group_ndims=1)
    return variational
def build_q_net(x, z_dim, n_particles):
    """Encoder q(z|x) in BayesianNet-builder style, n_particles samples."""
    bn = zs.BayesianNet()
    hidden = tf.layers.dense(tf.cast(x, tf.float32), 500,
                             activation=tf.nn.relu)
    hidden = tf.layers.dense(hidden, 500, activation=tf.nn.relu)
    mean = tf.layers.dense(hidden, z_dim)
    logstd = tf.layers.dense(hidden, z_dim)
    bn.normal("z", mean, logstd=logstd, group_ndims=1,
              n_samples=n_particles)
    return bn
def M2(observed, n, n_x, n_y, n_z, n_particles):
    """Semi-supervised generative model: uniform one-hot y, standard-normal
    z, Bernoulli x decoded from [z, y]."""
    with zs.BayesianNet(observed=observed) as model:
        z = zs.Normal('z', tf.zeros([n, n_z]), std=1.,
                      n_samples=n_particles, group_ndims=1)
        # Zero logits => uniform prior over the n_y classes.
        y = zs.OnehotCategorical('y', tf.zeros([n, n_y]),
                                 n_samples=n_particles)
        h = layers.fully_connected(tf.concat([z, tf.to_float(y)], 2), 500)
        h = layers.fully_connected(h, 500)
        x_logits = layers.fully_connected(h, n_x, activation_fn=None)
        zs.Bernoulli('x', x_logits, group_ndims=1)
    return model
def gmm(observed, n, n_x, n_z):
    """Gaussian mixture model: one-hot component z selects a column of the
    trainable mu / log_sigma matrices to parameterize x.

    var_regularizer and l1_regularizer are free names — presumably defined
    at module level; verify before reuse.
    """
    with zs.BayesianNet(observed=observed) as model:
        # Unnormalized mixture logits, regularized toward spread-out values.
        log_pi = tf.get_variable('log_pi', n_z, initializer=tf.truncated_normal_initializer(mean=1., stddev=0.5), regularizer=var_regularizer(1.0))
        # Component means, one column per component.  # try uniform init
        mu = tf.get_variable('mu', [n_x, n_z], initializer=tf.orthogonal_initializer(gain=4.0))
        # Component log-stds.  # try not l1_reg
        log_sigma = tf.get_variable('log_sigma', [n_x, n_z], initializer=tf.truncated_normal_initializer(stddev=0.5), regularizer=l1_regularizer(0.01))
        z = zs.OnehotCategorical('z', log_pi, n_samples=n)
        # One-hot matmul picks the selected component's parameters per sample.
        x_mean = tf.matmul(tf.to_float(z.tensor), tf.transpose(mu))
        x_logstd = tf.matmul(tf.to_float(z.tensor), tf.transpose(log_sigma))
        # NOTE(review): x_logstd is passed positionally; depending on the
        # ZhuSuan version the third positional arg may bind to `std`, not
        # `logstd` — confirm and prefer the explicit keyword.
        x = zs.Normal('x', x_mean, x_logstd, group_event_ndims=1)
    return model, x.tensor, z.tensor
def build_gen(y, x_dim, z_dim, n, n_particles=1):
    """Conditional decoder p(x|z, y), repeating y once per particle."""
    bn = zs.BayesianNet()
    z = bn.normal("z", tf.zeros([n, z_dim]), std=1.,
                  group_ndims=1, n_samples=n_particles)
    # Prepend a particle axis to y and repeat it, casting to match z.
    y = tf.cast(tf.repeat(tf.expand_dims(y, axis=0),
                          repeats=n_particles, axis=0), tf.float32)
    h = tf.layers.dense(tf.concat([z, y], axis=2), 500,
                        activation=tf.nn.relu)
    h = tf.layers.dense(h, 500, activation=tf.nn.relu)
    logits = tf.layers.dense(h, x_dim)
    bn.deterministic("x_mean", tf.sigmoid(logits))
    bn.bernoulli("x", logits, group_ndims=1)
    return bn
def mean_field_variational(layer_sizes, n_particles):
    """Mean-field Gaussian over per-layer weight matrices of a Bayesian NN;
    the +1 column holds the bias."""
    with zs.BayesianNet() as variational:
        ws = []
        size_pairs = zip(layer_sizes[:-1], layer_sizes[1:])
        for i, (n_in, n_out) in enumerate(size_pairs):
            w_mean = tf.get_variable(
                'w_mean_' + str(i), shape=[1, n_out, n_in + 1],
                initializer=tf.constant_initializer(0.))
            w_logstd = tf.get_variable(
                'w_logstd_' + str(i), shape=[1, n_out, n_in + 1],
                initializer=tf.constant_initializer(0.))
            ws.append(
                zs.Normal('w' + str(i), w_mean, logstd=w_logstd,
                          n_samples=n_particles, group_ndims=2))
    return variational
def q_net(observed, x, n_z, n_particles):
    """Encoder q(z|x) (contrib-layers variant over an observed dict)."""
    with zs.BayesianNet(observed=observed) as variational:
        h = layers.fully_connected(tf.to_float(x), 500)
        h = layers.fully_connected(h, 500)
        mu = layers.fully_connected(h, n_z, activation_fn=None)
        log_sigma = layers.fully_connected(h, n_z, activation_fn=None)
        zs.Normal('z', mu, logstd=log_sigma, n_samples=n_particles,
                  group_event_ndims=1)
    return variational
def qz_xy(x, y, n_z, n_particles):
    """Variational net q(z|x, y) (contrib-layers variant): diagonal Gaussian
    over z with n_particles samples."""
    with zs.BayesianNet() as variational:
        joint = tf.to_float(tf.concat([x, y], 1))
        h = layers.fully_connected(joint, 500)
        h = layers.fully_connected(h, 500)
        mu = layers.fully_connected(h, n_z, activation_fn=None)
        log_sigma = layers.fully_connected(h, n_z, activation_fn=None)
        zs.Normal('z', mu, logstd=log_sigma, n_samples=n_particles,
                  group_event_ndims=1)
    return variational
def unlabeled_proposal(x, n_y, n_z, n_particles):
    """Proposal for unlabeled x: sample y ~ q(y|x), then z ~ q(z|x, y)
    without reparameterization."""
    with zs.BayesianNet() as proposal:
        y_logits = qy_x(x, n_y)
        y = zs.OnehotCategorical('y', y_logits, n_samples=n_particles)
        # Tile x so it aligns with the n_particles sampled labels.
        x_rep = tf.tile(tf.expand_dims(x, 0), [n_particles, 1, 1])
        # NOTE(review): assumes this qz_xy variant takes (x, y, n_z) and
        # returns the (mean, logstd) pair — confirm against the import.
        z_mean, z_logstd = qz_xy(x_rep, y, n_z)
        zs.Normal('z', z_mean, logstd=z_logstd, group_event_ndims=1,
                  is_reparameterized=False)
    return proposal