def call(self, inputs):
  """Run `inputs` through the layer and build a logistic mixture output.

  The layer's last axis is split into three equal parts interpreted as
  (mixture logits, component locations, unconstrained component scales).

  Args:
    inputs: Input tensor fed to ``self.layer``.

  Returns:
    An ``ed.MixtureSameFamily`` distribution with Categorical mixing
    weights over Logistic components.
  """
  features = self.layer(inputs)
  # Split the final axis into the three parameter groups of the mixture.
  mix_logits, comp_loc, raw_scale = tf.split(features, 3, axis=-1)
  # Softplus keeps scales positive; epsilon guards against exact zeros.
  comp_scale = tf.nn.softplus(raw_scale) + tf.keras.backend.epsilon()
  mixing = ed.Categorical(logits=mix_logits).distribution
  components = ed.Logistic(loc=comp_loc, scale=comp_scale).distribution
  return ed.MixtureSameFamily(
      mixture_distribution=mixing,
      components_distribution=components)
def model(NHIDDEN, x):
  """One-hidden-layer (tanh) classifier with a standard-normal log-prior.

  Args:
    NHIDDEN: Number of hidden units.
    x: Input tensor; matmul with W implies shape
       [batch, NPixels * NPixels] — TODO confirm against callers.

  Returns:
    Tuple ``(x, y, logprior)`` where ``y`` is an edward2 Categorical over
    ``NLabels`` classes and ``logprior`` is the summed N(0, 1) log-density
    of all weights and biases.
  """
  # NOTE(review): all four initializers reuse seed=1, so equally-shaped
  # tensors would draw identical values — confirm this is intentional.
  W = tf.Variable(
      tf.random_normal([NPixels * NPixels, NHIDDEN], 0.0, 0.1,
                       dtype=tf.float32, seed=1))
  b = tf.Variable(
      tf.random_normal([1, NHIDDEN], 0.0, 0.1, dtype=tf.float32, seed=1))
  W_out = tf.Variable(
      tf.random_normal([NHIDDEN, NLabels], 0.0, 0.1, dtype=tf.float32,
                       seed=1))
  b_out = tf.Variable(
      tf.random_normal([1, NLabels], 0.0, 0.1, dtype=tf.float32, seed=1))
  hidden_layer = tf.nn.tanh(tf.matmul(x, W) + b)
  out = tf.matmul(hidden_layer, W_out) + b_out
  y = ed.Categorical(logits=out, name="y")
  ### Prior
  normal = tf.distributions.Normal(0., 1.)
  # Fixed typo: "logpiror" -> "logprior" (local name only; return tuple
  # order and contents are unchanged).
  logprior = tf.math.reduce_sum(normal.log_prob(W)) + \
      tf.math.reduce_sum(normal.log_prob(b)) + \
      tf.math.reduce_sum(normal.log_prob(W_out)) + \
      tf.math.reduce_sum(normal.log_prob(b_out))
  return x, y, logprior
def model(NHIDDEN, x):
  """Bayesian one-hidden-layer (ReLU) classifier with unit-normal priors.

  Args:
    NHIDDEN: Number of hidden units.
    x: Input tensor; matmul with W implies shape
       [batch, NPixels * NPixels] — TODO confirm against callers.

  Returns:
    Tuple ``(W, b, W_out, b_out, x, y)`` of the prior random variables,
    the input, and the Categorical output ``y`` over ``NLabels`` classes.
  """
  # Priors: every weight and bias entry is an independent standard normal.
  W = ed.Normal(loc=tf.zeros([NPixels * NPixels, NHIDDEN]), scale=1.,
                name="W")
  b = ed.Normal(loc=tf.zeros([1, NHIDDEN]), scale=1., name="b")
  W_out = ed.Normal(loc=tf.zeros([NHIDDEN, NLabels]), scale=1.,
                    name="W_out")
  b_out = ed.Normal(loc=tf.zeros([1, NLabels]), scale=1., name="b_out")
  # Forward pass: one ReLU hidden layer, then a linear readout to logits.
  activations = tf.nn.relu(tf.matmul(x, W) + b)
  class_logits = tf.matmul(activations, W_out) + b_out
  y = ed.Categorical(logits=class_logits, name="y")
  return W, b, W_out, b_out, x, y
def model(NHIDDEN, x):
  """Bayesian network mapping x to per-pixel binary Categorical outputs.

  Args:
    NHIDDEN: Number of hidden units.
    x: Input tensor; matmul with W implies shape [batch, NPixels] —
       TODO confirm against callers.

  Returns:
    Tuple ``(W, b, W_out, b_out, x, y)`` where ``y`` is a Categorical of
    shape [batch, NPixels, 2] — two logits per pixel.
  """
  W = ed.Normal(loc=tf.zeros([NPixels, NHIDDEN]), scale=1., name="W")
  b = ed.Normal(loc=tf.zeros([1, NHIDDEN]), scale=1., name="b")
  W_out = ed.Normal(loc=tf.zeros([NHIDDEN, 2 * NPixels]), scale=1.,
                    name="W_out")
  b_out = ed.Normal(loc=tf.zeros([1, 2 * NPixels]), scale=1., name="b_out")
  hidden_layer = tf.nn.relu(tf.matmul(x, W) + b)
  out = tf.matmul(hidden_layer, W_out) + b_out
  # Bug fix: the batch size was read from the free global `x_batch` instead
  # of the function argument `x`, silently coupling the model to outer
  # scope and breaking it for any other input tensor.
  y = ed.Categorical(logits=tf.reshape(
      out, [tf.shape(x)[0], NPixels, 2]), name="y")
  return W, b, W_out, b_out, x, y
# Monty Hall problem expressed as a TF1 probabilistic graph (edward2);
# the graph is written out for inspection in TensorBoard.
import tensorflow as tf
from tensorflow_probability import edward2 as ed

# Number of independent simulated games.
N = 10000
# Door hiding the car: uniform over doors {0, 1, 2}, one draw per game.
car_door = ed.Categorical(probs=tf.constant([1. / 3., 1. / 3., 1. / 3.]),
                          sample_shape=N, name='car_door')
# Contestant's initial pick: also uniform over {0, 1, 2}.
picked_door = ed.Categorical(probs=tf.constant([1. / 3., 1. / 3., 1. / 3.]),
                             sample_shape=N, name='picked_door')
# Fair coin the host uses to break ties when both unpicked doors hide goats.
preference = ed.Bernoulli(probs=tf.constant(0.5), sample_shape=N,
                          name='preference')
# Host opens a goat door. Door labels satisfy 0 + 1 + 2 = 3, so when
# car != pick the single remaining door is 3 - car - pick. When the
# contestant picked the car, the host chooses between the two goat doors
# via `preference`:
#   car == 2 -> goats {0, 1}: preference
#   car == 1 -> goats {0, 2}: 2 * preference
#   car == 0 -> goats {1, 2}: 1 + preference
host_choice = tf.where(tf.not_equal(car_door, picked_door),
                       3 - car_door - picked_door,
                       tf.where(
                           tf.equal(car_door, 2 * tf.ones(N, dtype=tf.int32)),
                           preference,
                           tf.where(
                               tf.equal(car_door, tf.ones(N, dtype=tf.int32)),
                               2 * preference,
                               1 + preference)),
                       name='host_choice')
#changed_door = 3 - host_choice - picked_door
# Door obtained by switching: the one neither picked nor opened (again
# using the labels-sum-to-3 identity); tf.subtract used so the node is
# named in the dumped graph.
changed_door = tf.subtract(tf.subtract(3, host_choice), picked_door,
                           name='changed_door')
# Write the default graph so it can be visualized with TensorBoard.
writer = tf.summary.FileWriter('./graphs_tfp', tf.get_default_graph())