Example #1
from blocks.bricks import Linear, Logistic
from blocks.initialization import Constant, IsotropicGaussian

def decoder_network(latent_sample, latent_dim=J):
  # Bernoulli decoder: map a latent sample back to 784 pixel probabilities (MNIST)
  hidden2 = get_typical_layer(latent_sample, latent_dim, 500, Logistic())
  hidden2_to_output = Linear(name="last", input_dim=500, output_dim=784)
  hidden2_to_output.weights_init = IsotropicGaussian(0.01)
  hidden2_to_output.biases_init = Constant(0)
  hidden2_to_output.initialize()
  # sigmoid output gives the per-pixel Bernoulli means
  return Logistic().apply(hidden2_to_output.apply(hidden2))
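The get_typical_layer helper called above is not shown on this page. Judging from how it is invoked (input variable, input size, output size, activation brick), it most likely builds and initializes a single Linear brick and applies the given activation; the sketch below is an assumption based on that calling pattern, not the original helper.

# Hypothetical sketch of the undisplayed get_typical_layer helper,
# inferred from how it is called in the examples on this page.
def get_typical_layer(input_var, input_dim, output_dim, activation):
  layer = Linear(input_dim=input_dim, output_dim=output_dim)
  layer.weights_init = IsotropicGaussian(0.01)
  layer.biases_init = Constant(0)
  layer.initialize()
  # apply the activation brick (e.g. Logistic) to the linear output
  return activation.apply(layer.apply(input_var))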
Example #2
from theano import tensor

def encoder_network(latent_dim=J):
  # Gaussian encoder: map a 784-dim input to the mean and log-variance
  # of the approximate posterior over the latent code
  x = tensor.matrix("features")
  hidden1 = get_typical_layer(x, 784, 500, Logistic())
  log_sigma_sq = get_typical_layer(hidden1, 500, latent_dim, Logistic())
  mu = get_typical_layer(hidden1, 500, latent_dim, Logistic())
  return (log_sigma_sq, mu, x)
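For reference, the two networks are usually connected with the reparameterization trick: draw a standard-normal epsilon, scale it by the encoder's standard deviation, shift by the mean, and feed the resulting sample to the decoder. The wiring below is a sketch under that assumption; MRG_RandomStreams, the seed, and the placeholder value of J are not part of the original examples.

# Hypothetical wiring of encoder and decoder via the reparameterization trick
from theano.sandbox.rng_mrg import MRG_RandomStreams

J = 20  # placeholder latent dimensionality; the original module defines J elsewhere

log_sigma_sq, mu, x = encoder_network(latent_dim=J)
rng = MRG_RandomStreams(seed=1234)
epsilon = rng.normal(size=mu.shape)                       # epsilon ~ N(0, I)
latent_sample = mu + tensor.exp(0.5 * log_sigma_sq) * epsilon
reconstruction = decoder_network(latent_sample, latent_dim=J)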