Example #1
import numpy
import matplotlib.pyplot as plt
from blocks.filter import VariableFilter

def showcase(cg, output_name="last_apply_output"):
  test_ds = get_data_stream(False, BATCH_SIZE)
  # Run the first test batch through the graph and plot the reconstructions.
  for image in next(test_ds.get_epoch_iterator()):
    # Substitute the concrete batch for the graph's symbolic input.
    cg2 = cg.replace({cg.inputs[0]: numpy.asmatrix(image)})
    # Take the last variable whose Theano name matches output_name.
    out = VariableFilter(theano_name_regex=output_name)(cg2.variables)[-1]
    plot_batch(image, out.eval())
  plt.close()
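
Example #1 assumes get_data_stream, plot_batch, and BATCH_SIZE are defined at module level. A minimal sketch of what those helpers might look like, assuming a Fuel MNIST stream and matplotlib (the names, shapes, and layout here are illustrative guesses, not the original definitions):

import matplotlib.pyplot as plt
from fuel.datasets import MNIST
from fuel.schemes import SequentialScheme, ShuffledScheme
from fuel.streams import DataStream
from fuel.transformers import Flatten

BATCH_SIZE = 100

def get_data_stream(train, batch_size):
  # Shuffle the training set; walk the test set sequentially.
  dataset = MNIST(which_sets=("train",) if train else ("test",))
  scheme = (ShuffledScheme if train else SequentialScheme)(
      dataset.num_examples, batch_size)
  # Flatten 28x28 images to 784-dimensional rows.
  return Flatten(DataStream.default_stream(dataset, iteration_scheme=scheme),
                 which_sources=("features",))

def plot_batch(images, reconstructions, side=28):
  # Originals on the top row, reconstructions below them.
  n = min(len(images), 10)
  _, axes = plt.subplots(2, n, figsize=(n, 2))
  for i in range(n):
    axes[0, i].imshow(images[i].reshape(side, side), cmap="gray")
    axes[1, i].imshow(reconstructions[i].reshape(side, side), cmap="gray")
    axes[0, i].axis("off")
    axes[1, i].axis("off")
  plt.show()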
Example #2
def get_cost(latent_dim=J):
  log_sigma_sq, mu, x = encoder_network(latent_dim)
  sigma_sq = tensor.exp(log_sigma_sq)
  # Reparameterization trick: z = mu + sigma * eps, eps ~ N(0, I).
  eps = rng.normal((BATCH_SIZE, latent_dim))
  z = mu + tensor.sqrt(sigma_sq) * eps
  y = decoder_network(z)  # TODO: L > 1
  # Bernoulli log-likelihood of the input given the decoder output.
  log_p_x_z = tensor.sum(x * tensor.log(y) + (1 - x) * tensor.log(1 - y), axis=1)
  # KL(q(z|x) || N(0, I)) in closed form.
  KL = -0.5 * tensor.sum(1 + log_sigma_sq - mu * mu - sigma_sq, axis=1)
  cost = -tensor.mean(log_p_x_z - KL)  # negative ELBO, mean over the batch
  cost.name = "cost"
  return cost
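
For reference, get_cost computes the negative evidence lower bound (ELBO) of a variational autoencoder with a diagonal-Gaussian encoder and a Bernoulli decoder (Kingma & Welling, 2013). Written out, with the reparameterization used above:

  \mathcal{L}(x) = \mathbb{E}_{q(z \mid x)}\left[\log p(x \mid z)\right]
                 - D_{\mathrm{KL}}\left(q(z \mid x) \,\|\, \mathcal{N}(0, I)\right),
  \qquad z = \mu + \sigma \odot \epsilon, \quad \epsilon \sim \mathcal{N}(0, I),

  D_{\mathrm{KL}} = -\frac{1}{2} \sum_{j=1}^{J}
      \left(1 + \log \sigma_j^2 - \mu_j^2 - \sigma_j^2\right).

The returned cost is -\mathcal{L}(x) averaged over the batch, i.e. -tensor.mean(log_p_x_z - KL).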

from blocks.graph import ComputationGraph
from blocks.algorithms import GradientDescent, Adam
from blocks.filter import VariableFilter
from blocks.main_loop import MainLoop
from blocks.extensions import FinishAfter, Printing, ProgressBar
from blocks.extensions.monitoring import TrainingDataMonitoring

cost = get_cost()
cg = ComputationGraph(cost)
gd = GradientDescent(cost=cost, parameters=cg.parameters, step_rule=Adam())
monitor = TrainingDataMonitoring([cost], after_epoch=True)
main_loop = MainLoop(
    data_stream=get_data_stream(True, BATCH_SIZE), algorithm=gd,
    extensions=[monitor, FinishAfter(after_n_epochs=10), ProgressBar(), Printing()])

main_loop.run()

showcase(cg, "logistic")  # match the decoder's Logistic output variable by name
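
Example #2 also assumes encoder_network, decoder_network, rng, and J are defined elsewhere. A minimal sketch of how they could be built with Blocks bricks (the sizes, initializations, and the _mlp helper are assumptions, not the original code); a Logistic output brick is what gives the decoder output a Theano name matching the "logistic" regex passed to showcase:

from theano import tensor
from theano.sandbox.rng_mrg import MRG_RandomStreams
from blocks.bricks import MLP, Rectifier, Identity, Logistic
from blocks.initialization import IsotropicGaussian, Constant

J = 20  # latent dimensionality
rng = MRG_RandomStreams(seed=1234)

def _mlp(dims, activations):
  mlp = MLP(activations=activations, dims=dims,
            weights_init=IsotropicGaussian(0.01), biases_init=Constant(0))
  mlp.initialize()
  return mlp

def encoder_network(latent_dim):
  # Map 784-dim inputs to the mean and log-variance of q(z|x).
  x = tensor.matrix("features")
  h = _mlp([784, 500, 2 * latent_dim], [Rectifier(), Identity()]).apply(x)
  mu, log_sigma_sq = h[:, :latent_dim], h[:, latent_dim:]
  return log_sigma_sq, mu, x

def decoder_network(z):
  # Map latent samples to Bernoulli means over the 784 pixels.
  return _mlp([J, 500, 784], [Rectifier(), Logistic()]).apply(z)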