Example #1
0
 def create_graph():
     """Builds the dataset, model, and evaluation-bound tensors.

     Returns:
       A list whose first element is the average per-timestep true
       log-likelihood, followed by the outputs of create_bound().
     """
     observations, seq_lengths = datasets.create_chain_graph_dataset(
         config.batch_size,
         config.num_timesteps,
         steps_per_observation=1,
         state_size=1,
         transition_variance=config.variance,
         observation_variance=config.variance)
     hmm = ghmm.TrainableGaussianHMM(config.num_timesteps,
                                     config.proposal_type,
                                     transition_variances=config.variance,
                                     emission_variances=config.variance,
                                     random_seed=config.random_seed)
     # Exact log-likelihood under the true model, normalized per timestep
     # and averaged over the batch.
     avg_true_ll = tf.reduce_mean(
         hmm.likelihood(tf.squeeze(observations)) / tf.to_float(seq_lengths))
     return [avg_true_ll] + list(create_bound(hmm, observations, seq_lengths))
Example #2
0
 def create_graph():
     """Builds the training graph.

     Returns:
       A tuple (bound, true_ll, gap, train_op, global_step) where train_op
       applies one Adam update and increments the global step.
     """
     step = tf.train.get_or_create_global_step()
     observations, seq_lengths = datasets.create_chain_graph_dataset(
         config.batch_size,
         config.num_timesteps,
         steps_per_observation=1,
         state_size=1,
         transition_variance=config.variance,
         observation_variance=config.variance)
     hmm = ghmm.TrainableGaussianHMM(config.num_timesteps,
                                     config.proposal_type,
                                     transition_variances=config.variance,
                                     emission_variances=config.variance,
                                     random_seed=config.random_seed)
     loss, bound, true_ll, gap = create_losses(hmm, observations, seq_lengths)
     # Explicit compute/apply split (rather than minimize) keeps the
     # gradient list available for inspection if needed.
     optimizer = tf.train.AdamOptimizer(config.learning_rate)
     grads_and_vars = optimizer.compute_gradients(
         loss, var_list=tf.trainable_variables())
     train_op = optimizer.apply_gradients(grads_and_vars, global_step=step)
     return bound, true_ll, gap, train_op, step