Example #1
def run_epoch(session, m, data, eval_op, config, verbose=False):
    """Run the model on the given data."""
    epoch_size = ((len(data) // m.batch_size) - 1) // m.num_steps
    start_time = time.time()
    costs = 0.0
    iters = 0
    state = [x.eval() for x in m.initial_state]
    for step, (x, y) in enumerate(
            data_iterator(data, m.batch_size, m.num_steps)):
        # Fresh dropout masks are sampled for every minibatch.
        noise_x, noise_i, noise_h, noise_o = get_noise(x, m, config.drop_x,
                                                       config.drop_i,
                                                       config.drop_h,
                                                       config.drop_o)
        feed_dict = {
            m.input_data: x,
            m.targets: y,
            m.noise_x: noise_x,
            m.noise_i: noise_i,
            m.noise_h: noise_h,
            m.noise_o: noise_o
        }
        feed_dict.update(
            {m.initial_state[i]: state[i]
             for i in range(m.num_layers)})
        cost, state, _ = session.run([m.cost, m.final_state, eval_op],
                                     feed_dict)
        costs += cost
        iters += m.num_steps

        if verbose and step % (epoch_size // 10) == 10:
            print("%.3f perplexity: %.3f speed: %.0f wps" %
                  (step * 1.0 / epoch_size, np.exp(costs / iters),
                   iters * m.batch_size / (time.time() - start_time)))

    return np.exp(costs / iters)
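
All of these examples iterate over minibatches produced by a data_iterator helper that is not part of the snippet. The sketch below is only an assumption about its shape: a PTB-style batcher that cuts a flat list of token ids into batch_size parallel streams and yields num_steps-long input/target windows; names and details are guesses, not the original implementation.

import numpy as np

def data_iterator(raw_data, batch_size, num_steps):
    """Hypothetical PTB-style batcher yielding (x, y) pairs of shape (batch_size, num_steps)."""
    raw_data = np.array(raw_data, dtype=np.int32)
    batch_len = len(raw_data) // batch_size
    data = raw_data[:batch_size * batch_len].reshape(batch_size, batch_len)
    # This matches the epoch_size formula used in run_epoch above.
    epoch_size = (batch_len - 1) // num_steps
    for i in range(epoch_size):
        x = data[:, i * num_steps:(i + 1) * num_steps]
        y = data[:, i * num_steps + 1:(i + 1) * num_steps + 1]
        yield (x, y)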
Example #2
def run_mc_epoch(seed, session, m, data, eval_op, config, mc_steps, verbose=False):
  """Run the model with noise on the given data multiple times for MC evaluation."""
  n_steps = len(data)
  all_probs = np.array([0.0]*n_steps)
  sum_probs = np.array([0.0]*n_steps)
  mc_i = 1
  print("Total MC steps to do:", mc_steps)
  if not os.path.isdir('./probs'):
    print('Creating probs directory')
    os.mkdir('./probs')
  while mc_i <= mc_steps:
    print("MC sample number:", mc_i)
    epoch_size = ((len(data) // m.batch_size) - 1) // m.num_steps
    start_time = time.time()
    costs = 0.0
    iters = 0
    state = [x.eval() for x in m.initial_state]

    for step, (x, y) in enumerate(data_iterator(data, m.batch_size, m.num_steps)):
      if step == 0:
        # Sample one set of dropout masks per MC pass and keep it fixed for the whole epoch.
        noise_x, noise_i, noise_h, noise_o = get_noise(x, m, config.drop_x, config.drop_i,
                                                       config.drop_h, config.drop_o)
      feed_dict = {m.input_data: x, m.targets: y,
                   m.noise_x: noise_x, m.noise_i: noise_i, m.noise_h: noise_h, m.noise_o: noise_o}
      feed_dict.update({m.initial_state[i]: state[i] for i in range(m.num_layers)})
      cost, state, _ = session.run([m.cost, m.final_state, eval_op], feed_dict)
      costs += cost
      iters += m.num_steps
      all_probs[step] = np.exp(-cost)
      if verbose and step % (epoch_size // 10) == 10:
        print("%.3f perplexity: %.3f speed: %.0f wps" % (step * 1.0 / epoch_size, np.exp(costs / iters),
                                                         iters * m.batch_size / (time.time() - start_time)))
    perplexity = np.exp(costs / iters)
    print("Perplexity:", perplexity)
    if perplexity < 500:
      # Accept the sample only if the pass looks reasonable; otherwise rerun with fresh dropout masks.
      savefile = 'probs/' + str(seed) + '_' + str(mc_i)
      print("Accepted. Saving to:", savefile)
      np.save(savefile, all_probs)
      sum_probs += all_probs
      mc_i += 1

  # Average the per-step probabilities over the MC samples and convert back to a perplexity.
  return np.exp(np.mean(-np.log(np.clip(sum_probs / mc_steps, 1e-10, 1 - 1e-10))))
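
Examples #1 and #2 also depend on a get_noise helper that is not shown. The version below is only a sketch of the idea (variational dropout masks sampled once per call and rescaled by the keep probability); the mask shapes assume a model attribute m.size for the embedding/hidden width, which may not match the original code.

import numpy as np

def get_noise(x, m, drop_x, drop_i, drop_h, drop_o):
    """Hypothetical mask generator: one Bernoulli dropout mask per sequence, reused across time steps."""
    keep_x, keep_i = 1.0 - drop_x, 1.0 - drop_i
    keep_h, keep_o = 1.0 - drop_h, 1.0 - drop_o
    # Word-level mask over the input tokens, plus per-unit masks for the
    # input, recurrent and output connections, each scaled by 1/keep_prob.
    noise_x = (np.random.random_sample(x.shape) < keep_x) / keep_x
    noise_i = (np.random.random_sample((m.batch_size, m.size)) < keep_i) / keep_i
    noise_h = (np.random.random_sample((m.batch_size, m.size)) < keep_h) / keep_h
    noise_o = (np.random.random_sample((m.batch_size, m.size)) < keep_o) / keep_o
    return (noise_x.astype(np.float32), noise_i.astype(np.float32),
            noise_h.astype(np.float32), noise_o.astype(np.float32))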
Example #3
def run_epoch(m, data, config, is_train, verbose=False, log=None):
    """Run the model on the given data."""
    epoch_size = ((len(data) // config.batch_size) - 1) // config.num_steps
    start_time = time.time()
    costs = 0.0
    iters = 0
    m.reset_hidden_state()
    for step, (x, y) in enumerate(
            data_iterator(data, config.batch_size, config.num_steps)):
        if is_train:
            noise_x = get_noise_x(x, config.drop_x)
            cost = m.train(x, y, noise_x)
        else:
            cost = m.evaluate(x, y)
        costs += cost
        iters += config.num_steps
        if verbose and step % (epoch_size // 10) == 10:
            log.info("%.3f perplexity: %.3f speed: %.0f wps" %
                     (step * 1.0 / epoch_size, np.exp(costs / iters),
                      iters * config.batch_size / (time.time() - start_time)))
    return np.exp(costs / iters)
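
The get_noise_x helper used here (and in the variant below) is likewise not shown. A minimal sketch, under the assumption that it only applies word-level dropout to the input batch:

import numpy as np

def get_noise_x(x, drop_x):
    """Hypothetical word-level dropout mask for the input batch x."""
    keep_x = 1.0 - drop_x
    # Drop whole tokens with probability drop_x and rescale so the
    # expected value of the masked input is unchanged.
    return ((np.random.random_sample(x.shape) < keep_x) / keep_x).astype(np.float32)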
Example #4
def run_epoch(data, config, is_train, verbose=False):
    """Run the model on the given data."""
    epoch_size = ((len(data) // config.batch_size) - 1) // config.num_steps
    start_time = time.time()
    costs = 0.0
    iters = 0
    # hidden_states, train and evaluate are assumed to be defined in the enclosing scope.
    for hidden_state in hidden_states:
        hidden_state.set_value(np.zeros_like(hidden_state.get_value()))
    for step, (x, y) in enumerate(
            data_iterator(data, config.batch_size, config.num_steps)):
        if is_train:
            noise_x = get_noise_x(x, config.drop_x)
            cost = train(x, y, noise_x)
        else:
            cost = evaluate(x, y)
        costs += cost
        iters += config.num_steps
        if verbose and step % (epoch_size // 10) == 10:
            print("%.3f perplexity: %.3f speed: %.0f wps" %
                  (step * 1.0 / epoch_size, np.exp(costs / iters),
                   iters * config.batch_size / (time.time() - start_time)))
    return np.exp(costs / iters)
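
For context, a driver loop for this last variant might look roughly like the sketch below; the Config fields and max_epochs value are assumptions, and train_data/valid_data are flat lists of token ids as in the other examples.

class Config(object):
    # Hypothetical hyper-parameters matching the fields referenced above.
    batch_size = 20
    num_steps = 35
    drop_x = 0.25
    max_epochs = 10

def main(train_data, valid_data):
    config = Config()
    for epoch in range(config.max_epochs):
        train_ppl = run_epoch(train_data, config, is_train=True, verbose=True)
        valid_ppl = run_epoch(valid_data, config, is_train=False)
        print("Epoch %d: train perplexity %.3f, valid perplexity %.3f" %
              (epoch + 1, train_ppl, valid_ppl))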