コード例 #1
0
 def testTransformerAEOnDVQ(self):
     """TransformerAE with a "dvq" bottleneck yields logits of the expected shape."""
     batch_size, input_length, target_length, vocab_size = 3, 5, 16, 9
     hparams = transformer_vae.transformer_ae_small()
     hparams.bottleneck_kind = "dvq"
     hparams.dp_strength = 0
     p_hparams = problem_hparams.test_problem_hparams(
         vocab_size, vocab_size, hparams)
     hparams.problem_hparams = p_hparams
     # Random token ids in [0, vocab_size), shaped (batch, length, 1, 1).
     inputs = np.random.randint(
         vocab_size, size=(batch_size, input_length, 1, 1))
     targets = np.random.randint(
         vocab_size, size=(batch_size, target_length, 1, 1))
     features = {
         "inputs": tf.constant(inputs, dtype=tf.int32),
         "targets": tf.constant(targets, dtype=tf.int32),
         "target_space_id": tf.constant(1, dtype=tf.int32),
     }
     tf.train.create_global_step()
     model = transformer_vae.TransformerAE(
         hparams, tf.estimator.ModeKeys.TRAIN, p_hparams)
     logits, _ = model(features)
     with self.test_session() as sess:
         sess.run(tf.global_variables_initializer())
         self.assertEqual(
             sess.run(logits).shape,
             (batch_size, target_length, 1, 1, vocab_size))
コード例 #2
0
 def testTransformerAEOnDVQ(self):
   """TransformerAE with a "dvq" bottleneck yields logits of the expected shape."""
   batch_size = 3
   input_length = 5
   target_length = 16
   vocab_size = 9
   hparams = transformer_vae.transformer_ae_small()
   hparams.bottleneck_kind = "dvq"
   hparams.dp_strength = 0
   p_hparams = problem_hparams.test_problem_hparams(vocab_size, vocab_size)
   hparams.problem_hparams = p_hparams
   # np.random.random_integers was deprecated in NumPy 1.11 and removed in
   # NumPy 1.25.  `-1 + random_integers(v)` drew from [0, v); randint(v)
   # draws from exactly the same range.
   inputs = np.random.randint(
       vocab_size, size=(batch_size, input_length, 1, 1))
   targets = np.random.randint(
       vocab_size, size=(batch_size, target_length, 1, 1))
   features = {
       "inputs": tf.constant(inputs, dtype=tf.int32),
       "targets": tf.constant(targets, dtype=tf.int32),
       "target_space_id": tf.constant(1, dtype=tf.int32),
   }
   tf.train.create_global_step()
   model = transformer_vae.TransformerAE(hparams, tf.estimator.ModeKeys.TRAIN,
                                         p_hparams)
   logits, _ = model(features)
   with self.test_session() as session:
     session.run(tf.global_variables_initializer())
     logits_val = session.run(logits)
     self.assertEqual(logits_val.shape,
                      (batch_size, target_length, 1, 1, vocab_size))
コード例 #3
0
def cycle_gan_small():
    """Set of hyperparameters for a small cycle-GAN model."""
    hparams = transformer_vae.transformer_ae_small()
    # `2048 * 2 / 2` is true division in Python 3 and assigns the *float*
    # 2048.0; batch_size must be an int (the sibling configs use 2048).
    hparams.batch_size = 2048
    hparams.input_modalities = "inputs:symbol:identity"
    hparams.target_modality = "symbol:identity"
    hparams.weight_decay = 3.0
    hparams.learning_rate = 0.05
    hparams.kl_warmup_steps = 5000
    hparams.learning_rate_warmup_steps = 3000
    hparams.add_hparam("vocab_size", 66)  # Vocabulary size, need to set here.
    hparams.add_hparam("cycle_loss_multiplier1", 10.0)
    hparams.add_hparam("cycle_loss_multiplier2", 10.0)
    return hparams
コード例 #4
0
ファイル: cycle_gan.py プロジェクト: kltony/tensor2tensor
def cycle_gan_small():
  """Set of hyperparameters."""
  hparams = transformer_vae.transformer_ae_small()
  # Overrides of hyperparameters already present in the base set.
  overrides = {
      "batch_size": 2048,
      "input_modalities": "inputs:symbol:identity",
      "target_modality": "symbol:identity",
      "weight_decay": 3.0,
      "learning_rate": 0.05,
      "kl_warmup_steps": 5000,
      "learning_rate_warmup_steps": 3000,
  }
  for key, value in overrides.items():
    setattr(hparams, key, value)
  # Hyperparameters new to this config must be registered explicitly.
  hparams.add_hparam("vocab_size", 66)  # Vocabulary size, need to set here.
  hparams.add_hparam("cycle_loss_multiplier1", 10.0)
  hparams.add_hparam("cycle_loss_multiplier2", 10.0)
  return hparams
コード例 #5
0
def cycle_gan_small():
  """Set of hyperparameters."""
  hparams = transformer_vae.transformer_ae_small()
  hparams.batch_size = 2048
  # Identity-symbol modality on both sides of the problem.
  hparams.modality = {
      "inputs": modalities.ModalityType.IDENTITY_SYMBOL,
      "targets": modalities.ModalityType.IDENTITY_SYMBOL,
  }
  # Training-schedule overrides of the base hyperparameters.
  for key, value in (("weight_decay", 3.0),
                     ("learning_rate", 0.05),
                     ("kl_warmup_steps", 5000),
                     ("learning_rate_warmup_steps", 3000)):
    setattr(hparams, key, value)
  # Hyperparameters new to this config must be registered explicitly.
  hparams.add_hparam("vocab_size", 66)  # Vocabulary size, need to set here.
  hparams.add_hparam("cycle_loss_multiplier1", 10.0)
  hparams.add_hparam("cycle_loss_multiplier2", 10.0)
  return hparams
コード例 #6
0
def cycle_gan_yr():
  """Set of hyperparameters."""
  vocab_sz = 2000  # earlier values tried: 6381, 1471
  hparams = transformer_vae.transformer_ae_small()
  # Shrink the model and set modalities/training schedule for this run.
  for name, value in (("batch_size", 2048),
                      ("hidden_size", 128),
                      ("filter_size", 128),
                      ("num_hidden_layers", 2),
                      ("v_size", 128),
                      ("input_modalities", "inputs:symbol:identity"),
                      ("target_modality", "symbol:identity"),
                      ("weight_decay", 3.0),
                      ("learning_rate", 0.05),
                      ("kl_warmup_steps", 5000),
                      ("learning_rate_warmup_steps", 3000)):
    setattr(hparams, name, value)
  # Hyperparameters new to this config must be registered explicitly.
  hparams.add_hparam("vocab_size", vocab_sz)
  hparams.add_hparam("cycle_loss_multiplier1", 10.0)
  hparams.add_hparam("cycle_loss_multiplier2", 10.0)
  return hparams