Example #1
 def setUp(self):
     super(EvaluateTest, self).setUp()
     self.model_dir = self.create_tempdir(
         "model", cleanup=absltest.TempFileCleanup.OFF).full_path
     model_config = resources.get_file(
         "config/tests/methods/unsupervised/train_test.gin")
     train.train_with_gin(self.model_dir, True, [model_config])
     self.output_dir = self.create_tempdir(
         "output", cleanup=absltest.TempFileCleanup.OFF).full_path
     postprocess_config = resources.get_file(
         "config/tests/postprocessing/postprocess_test_configs/mean.gin")
     postprocess.postprocess_with_gin(self.model_dir, self.output_dir, True,
                                      [postprocess_config])
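This setUp trains a small model and then post-processes it into a mean representation under self.output_dir, so each test method can run a metric on that representation. A minimal sketch of such a test method, assuming evaluate is disentanglement_lib.evaluation.evaluate; the metric config path used here is hypothetical:

def test_metric(self):
  # Hypothetical metric gin config; the exact path is an assumption.
  metric_config = resources.get_file(
      "config/tests/methods/unsupervised/mig.gin")
  result_dir = self.create_tempdir("results").full_path
  # Evaluate the post-processed representation written in setUp.
  evaluate.evaluate_with_gin(self.output_dir, result_dir, True,
                             [metric_config])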
Example #2
 def get_model_config(self, model_num=0):
     """Returns model bindings and config file."""
     config = get_config()[model_num]
     model_bindings = h.to_bindings(config)
     model_config_file = resources.get_file(
         "config/balanced_vae_study_v1/model_configs/shared.gin")
     return model_bindings, model_config_file
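Here get_config() is assumed to return the study's hyperparameter sweep and h.to_bindings to turn one setting into a list of gin binding strings. A sketch of how the returned pair is typically fed into training; study and model_dir are placeholders:

bindings, config_file = study.get_model_config(model_num=0)
# Train one model of the sweep with the shared config plus its bindings.
train.train_with_gin(model_dir, True, [config_file], bindings)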
Example #3
def _s2_dip_config_generator():
    """Yields all model configurations that should be tested."""
    model_config_path = resources.get_file(
        "config/tests/methods/semi_supervised/train_test.gin")
    # Test for s2_dip_vae.
    s2_dip_vae_i = [
        "model.model = @s2_dip_vae", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9", "s2_dip_vae.lambda_d_factor = 10",
        "s2_dip_vae.dip_type = 'i'", "s2_dip_vae.lambda_od = 10.",
        "s2_dip_vae.gamma_sup = 4", "annealer.iteration_threshold = 1",
        "model.model_seed = 0", "model.unsupervised_data_seed = 0",
        "model.supervised_data_seed = 0", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9"
    ]
    for anneal_loss in ANNEAL_LOSS_LIST:
        yield [model_config_path], s2_dip_vae_i + anneal_loss

    s2_dip_vae_ii = [
        "model.model = @s2_dip_vae", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9", "s2_dip_vae.lambda_d_factor = 1",
        "s2_dip_vae.dip_type = 'ii'", "s2_dip_vae.lambda_od = 10.",
        "s2_dip_vae.gamma_sup = 4", "annealer.iteration_threshold = 1",
        "model.model_seed = 0", "model.unsupervised_data_seed = 0",
        "model.supervised_data_seed = 0", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9"
    ]
    for anneal_loss in ANNEAL_LOSS_LIST:
        yield [model_config_path], s2_dip_vae_ii + anneal_loss
Example #4
 def setUp(self):
     super(PostprocessTest, self).setUp()
     self.model_dir = self.create_tempdir(
         "model", cleanup=absltest.TempFileCleanup.OFF).full_path
     train.train_with_gin(self.model_dir, True, [
         resources.get_file(
             "config/tests/methods/unsupervised/train_test.gin")
     ], [])
Example #5
 def get_model_config(self, model_num=0):
     """Returns model bindings and config file."""
     config = get_config()[model_num]
     model_bindings = h.to_bindings(config)
     model_config_file = resources.get_file(
         "config/abstract_reasoning_study_v1/stage1/model_configs/shared.gin"
     )
     return model_bindings, model_config_file
Example #6
def question_mark():
  """Returns an image of the question mark."""
  # Cache the image so it is not always reloaded.
  if QUESTION_MARK[0] is None:
    with tf.gfile.Open(
        resources.get_file("google/abstract_reasoning/data/question_mark.png"),
        "rb") as f:
      QUESTION_MARK[0] = np.array(Image.open(f).convert("RGB")) * 1.0 / 255.
  return QUESTION_MARK[0]
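This snippet assumes a module-level one-element list used as a cache and the TF1 file API; a sketch of the assumed definitions:

# Assumed module-level cache so the PNG is decoded only once.
QUESTION_MARK = [None]
# tf.gfile.Open is the TF1 spelling; under TF2 the equivalent is
# tf.io.gfile.GFile.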
Example #7
def _config_generator():
  """Yields all model configurations that should be tested."""
  model_config_path = resources.get_file(
      "config/tests/methods/unsupervised/train_test.gin")
  # Test different losses.
  for loss in ["@bernoulli_loss", "@l2_loss"]:
    rec_loss = ["reconstruction_loss.loss_fn = " + loss]
    # Test different activations.
    for act in ["'logits'", "'tanh'"]:
      rec_loss += ["reconstruction_loss.activation = " + act]
      latent_dim = ["encoder.num_latent = 10"]
      # Test different architectures.
      for encoder, decoder in [("@fc_encoder", "@fc_decoder"),
                               ("@conv_encoder", "@deconv_decoder")]:
        architectures = [
            "encoder.encoder_fn = " + encoder,
            "decoder.decoder_fn = " + decoder
        ]
        structure = rec_loss + architectures + latent_dim
        # Train a BetaVAE with all these settings.
        beta_vae = ["model.model = @vae()", "vae.beta = 10."]
        yield [model_config_path], beta_vae + structure

  # Test the other models.
  # Test AnnealedVAE.
  annealed_vae = [
      "model.model = @annealed_vae()", "annealed_vae.c_max = 25",
      "annealed_vae.iteration_threshold = 100000", "annealed_vae.gamma = 1000"
  ]
  yield [model_config_path], annealed_vae

  # Test FactorVAE.
  factor_vae = [
      "model.model = @factor_vae()",
      "discriminator.discriminator_fn = @fc_discriminator",
      "discriminator_optimizer.optimizer_fn = @AdamOptimizer",
      "factor_vae.gamma = 10."
  ]
  yield [model_config_path], factor_vae

  # Test DIP-VAE.
  dip_vae_i = [
      "model.model = @dip_vae()", "dip_vae.lambda_d_factor = 10",
      "dip_vae.dip_type = 'i'", "dip_vae.lambda_od = 10."
  ]
  yield [model_config_path], dip_vae_i

  dip_vae_ii = [
      "model.model = @dip_vae()", "dip_vae.lambda_d_factor = 1",
      "dip_vae.dip_type = 'ii'", "dip_vae.lambda_od = 10."
  ]
  yield [model_config_path], dip_vae_ii

  # Test BetaTCVAE.
  beta_tc_vae = ["model.model = @beta_tc_vae()", "beta_tc_vae.beta = 10."]
  yield [model_config_path], beta_tc_vae
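Each generator above yields a ([gin config files], [gin bindings]) pair. A minimal sketch of how such pairs are usually consumed in a parameterized training test; the class and method names are placeholders:

from absl.testing import parameterized
from disentanglement_lib.methods.unsupervised import train


class TrainTest(parameterized.TestCase):

  @parameterized.parameters(list(_config_generator()))
  def test_train_model(self, gin_configs, gin_bindings):
    # Each parameter set trains one small model end to end.
    model_dir = self.create_tempdir().full_path
    train.train_with_gin(model_dir, True, gin_configs, gin_bindings)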
Example #8
def _supervised_config_generator():
    """Yields all model configurations that should be tested."""
    model_config_path = resources.get_file(
        "config/tests/methods/semi_supervised/train_test.gin")
    # Test for the plain supervised model.
    supervised = [
        "model.model = @supervised", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9", "annealer.iteration_threshold = 1",
        "model.model_seed = 0", "model.unsupervised_data_seed = 0",
        "model.supervised_data_seed = 0", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9"
    ]
    for anneal_loss in ANNEAL_LOSS_LIST:
        yield [model_config_path], supervised + anneal_loss
Example #9
    def setUp(self):
        super(EvaluateTest, self).setUp()
        self.model1_dir = self.create_tempdir(
            "model1/model", cleanup=absltest.TempFileCleanup.OFF).full_path
        self.model2_dir = self.create_tempdir(
            "model2/model", cleanup=absltest.TempFileCleanup.OFF).full_path
        model_config = resources.get_file(
            "config/tests/methods/unsupervised/train_test.gin")
        gin.clear_config()  # Clear gin state so bindings from earlier tests do not leak.
        train.train_with_gin(self.model1_dir, True, [model_config])
        train.train_with_gin(self.model2_dir, True, [model_config])

        self.output_dir = self.create_tempdir(
            "output", cleanup=absltest.TempFileCleanup.OFF).full_path
Example #10
 def test_visualize_sigmoid(self, activation):
   activation_binding = (
       "reconstruction_loss.activation = '{}'".format(activation))
   self.model_dir = self.create_tempdir(
       "model_{}".format(activation),
       cleanup=absltest.TempFileCleanup.OFF).full_path
   train.train_with_gin(self.model_dir, True, [
       resources.get_file("config/tests/methods/unsupervised/train_test.gin")
   ], [activation_binding])
   visualize_model.visualize(
       self.model_dir,
       self.create_tempdir("visualization_{}".format(activation)).full_path,
       True,
       num_animations=1,
       num_frames=4)
Example #11
def _vae_config_generator():
    """Yields all model configurations that should be tested."""
    model_config_path = resources.get_file(
        "config/tests/methods/semi_supervised/train_test.gin")
    # Test for vae: both unsupervised and s2 methods run with the s2
    # training_lib.
    vae = [
        "model.model = @vae", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9", "vae.beta = 10.",
        "annealer.iteration_threshold = 1", "model.model_seed = 0",
        "model.unsupervised_data_seed = 0", "model.supervised_data_seed = 0",
        "model.num_labelled_samples = 100", "model.train_percentage = 0.9"
    ]
    for anneal_loss in ANNEAL_LOSS_LIST:
        yield [model_config_path], vae + anneal_loss
Example #12
def add_below(image, padding_px=10, value=None):
  """Adds a footer below."""
  if len(image.shape) == 2:
    image = np.expand_dims(image, -1)
  if image.shape[2] == 1:
    image = np.repeat(image, 3, 2)
  if image.shape[2] != 3:
    raise ValueError("Could not convert image to have three channels.")
  with tf.gfile.Open(resources.get_file("disentanglement_lib.png"), "rb") as f:
    footer = np.array(Image.open(f).convert("RGB")) * 1.0 / 255.
  missing_px = image.shape[1] - footer.shape[1]
  if missing_px < 0:
    return image
  if missing_px > 0:
    padding_arr = padding_array(footer, missing_px, axis=1, value=value)
    footer = np.concatenate([padding_arr, footer], axis=1)
  return padded_stack([image, footer], padding_px, axis=0, value=value)
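add_below depends on padding_array and padded_stack helpers from the same visualization module and on the bundled disentanglement_lib.png footer. A hypothetical call, assuming a grayscale NumPy image with values in [0, 1]:

import numpy as np

image = np.random.uniform(size=(128, 256))  # grayscale; converted to RGB inside
branded = add_below(image, padding_px=10, value=1.0)  # pad with white pixels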
Example #13
def _s2_factor_config_generator():
    """Yields all model configurations that should be tested."""
    model_config_path = resources.get_file(
        "config/tests/methods/semi_supervised/train_test.gin")
    # Test for s2_factor_vae.
    s2_factor_vae = [
        "model.model = @s2_factor_vae", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9", "s2_factor_vae.gamma = 4",
        "s2_factor_vae.gamma_sup = 4", "annealer.iteration_threshold = 1",
        "discriminator.discriminator_fn = @fc_discriminator",
        "discriminator_optimizer.optimizer_fn = @AdamOptimizer",
        "model.model_seed = 0", "model.unsupervised_data_seed = 0",
        "model.supervised_data_seed = 0", "model.num_labelled_samples = 100",
        "model.train_percentage = 0.9"
    ]
    for anneal_loss in ANNEAL_LOSS_LIST:
        yield [model_config_path], s2_factor_vae + anneal_loss
Example #14
def _config_generator():
    """Yields all model configurations that should be tested."""
    model_config_path = resources.get_file(
        "config/tests/methods/unsupervised/train_test.gin")
    for model in MODELS_TEST:
        yield [model_config_path], model
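MODELS_TEST is defined elsewhere in the test module; it is assumed to be a list in which each entry is itself a list of gin bindings describing one model variant, for example:

# Hypothetical contents of MODELS_TEST.
MODELS_TEST = [
    ["model.model = @vae()", "vae.beta = 4."],
    ["model.model = @beta_tc_vae()", "beta_tc_vae.beta = 6."],
]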
Example #15
 def get_model_config(self, model_num=0):
     """Returns model bindings and config file."""
     return [], resources.get_file(
         "config/tests/methods/unsupervised/train_test.gin")
Example #16
 def get_postprocess_config_files(self):
     """Returns postprocessing config files."""
     return [
         resources.get_file(
             "config/unsupervised_study_v1/postprocess_configs/mean.gin")
     ]