Example #1
 def test_flatten(self):
     """Tests flatten_config, config_to_string and unflatten_dict."""
     flat_config_str = 'dataset.batch_size=32,dataset.epoch_size=96,dataset.fn=TokenCopyTask,dataset.min_length=10,dataset.vocab_size=10,model.cell.fn=DAGCell,model.optimizer.learning_rate=0.001'  # pylint: disable=line-too-long
     flat_config = {
         'dataset.min_length': 10,
         'dataset.fn': 'TokenCopyTask',
         'model.cell.fn': 'DAGCell',
         'model.optimizer.learning_rate': 0.001,
         'dataset.vocab_size': 10,
         'dataset.epoch_size': 96,
         'dataset.batch_size': 32
     }
     nested_config = {
         'dataset': {
             'batch_size': 32,
             'epoch_size': 96,
             'fn': 'TokenCopyTask',
             'min_length': 10,
             'vocab_size': 10
         },
         'model': {
             'optimizer': {
                 'learning_rate': 0.001
             },
             'cell': {
                 'fn': 'DAGCell'
             }
         }
     }
     self.assertAllEqual(flat_config,
                         configurable.flatten_config(nested_config))
     self.assertAllEqual(flat_config_str,
                         configurable.config_to_string(nested_config))
     self.assertAllEqual(configurable.unflatten_dict(flat_config),
                         nested_config)
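Below is a minimal, self-contained sketch of the dotted-key convention the test above exercises. The flatten_config, unflatten_dict and config_to_string helpers here are illustrative re-implementations written for this example, not the configurable module's own code; they assume only the behaviour visible in the expected values.

def flatten_config(nested, prefix=''):
  """Collapses a nested dict into dotted keys, e.g. {'a': {'b': 1}} -> {'a.b': 1}."""
  flat = {}
  for key, value in nested.items():
    full_key = '%s.%s' % (prefix, key) if prefix else key
    if isinstance(value, dict):
      flat.update(flatten_config(value, prefix=full_key))
    else:
      flat[full_key] = value
  return flat


def unflatten_dict(flat):
  """Inverse of flatten_config: rebuilds the nesting from dotted keys."""
  nested = {}
  for dotted_key, value in flat.items():
    parts = dotted_key.split('.')
    node = nested
    for part in parts[:-1]:
      node = node.setdefault(part, {})
    node[parts[-1]] = value
  return nested


def config_to_string(nested):
  """Renders a nested config as sorted 'key=value' pairs joined by commas."""
  flat = flatten_config(nested)
  return ','.join('%s=%s' % (key, flat[key]) for key in sorted(flat))


nested = {'dataset': {'batch_size': 32}, 'model': {'cell': {'fn': 'DAGCell'}}}
assert unflatten_dict(flatten_config(nested)) == nested
print(config_to_string(nested))  # dataset.batch_size=32,model.cell.fn=DAGCell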
Example #2
  def _create_config(hparams):
    """Create trial config and save to disk.

    Args:
      hparams: Nondefault params to merge

    Returns:
      A configurable object `spec` instantiated from the final config.
        spec.config will return the config.
    """
    hparams = hparams or contrib_training.HParams()
    tuned_config = configurable.unflatten_dict(hparams.values())
    pprinter = pprint.PrettyPrinter()
    tf.logging.info('Provided extra params:\n%s',
                    pprinter.pformat(tuned_config))
    try:
      merged_config = configurable.merge(default_config, tuned_config)
      tf.logging.info('Tuned default config:\n%s', merged_config)
    except TypeError:
      tf.logging.info(
          'Do not provide the same config in both the config string and '
          'Vizier; this may lead to type errors.')
      raise

    # Instantiate a ConfigurableExperiment object.
    experiment_spec = configurable.Configurable.initialize(merged_config)
    tf.logging.info('Final config:\n%s', experiment_spec.config)

    return experiment_spec
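A hedged usage sketch for _create_config. The override names and values below are hypothetical; the sketch assumes only the dotted-key convention from Example #1 and that default_config is defined in the enclosing module, just as the function itself does.

# Hypothetical Vizier-style overrides expressed as dotted hyperparameter names.
overrides = contrib_training.HParams()
overrides.add_hparam('model.optimizer.learning_rate', 0.01)
overrides.add_hparam('dataset.batch_size', 64)

spec = _create_config(overrides)
final_config = spec.config  # the defaults merged with the overrides above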