# Test-method snippets from the uncertainty_baselines genomics model tests.
# They assume the enclosing test class provides `self.params`,
# `self.rand_data`, and `self.rand_labels`, and that TensorFlow (`tf`) and
# the `genomics_cnn` model module are imported at module level.
def testCreateModel(self):
    # A forward pass on random inputs should produce a positive mean loss.
    model = genomics_cnn.create_model(
        batch_size=self.params['batch_size'],
        len_seqs=self.params['seq_len'],
        num_classes=self.params['num_classes'],
        num_motifs=self.params['num_motifs'],
        len_motifs=self.params['len_motifs'],
        num_denses=self.params['num_denses'])

    logits = model(self.rand_data)
    loss = tf.reduce_mean(
        tf.keras.losses.sparse_categorical_crossentropy(
            y_true=self.rand_labels, y_pred=logits, from_logits=True))
    self.assertGreater(loss, 0)

def testCreateDifferentModels(self, one_hot, use_mc_dropout, use_spec_norm,
                              use_gp_layer):
    # Presumably invoked via a parameterized-test decorator that supplies the
    # one_hot / use_mc_dropout / use_spec_norm / use_gp_layer flags (decorator
    # not shown in this snippet).
    if use_spec_norm:
        spec_norm_hparams = {
            'spec_norm_bound': 6.0,
            'spec_norm_iteration': 1
        }
    else:
        spec_norm_hparams = None

    if use_gp_layer:
        gp_layer_hparams = {
            'gp_input_dim': 128,
            'gp_hidden_dim': 1024,
            'gp_scale': 2.0,
            'gp_bias': 0.0,
            'gp_input_normalization': True,
            'gp_cov_discount_factor': 0.999,
            'gp_cov_ridge_penalty': 1e-3,
        }
    else:
        gp_layer_hparams = None

    model = genomics_cnn.create_model(
        batch_size=self.params['batch_size'],
        len_seqs=self.params['seq_len'],
        num_classes=self.params['num_classes'],
        num_motifs=self.params['num_motifs'],
        len_motifs=self.params['len_motifs'],
        num_denses=self.params['num_denses'],
        one_hot=one_hot,
        use_mc_dropout=use_mc_dropout,
        spec_norm_hparams=spec_norm_hparams,
        gp_layer_hparams=gp_layer_hparams)

    logits = model(self.rand_data)
    if isinstance(logits, (tuple, list)):
        logits, covmat = logits
        self.assertEqual(
            covmat.shape,
            (self.params['batch_size'], self.params['batch_size']))
    self.assertEqual(
        logits.shape,
        (self.params['batch_size'], self.params['num_classes']))
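
# A small hypothetical helper (not part of the tests above) showing one common
# way to read uncertainty out of a GP-headed model: when gp_layer_hparams is
# set, the model returns a (logits, covmat) tuple, and the diagonal of the
# batch covariance matrix gives a per-example predictive variance.
def predictive_variance(model, batch):
    # Returns per-example predictive variance for GP-headed models, else None.
    outputs = model(batch)
    if isinstance(outputs, (tuple, list)):
        _, covmat = outputs
        return tf.linalg.diag_part(covmat)  # shape: [batch_size]
    return None
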
def get(
    model_name: str,
    batch_size: int,
    **hyperparameters) -> tf.keras.Model:
  """Gets a model builder by name.

  Args:
    model_name: Name of the model builder class.
    batch_size: the training batch size.
    **hyperparameters: dict of possible kwargs to be passed to the model
      constructor.

  Returns:
    A model builder class with a method .build(split) which can be called to
    get the tf.data.Dataset, which has elements that are a dict with keys
    'features' and 'labels'.

  Raises:
    ValueError: If model_name is unrecognized.
  """
  logging.info(
      'Building model %s with additional kwargs:\n%s',
      model_name,
      json.dumps(hyperparameters, indent=2, sort_keys=True))
  if model_name not in get_model_names():
    raise ValueError('Unrecognized model type: {!r}'.format(model_name))

  # Load from the single_model_uncertainty directory.
  if model_name == 'genomics_cnn':
    return genomics_cnn.create_model(batch_size, **hyperparameters)
  if model_name == 'wide_resnet':
    return wide_resnet.create_model(batch_size, **hyperparameters)

  # Load from the uncertainty_baselines directory.
  if model_name == 'criteo_mlp':
    return criteo_mlp.create_model(batch_size, **hyperparameters)
  if model_name == 'resnet20':
    return resnet20.create_model(batch_size, **hyperparameters)
  if model_name == 'resnet50':
    return resnet50.create_model(batch_size, **hyperparameters)
  if model_name == 'textcnn':
    return textcnn.create_model(batch_size, **hyperparameters)
  if model_name == 'bert':
    return bert.create_model(batch_size, **hyperparameters)
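
# Usage sketch for `get`: the kwarg names mirror the genomics_cnn calls in the
# tests above, but these particular values are hypothetical, and the accepted
# kwargs depend on the chosen model's create_model signature.
model = get(
    'genomics_cnn',
    batch_size=32,
    len_seqs=250,
    num_classes=10,
    num_motifs=16,
    len_motifs=20,
    num_denses=32)
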
def testCreateOptimizer(self, weight_decay):
    # Presumably run via a parameterized-test decorator supplying weight_decay
    # values (decorator not shown in this snippet).
    model = genomics_cnn.create_model(
        batch_size=self.params['batch_size'],
        len_seqs=self.params['seq_len'],
        num_classes=self.params['num_classes'],
        num_motifs=self.params['num_motifs'],
        len_motifs=self.params['len_motifs'],
        num_denses=self.params['num_denses'])
    optimizer = ub.optimizers.get(
        optimizer_name=self.params['optimizer_name'],
        learning_rate=0.001,
        weight_decay=weight_decay,
        model=model)

    with tf.GradientTape() as tape:
      logits = model(self.rand_data)
      loss = tf.reduce_mean(
          tf.keras.losses.sparse_categorical_crossentropy(
              y_true=self.rand_labels, y_pred=logits, from_logits=True))
    grads = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
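
# Minimal reusable training-step sketch assembled from the same pieces the test
# above exercises. This is a hypothetical helper, not part of the original test
# file; it assumes a model and optimizer built as shown there.
@tf.function
def train_step(model, optimizer, features, labels):
  # One optimization step: forward pass, loss, gradients, parameter update.
  with tf.GradientTape() as tape:
    logits = model(features, training=True)
    loss = tf.reduce_mean(
        tf.keras.losses.sparse_categorical_crossentropy(
            y_true=labels, y_pred=logits, from_logits=True))
  grads = tape.gradient(loss, model.trainable_variables)
  optimizer.apply_gradients(zip(grads, model.trainable_variables))
  return loss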