Code example #1
File: training.py, Project: yohai/pde-superresolution
from typing import Tuple

import numpy as np
import tensorflow as tf

# Project-internal module; this import path is assumed from the
# pde-superresolution package layout and may differ in the actual repo.
from pde_superresolution import model


def setup_training(
        snapshots: np.ndarray,
        hparams: tf.contrib.training.HParams) -> Tuple[tf.Tensor, tf.Tensor]:
    """Create Tensors for training.

  Args:
    snapshots: np.ndarray with shape [examples, x] with high-resolution
      training data.
    hparams: hyperparameters for training.

  Returns:
    Tensors for the current loss, and for taking a training step.
  """
    dataset = model.make_dataset(snapshots,
                                 hparams,
                                 dataset_type=model.Dataset.TRAINING)
    tensors = dataset.make_one_shot_iterator().get_next()

    predictions = model.predict_result(tensors['inputs'], hparams)

    loss_per_head = model.loss_per_head(predictions,
                                        labels=tensors['labels'],
                                        baseline=tensors['baseline'],
                                        hparams=hparams)
    loss = model.weighted_loss(loss_per_head, hparams)
    train_step = create_training_step(loss, hparams)

    return loss, train_step
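
A minimal usage sketch for this helper, hedged: `create_training_step` is defined elsewhere in the same training.py, the data file name and training-loop constants below are placeholders, and the HParams fields the project expects are not shown here.

import numpy as np
import tensorflow as tf

snapshots = np.load('snapshots.npy')  # hypothetical high-resolution data
hparams = tf.contrib.training.HParams()  # populate with the project's fields

loss, train_step = setup_training(snapshots, hparams)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(1000):
        # Each run of train_step consumes one batch from the one-shot iterator.
        loss_value, _ = sess.run([loss, train_step])
        if step % 100 == 0:
            print('step %d: loss = %g' % (step, loss_value))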
Code example #2
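This example is the constructor of an evaluation class; the class statement itself is not part of the excerpt. It assumes numpy as np, tensorflow as tf, and the project's model and equations modules are in scope.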
    def __init__(self,
                 snapshots: np.ndarray,
                 hparams: tf.contrib.training.HParams,
                 training: bool = False):
        """Initialize an object for running inference.

    Args:
      snapshots: np.ndarray with shape [examples, x] with high-resolution
        training data.
      hparams: hyperparameters for training.
      training: whether to evaluate on training or validation datasets.
    """
        if training:
            dataset_type = model.Dataset.TRAINING
        else:
            dataset_type = model.Dataset.VALIDATION
        dataset = model.make_dataset(snapshots,
                                     hparams,
                                     dataset_type=dataset_type,
                                     repeat=False,
                                     evaluation=True)
        iterator = dataset.make_initializable_iterator()
        data = iterator.get_next()

        _, coarse_equation = equations.from_hparams(hparams)

        predictions = model.predict_result(data['inputs'], hparams)
        loss_per_head = model.loss_per_head(predictions,
                                            labels=data['labels'],
                                            baseline=data['baseline'],
                                            hparams=hparams)
        loss = model.weighted_loss(loss_per_head, hparams)

        results = dict(data, predictions=predictions)
        metrics = {
            k: tf.contrib.metrics.streaming_concat(v)
            for k, v in results.items()
        }
        metrics['loss'] = tf.metrics.mean(loss)

        space_loss, time_loss, integrated_loss = model.result_unstack(
            loss_per_head, coarse_equation)
        metrics['loss/space_derivatives'] = tf.metrics.mean(space_loss)
        metrics['loss/time_derivative'] = tf.metrics.mean(time_loss)
        if integrated_loss is not None:
            metrics['loss/integrated_solution'] = tf.metrics.mean(
                integrated_loss)

        initializer = tf.group(iterator.initializer,
                               tf.local_variables_initializer())

        self._initializer = initializer
        self._metrics = metrics
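
A hedged sketch of how such an object might be driven. The helper below is hypothetical: it relies only on what the constructor stores, namely _initializer and _metrics, where each metric entry is the (value, update_op) pair returned by tf.metrics.mean and tf.contrib.metrics.streaming_concat.

import tensorflow as tf

def run_evaluation(evaluator, sess):
    # Reset the iterator and the metrics' local (accumulator) variables.
    sess.run(evaluator._initializer)
    update_ops = [update_op for _, update_op in evaluator._metrics.values()]
    while True:
        try:
            sess.run(update_ops)  # accumulate streaming metrics per batch
        except tf.errors.OutOfRangeError:
            break  # the non-repeating evaluation dataset is exhausted
    # Read out the final metric values after the full pass.
    value_ops = {k: value for k, (value, _) in evaluator._metrics.items()}
    return sess.run(value_ops)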
Code example #3
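This variant takes a pre-built dataset instead of constructing one itself. It assumes the project's model module is in scope and that pde.training resolves to the project's training module; the original imports are not shown in the excerpt.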
def setup_training(dataset, hparams, scale=1.0):
    # Predict u, u_x and u_t from the inputs, as in training.
    # NOTE: `scale` is accepted but unused in this excerpt as shown.
    tensors = dataset.make_one_shot_iterator().get_next()

    predictions = model.predict_result(tensors['inputs'], hparams)

    loss_per_head = model.loss_per_head(predictions,
                                        labels=tensors['labels'],
                                        baseline=tensors['baseline'],
                                        hparams=hparams)
    loss = model.weighted_loss(loss_per_head, hparams)
    train_step = pde.training.create_training_step(loss, hparams)
    return loss, train_step
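
A possible call site, hedged: the dataset construction mirrors code example #1, and `snapshots` and `hparams` are placeholders rather than values from the project.

dataset = model.make_dataset(snapshots, hparams,
                             dataset_type=model.Dataset.TRAINING)
loss, train_step = setup_training(dataset, hparams)

Injecting the dataset this way makes the helper easier to reuse, e.g. with a validation dataset in place of the training one.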