Example #1
from typing import Tuple

import numpy as np
import tensorflow as tf

# `model` and `create_training_step` are assumed to come from the surrounding
# project (its model and training modules); the exact import paths are not
# shown in the original listing.


def setup_training(
        snapshots: np.ndarray,
        hparams: tf.contrib.training.HParams) -> Tuple[tf.Tensor, tf.Tensor]:
    """Create Tensors for training.

    Args:
      snapshots: np.ndarray with shape [examples, x] with high-resolution
        training data.
      hparams: hyperparameters for training.

    Returns:
      Tensors for the current loss, and for taking a training step.
    """
    dataset = model.make_dataset(snapshots,
                                 hparams,
                                 dataset_type=model.Dataset.TRAINING)
    tensors = dataset.make_one_shot_iterator().get_next()

    predictions = model.predict_result(tensors['inputs'], hparams)

    loss_per_head = model.loss_per_head(predictions,
                                        labels=tensors['labels'],
                                        baseline=tensors['baseline'],
                                        hparams=hparams)
    loss = model.weighted_loss(loss_per_head, hparams)
    train_step = create_training_step(loss, hparams)

    return loss, train_step
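
A minimal sketch of how setup_training could be driven in a TF1 session
(the step and logging counts below are placeholders, not from the original):

loss, train_step = setup_training(snapshots, hparams)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(1000):  # placeholder step count
        _, loss_value = sess.run([train_step, loss])
        if (step + 1) % 100 == 0:
            print(step, loss_value)
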
Example #2

# `model` and `equations` are again assumed to come from the surrounding
# project. The class name below is a stand-in: the original listing shows
# only this __init__ method.
class Evaluation:

    def __init__(self,
                 snapshots: np.ndarray,
                 hparams: tf.contrib.training.HParams,
                 training: bool = False):
        """Initialize an object for running inference.

        Args:
          snapshots: np.ndarray with shape [examples, x] with high-resolution
            training data.
          hparams: hyperparameters for training.
          training: whether to evaluate on training or validation datasets.
        """
        if training:
            dataset_type = model.Dataset.TRAINING
        else:
            dataset_type = model.Dataset.VALIDATION
        dataset = model.make_dataset(snapshots,
                                     hparams,
                                     dataset_type=dataset_type,
                                     repeat=False,
                                     evaluation=True)
        iterator = dataset.make_initializable_iterator()
        data = iterator.get_next()

        _, coarse_equation = equations.from_hparams(hparams)

        predictions = model.predict_result(data['inputs'], hparams)
        loss_per_head = model.loss_per_head(predictions,
                                            labels=data['labels'],
                                            baseline=data['baseline'],
                                            hparams=hparams)
        loss = model.weighted_loss(loss_per_head, hparams)

        # stream per-batch tensors (inputs, labels, baseline, predictions)
        # into concatenated arrays, one streaming metric per key
        results = dict(data, predictions=predictions)
        metrics = {
            k: tf.contrib.metrics.streaming_concat(v)
            for k, v in results.items()
        }
        metrics['loss'] = tf.metrics.mean(loss)

        space_loss, time_loss, integrated_loss = model.result_unstack(
            loss_per_head, coarse_equation)
        metrics['loss/space_derivatives'] = tf.metrics.mean(space_loss)
        metrics['loss/time_derivative'] = tf.metrics.mean(time_loss)
        if integrated_loss is not None:
            metrics['loss/integrated_solution'] = tf.metrics.mean(
                integrated_loss)

        # running this op resets both the dataset iterator and the local
        # variables that back the streaming metrics
        initializer = tf.group(iterator.initializer,
                               tf.local_variables_initializer())

        self._initializer = initializer
        self._metrics = metrics
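
Each entry in self._metrics follows the TF1 streaming-metric convention of a
(value_op, update_op) pair. A minimal evaluation-loop sketch driving the class
above (Evaluation is the stand-in name from the listing, and the private
attributes are read directly for brevity):

evaluation = Evaluation(snapshots, hparams, training=False)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # reset the dataset iterator and the metric accumulators
    sess.run(evaluation._initializer)
    update_ops = {k: v[1] for k, v in evaluation._metrics.items()}
    value_ops = {k: v[0] for k, v in evaluation._metrics.items()}
    while True:
        try:
            sess.run(update_ops)  # fold one batch into the running metrics
        except tf.errors.OutOfRangeError:
            break  # one-pass dataset exhausted (repeat=False)
    results = sess.run(value_ops)
    print('validation loss:', results['loss'])
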
Example #3
def setup_training(dataset, hparams, scale=1.0):  # note: `scale` is unused in this body
    # predict u, u_x, u_t as in training
    tensors = dataset.make_one_shot_iterator().get_next()

    predictions = model.predict_result(tensors['inputs'], hparams)

    loss_per_head = model.loss_per_head(predictions,
                                        labels=tensors['labels'],
                                        baseline=tensors['baseline'],
                                        hparams=hparams)
    loss = model.weighted_loss(loss_per_head, hparams)
    train_step = pde.training.create_training_step(loss, hparams)
    return loss, train_step
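
Unlike Example #1, this variant expects the caller to construct the dataset.
With the API shown in Example #1, a call could look like this (sketch):

dataset = model.make_dataset(snapshots,
                             hparams,
                             dataset_type=model.Dataset.TRAINING)
loss, train_step = setup_training(dataset, hparams)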
Example #4
# training loop: the enclosing `for` statement was truncated in the original
# snippet; the step count below is a placeholder
num_training_steps = 10000
for step in range(num_training_steps):
    sess.run(train_step)
    if (step + 1) % hparams.eval_interval == 0:
        print(step, sess.run(loss))

# save
save_path = 'tmp_model.ckpt'
saver = tf.train.Saver()
saver.save(sess, save_path)
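
# illustrative addition, not part of the original snippet: the checkpoint
# written above can be reloaded with the same Saver (here into the live
# session; a fresh session would rebuild the same graph first)
saver.restore(sess, save_path)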

# prediction
demo_dataset = pde.model.make_dataset(snapshots,
                                      hparams,
                                      pde.model.Dataset.VALIDATION,
                                      repeat=False,
                                      evaluation=True)
tensors = demo_dataset.make_one_shot_iterator().get_next()
tensors['predictions'] = pde.model.predict_result(tensors['inputs'], hparams)

# run the one-shot iterator until the one-pass validation set is exhausted,
# collecting each batch of fetched arrays
array_list = []
while True:
    try:
        array_list.append(sess.run(tensors))
    except tf.errors.OutOfRangeError:
        break

# concatenate the per-batch arrays into one array per key
arrays = {k: np.concatenate([d[k] for d in array_list]) for k in array_list[0]}

# pull the collected arrays apart for inspection and simple plots
inputs = arrays['inputs']
predictions = arrays['predictions'][:, :, 0]
labels = arrays['labels'][:, :, 0]
print("inputs shape:", inputs.shape)