# Code example #1 (label: 0)
def _multilayer_conv1d(inputs, hparams, num_targets, reuse=tf.AUTO_REUSE):
    """Apply a stack of periodic conv1d layers to normalized inputs.

    Args:
        inputs: float32 Tensor with dimensions [batch, x].
        hparams: model hyperparameters; reads num_layers, filter_size,
            kernel_size and nonlinearity.
        num_targets: number of output channels for the final layer.
        reuse: whether or not to reuse TensorFlow variables.
            NOTE(review): currently unused — no variable_scope is opened
            here, unlike predict_coefficients; kept for interface
            compatibility. Confirm whether a scope was intended.

    Returns:
        float32 Tensor with dimensions [batch, x, num_targets].

    Raises:
        NotImplementedError: if hparams.num_layers == 0.
    """
    _, equation = equations.from_hparams(hparams)
    assert_consistent_solution(equation, inputs)

    # Guard first: with zero layers there is no network to apply. (In the
    # original this check sat after the hidden-layer loop; the loop was
    # vacuously empty for num_layers == 0, so hoisting it is equivalent.)
    if hparams.num_layers == 0:
        raise NotImplementedError('not implemented yet')

    # Add a channel dimension and normalize by the equation's scale.
    net = inputs[:, :, tf.newaxis]
    net /= equation.standard_deviation

    activation = _NONLINEARITIES[hparams.nonlinearity]
    # Hidden layers: all but the final layer use the configured nonlinearity.
    for _ in range(hparams.num_layers - 1):
        net = layers.conv1d_periodic_layer(net,
                                           filters=hparams.filter_size,
                                           kernel_size=hparams.kernel_size,
                                           activation=activation,
                                           center=True)
    # Final linear layer projects to the requested number of targets.
    net = layers.conv1d_periodic_layer(net,
                                       filters=num_targets,
                                       kernel_size=hparams.kernel_size,
                                       activation=None,
                                       center=True)
    return net
# Code example #2 (label: 0)
def predict_coefficients(inputs: tf.Tensor,
                         hparams: tf.contrib.training.HParams,
                         reuse: object = tf.AUTO_REUSE) -> tf.Tensor:
    """Predict finite difference coefficients with a neural network.

    Args:
      inputs: float32 Tensor with dimensions [batch, x].
      hparams: model hyperparameters.
      reuse: whether or not to reuse TensorFlow variables.

    Returns:
      Float32 Tensor with dimensions [batch, x, derivative, coefficient].

    Raises:
      ValueError: if inputs does not have the expected size for the equation.
      ValueError: if polynomial accuracy constraints are infeasible.
    """
    # TODO(shoyer): refactor to use layer classes to hold variables, like
    # tf.keras.layers, instead of relying on reuse.
    _, equation = equations.from_hparams(hparams)
    assert_consistent_solution(equation, inputs)

    with tf.variable_scope('predict_coefficients', reuse=reuse):
        num_derivatives = len(equation.DERIVATIVE_ORDERS)

        # Stencil grid on which the coefficients are defined; its size fixes
        # the last dimension of the result.
        grid = polynomials.regular_grid(
            equation.GRID_OFFSET,
            derivative_order=0,
            accuracy_order=hparams.coefficient_grid_min_size,
            dx=equation.grid.solution_dx)

        # Add a channel dimension and normalize by the equation's scale.
        net = inputs[:, :, tf.newaxis]
        net /= equation.standard_deviation

        activation = _NONLINEARITIES[hparams.nonlinearity]

        # Hidden layers: all but the final layer use the configured
        # nonlinearity.
        for _ in range(hparams.num_layers - 1):
            net = layers.conv1d_periodic_layer(net,
                                               filters=hparams.filter_size,
                                               kernel_size=hparams.kernel_size,
                                               activation=activation,
                                               center=True)

        if not hparams.polynomial_accuracy_order:
            # Unconstrained branch: predict all coefficients directly with a
            # final linear layer.
            if hparams.num_layers == 0:
                raise NotImplementedError

            net = layers.conv1d_periodic_layer(net,
                                               filters=num_derivatives *
                                               grid.size,
                                               kernel_size=hparams.kernel_size,
                                               activation=None,
                                               center=True)
            # Unflatten channels into [derivative, coefficient], then restore
            # the static shape information that tf.reshape loses when given a
            # dynamic shape tensor.
            new_dims = [num_derivatives, grid.size]
            outputs = tf.reshape(
                net, tf.concat([tf.shape(inputs), new_dims], axis=0))
            outputs.set_shape(inputs.shape[:2].concatenate(new_dims))

            if hparams.ensure_unbiased_coefficients:
                if 0 in equation.DERIVATIVE_ORDERS:
                    raise ValueError(
                        'ensure_unbiased not yet supported for 0th order '
                        'spatial derivatives')
                # Subtract the mean so each stencil's coefficients sum to
                # zero (per the hparam name, this makes the derivative
                # estimates unbiased for nonzero derivative orders).
                outputs -= tf.reduce_mean(outputs, axis=-1, keepdims=True)

        else:
            # Constrained branch: one PolynomialAccuracyLayer per derivative
            # order maps network outputs onto coefficients satisfying the
            # polynomial accuracy constraints (may raise ValueError if
            # infeasible — see Raises).
            poly_accuracy_layers = []

            for derivative_order in equation.DERIVATIVE_ORDERS:
                method = FINITE_VOL if equation.CONSERVATIVE else FINITE_DIFF
                poly_accuracy_layers.append(
                    polynomials.PolynomialAccuracyLayer(
                        grid=grid,
                        method=method,
                        derivative_order=derivative_order,
                        accuracy_order=hparams.polynomial_accuracy_order,
                        out_scale=hparams.polynomial_accuracy_scale))
            input_sizes = [layer.input_size for layer in poly_accuracy_layers]

            if hparams.num_layers > 0:
                # Final linear layer emits the concatenated inputs for all
                # accuracy layers along the channel axis.
                net = layers.conv1d_periodic_layer(
                    net,
                    filters=sum(input_sizes),
                    kernel_size=hparams.kernel_size,
                    activation=None,
                    center=True)
            else:
                # No network: learn a single spatially-constant vector of
                # coefficient inputs and tile it across batch and x.
                initializer = tf.initializers.zeros()
                coefficients = tf.get_variable('coefficients',
                                               (sum(input_sizes), ),
                                               initializer=initializer)
                net = tf.tile(coefficients[tf.newaxis, tf.newaxis, :],
                              [tf.shape(inputs)[0], inputs.shape[1].value, 1])

            # Compute [start, stop) channel segments, one per accuracy layer.
            cum_sizes = np.cumsum(input_sizes)
            starts = [0] + cum_sizes[:-1].tolist()
            stops = cum_sizes.tolist()
            zipped = zip(starts, stops, poly_accuracy_layers)

            # Apply each layer to its segment and stack along a new
            # derivative axis (second to last).
            outputs = tf.stack([
                layer.apply(net[..., start:stop])
                for start, stop, layer in zipped
            ],
                               axis=-2)
            assert outputs.shape.as_list()[-1] == grid.size

        return outputs