Example no. 1
0
def baseline_space_derivatives(inputs: tf.Tensor,
                               equation: equations.Equation,
                               accuracy_order: int = None) -> tf.Tensor:
    """Calculate spatial derivatives using a baseline method.

    Args:
      inputs: solution Tensor; must be consistent with `equation`'s grid
        (verified by assert_consistent_solution).
      equation: equation being solved; supplies the derivative names/orders
        and the exact discretization method.
      accuracy_order: if None, use the best baseline method for the
        equation's exact type; otherwise build a regular finite
        difference/volume stencil of this accuracy order.

    Returns:
      Tensor of spatial derivatives stacked along a new last axis, one
      slice per entry of equation.DERIVATIVE_ORDERS.

    Raises:
      AssertionError: if the equation's exact method is not recognized.
    """
    assert_consistent_solution(equation, inputs)

    # The reconstruction method is the same in every polynomial branch;
    # select it once instead of in three places.
    method = FINITE_VOL if equation.CONSERVATIVE else FINITE_DIFF

    spatial_derivatives_list = []
    for derivative_name, derivative_order in zip(equation.DERIVATIVE_NAMES,
                                                 equation.DERIVATIVE_ORDERS):

        if accuracy_order is None:
            # use the best baseline method
            assert equation.exact_type() is type(equation)
            if equation.EXACT_METHOD is equations.ExactMethod.POLYNOMIAL:
                # six-point stencil on the staggered (cell-boundary) grid
                grid = (0.5 + np.arange(-3, 3)) * equation.grid.solution_dx
                derivative = polynomials.reconstruct(inputs, grid, method,
                                                     derivative_order)
            elif equation.EXACT_METHOD is equations.ExactMethod.SPECTRAL:
                derivative = duckarray.spectral_derivative(
                    inputs, derivative_order, equation.grid.period)
            elif equation.EXACT_METHOD is equations.ExactMethod.WENO:
                # WENO reconstructs left/right cell-interface states; the
                # roll shifts the reconstruction onto the staggered grid.
                if derivative_name == 'u_minus':
                    derivative = duckarray.roll(weno.reconstruct_left(inputs),
                                                1,
                                                axis=-1)
                elif derivative_name == 'u_plus':
                    derivative = duckarray.roll(weno.reconstruct_right(inputs),
                                                1,
                                                axis=-1)
                else:
                    assert derivative_name == 'u_x'
                    grid = polynomials.regular_grid(
                        grid_offset=equation.GRID_OFFSET,
                        derivative_order=derivative_order,
                        accuracy_order=3,
                        dx=equation.grid.solution_dx)
                    derivative = polynomials.reconstruct(
                        inputs, grid, method, derivative_order)
            else:
                # Previously an unknown method fell through all branches and
                # raised NameError on `derivative`; fail explicitly instead.
                raise AssertionError(
                    'unknown exact method: {}'.format(equation.EXACT_METHOD))

        else:
            # explicit accuracy order provided
            assert type(equation) not in equations.FLUX_EQUATION_TYPES
            grid = polynomials.regular_grid(grid_offset=equation.GRID_OFFSET,
                                            derivative_order=derivative_order,
                                            accuracy_order=accuracy_order,
                                            dx=equation.grid.solution_dx)
            derivative = polynomials.reconstruct(inputs, grid, method,
                                                 derivative_order)

        spatial_derivatives_list.append(derivative)
    return tf.stack(spatial_derivatives_list, axis=-1)
Example no. 2
0
def baseline_space_derivatives(inputs: tf.Tensor,
                               equation: equations.Equation,
                               accuracy_order: int = None) -> tf.Tensor:
    """Calculate spatial derivatives using a baseline method.

    Args:
      inputs: solution Tensor; must be consistent with `equation`'s grid
        (verified by assert_consistent_solution).
      equation: equation being solved; supplies the derivative orders and
        the baseline discretization method.
      accuracy_order: if None, use the equation's preferred baseline
        method; otherwise build a regular finite difference/volume stencil
        of this accuracy order.

    Returns:
      Tensor of spatial derivatives stacked along a new last axis, one
      slice per entry of equation.DERIVATIVE_ORDERS.

    Raises:
      AssertionError: if the equation's baseline method is not recognized.
    """
    assert_consistent_solution(equation, inputs)

    # The reconstruction method is the same in both polynomial branches;
    # select it once instead of twice.
    method = FINITE_VOL if equation.CONSERVATIVE else FINITE_DIFF

    spatial_derivatives_list = []
    for derivative_order in equation.DERIVATIVE_ORDERS:

        if accuracy_order is None:
            # use the best baseline method
            if equation.BASELINE is equations.Baseline.POLYNOMIAL:
                # six-point stencil on the staggered (cell-boundary) grid
                grid = (0.5 + np.arange(-3, 3)) * equation.grid.solution_dx
                derivative = polynomials.reconstruct(inputs, grid, method,
                                                     derivative_order)
            elif equation.BASELINE is equations.Baseline.SPECTRAL:
                derivative = duckarray.spectral_derivative(
                    inputs, derivative_order, equation.grid.period)
            else:
                raise AssertionError('unknown baseline method')

        else:
            # explicit accuracy order provided
            grid = polynomials.regular_grid(grid_offset=equation.GRID_OFFSET,
                                            derivative_order=derivative_order,
                                            accuracy_order=accuracy_order,
                                            dx=equation.grid.solution_dx)
            derivative = polynomials.reconstruct(inputs, grid, method,
                                                 derivative_order)

        spatial_derivatives_list.append(derivative)
    return tf.stack(spatial_derivatives_list, axis=-1)
 def test_regular_grid(self,
                       grid_offset,
                       derivative_order,
                       expected_grid,
                       accuracy_order=1):
     """Check regular_grid against a known-good stencil for each case."""
     computed = polynomials.regular_grid(grid_offset, derivative_order,
                                         accuracy_order)
     np.testing.assert_allclose(computed, expected_grid)
Example no. 4
0
def predict_coefficients(inputs: tf.Tensor,
                         hparams: tf.contrib.training.HParams,
                         reuse: object = tf.AUTO_REUSE) -> tf.Tensor:
    """Predict finite difference coefficients with a neural network.

  Args:
    inputs: float32 Tensor with dimensions [batch, x].
    hparams: model hyperparameters.
    reuse: whether or not to reuse TensorFlow variables.

  Returns:
    Float32 Tensor with dimensions [batch, x, derivative, coefficient].

  Raises:
    ValueError: if inputs does not have the expected size for the equation.
    ValueError: if polynomial accuracy constraints are infeasible.
  """
    # TODO(shoyer): refactor to use layer classes to hold variables, like
    # tf.keras.layers, instead of relying on reuse.
    _, equation = equations.from_hparams(hparams)
    assert_consistent_solution(equation, inputs)

    with tf.variable_scope('predict_coefficients', reuse=reuse):
        num_derivatives = len(equation.DERIVATIVE_ORDERS)

        # Stencil over which coefficients are predicted. derivative_order=0
        # here because the same stencil is reused for all derivatives; the
        # hparam sets the minimum stencil size via accuracy_order.
        grid = polynomials.regular_grid(
            equation.GRID_OFFSET,
            derivative_order=0,
            accuracy_order=hparams.coefficient_grid_min_size,
            dx=equation.grid.solution_dx)

        # [batch, x] -> [batch, x, channels=1], normalized.
        # NOTE(review): presumably standard_deviation is a dataset-level
        # statistic, so this is a fixed input scaling — confirm upstream.
        net = inputs[:, :, tf.newaxis]
        net /= equation.standard_deviation

        activation = _NONLINEARITIES[hparams.nonlinearity]

        # All but the final conv layer; the final (linear) layer is built
        # per-branch below.
        for _ in range(hparams.num_layers - 1):
            net = layers.conv1d_periodic_layer(net,
                                               filters=hparams.filter_size,
                                               kernel_size=hparams.kernel_size,
                                               activation=activation,
                                               center=True)

        if not hparams.polynomial_accuracy_order:
            # Unconstrained coefficients, straight from a linear conv layer.
            if hparams.num_layers == 0:
                raise NotImplementedError

            net = layers.conv1d_periodic_layer(net,
                                               filters=num_derivatives *
                                               grid.size,
                                               kernel_size=hparams.kernel_size,
                                               activation=None,
                                               center=True)
            # [batch, x, derivative*coefficient] -> [batch, x, derivative,
            # coefficient]; set_shape restores static shape info lost by the
            # dynamic reshape.
            new_dims = [num_derivatives, grid.size]
            outputs = tf.reshape(
                net, tf.concat([tf.shape(inputs), new_dims], axis=0))
            outputs.set_shape(inputs.shape[:2].concatenate(new_dims))

            if hparams.ensure_unbiased_coefficients:
                if 0 in equation.DERIVATIVE_ORDERS:
                    raise ValueError(
                        'ensure_unbiased not yet supported for 0th order '
                        'spatial derivatives')
                # Zero-mean coefficients: constant inputs get zero derivative.
                outputs -= tf.reduce_mean(outputs, axis=-1, keepdims=True)

        else:
            # Constrained coefficients: each derivative's coefficients are
            # projected through a PolynomialAccuracyLayer, which can raise
            # ValueError when the accuracy constraints are infeasible.
            poly_accuracy_layers = []

            for derivative_order in equation.DERIVATIVE_ORDERS:
                method = FINITE_VOL if equation.CONSERVATIVE else FINITE_DIFF
                poly_accuracy_layers.append(
                    polynomials.PolynomialAccuracyLayer(
                        grid=grid,
                        method=method,
                        derivative_order=derivative_order,
                        accuracy_order=hparams.polynomial_accuracy_order,
                        out_scale=hparams.polynomial_accuracy_scale))
            input_sizes = [layer.input_size for layer in poly_accuracy_layers]

            if hparams.num_layers > 0:
                # Final linear conv emits the concatenated inputs for all
                # accuracy layers.
                net = layers.conv1d_periodic_layer(
                    net,
                    filters=sum(input_sizes),
                    kernel_size=hparams.kernel_size,
                    activation=None,
                    center=True)
            else:
                # No conv net at all: learn a single coefficient vector,
                # tiled across batch and x positions.
                initializer = tf.initializers.zeros()
                coefficients = tf.get_variable('coefficients',
                                               (sum(input_sizes), ),
                                               initializer=initializer)
                net = tf.tile(coefficients[tf.newaxis, tf.newaxis, :],
                              [tf.shape(inputs)[0], inputs.shape[1].value, 1])

            # Slice the concatenated channel axis back into per-derivative
            # segments: segment i spans [starts[i], stops[i]).
            cum_sizes = np.cumsum(input_sizes)
            starts = [0] + cum_sizes[:-1].tolist()
            stops = cum_sizes.tolist()
            zipped = zip(starts, stops, poly_accuracy_layers)

            # Stack per-derivative coefficient sets along a new axis -2,
            # yielding [batch, x, derivative, coefficient].
            outputs = tf.stack([
                layer.apply(net[..., start:stop])
                for start, stop, layer in zipped
            ],
                               axis=-2)
            assert outputs.shape.as_list()[-1] == grid.size

        return outputs