Example #1
    def _build(self, input_shape):
        input_depth = tf.compat.dimension_value(
            tensorshape_util.with_rank_at_least(input_shape, 1)[-1])

        self._input_depth = input_depth

        flow_parts = []
        for i in range(self._num_coupling_layers):
            if self._use_batch_normalization:
                batch_normalization_bijector = bijectors.BatchNormalization()
                flow_parts += [batch_normalization_bijector]

            real_nvp_bijector = bijectors.RealNVP(
                num_masked=input_depth // 2,
                shift_and_log_scale_fn=feedforward_scale_and_log_diag_fn(
                    hidden_layer_sizes=self._hidden_layer_sizes,
                    activation=tf.nn.relu),
                name='real_nvp_{}'.format(i))
            flow_parts += [real_nvp_bijector]

            if i < self._num_coupling_layers - 1:
                permute_bijector = bijectors.Permute(
                    permutation=list(reversed(range(input_depth))),
                    name='permute_{}'.format(i))
                flow_parts += [permute_bijector]

        # bijectors.Chain applies the bijectors in the _reverse_ order
        # of how they are listed, hence the [::-1].
        self.flow = bijectors.Chain(flow_parts[::-1])
        self._built = True
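The chain built above is an ordinary TFP bijector, so it can serve as the transform of a TransformedDistribution to get samples and log-densities through the flow. A minimal sketch, assuming an already-built instance of the class above (called flow_model here purely for illustration) and an event size of 4:

    import tensorflow as tf
    import tensorflow_probability as tfp

    tfd = tfp.distributions

    input_depth = 4  # illustrative event size; must match the built flow
    base = tfd.MultivariateNormalDiag(loc=tf.zeros(input_depth))
    distribution = tfd.TransformedDistribution(
        distribution=base,
        bijector=flow_model.flow)  # `flow_model` is hypothetical

    x = distribution.sample(8)           # shape [8, input_depth]
    log_prob = distribution.log_prob(x)  # shape [8]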
Example #2
    def build(self):
        D = np.prod(self._event_dims)

        flow = []
        for i in range(self._num_coupling_layers):
            if self._use_batch_normalization:
                batch_normalization_bijector = bijectors.BatchNormalization()
                flow.append(batch_normalization_bijector)

            real_nvp_bijector = bijectors.RealNVP(
                num_masked=D // 2,
                shift_and_log_scale_fn=conditioned_real_nvp_template(
                    hidden_layers=self._hidden_layer_sizes,
                    # TODO: test tf.nn.relu
                    activation=tf.nn.tanh),
                name='real_nvp_{}'.format(i))

            flow.append(real_nvp_bijector)

            if i < self._num_coupling_layers - 1:
                permute_bijector = bijectors.Permute(
                    permutation=list(reversed(range(D))),
                    name='permute_{}'.format(i))
                # TODO(hartikainen): We need to force _is_constant_jacobian due
                # to the event_dim caching. See the issue filed at github:
                # https://github.com/tensorflow/probability/issues/122
                permute_bijector._is_constant_jacobian = False
                flow.append(permute_bijector)

        # Note: bijectors.Chain applies the bijectors in the _reverse_
        # order of how they are listed, hence the reversal below.
        self.flow = bijectors.Chain(flow[::-1])
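The reverse-order convention that the comment refers to is easy to check directly. A minimal sketch (tfb.Scale assumes a reasonably recent tensorflow_probability release):

    import tensorflow_probability as tfp

    tfb = tfp.bijectors

    # Chain applies its list right-to-left: the last bijector listed
    # runs first on the forward pass.
    chain = tfb.Chain([tfb.Exp(), tfb.Scale(2.)])
    y = chain.forward(1.)  # Scale first, then Exp: exp(2. * 1.) ~= 7.389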
Example #3
    def _build(self, input_shape):
        input_depth = tf.compat.dimension_value(
            tensorshape_util.with_rank_at_least(input_shape, 1)[-1])

        self._input_depth = input_depth

        flow_parts = []
        for i in range(self._num_coupling_layers):
            if self._use_batch_normalization:
                # TODO(hartikainen): Allow other normalizations, e.g.
                # weight normalization?
                batch_normalization_bijector = bijectors.BatchNormalization()
                flow_parts += [batch_normalization_bijector]

            real_nvp_bijector = bijectors.RealNVP(
                # A negative fraction masks the trailing units instead of
                # the leading ones, so alternating the sign transforms
                # complementary halves on successive layers.
                fraction_masked=0.5 if i % 2 == 0 else -0.5,
                bijector_fn=FeedforwardBijectorFunction(
                    hidden_layer_sizes=self._hidden_layer_sizes,
                    activation=self._activation),
                name=f'real_nvp_{i}')
            flow_parts += [real_nvp_bijector]

        # bijectors.Chain applies the bijectors in the _reverse_ order
        # of how they are listed, hence the [::-1].
        self.flow = bijectors.Chain(flow_parts[::-1])
        self._built = True
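Example #3 replaces the Permute layers of the earlier examples with an alternating fraction_masked sign. A self-contained sketch of that idea, using small Keras networks as a hypothetical stand-in for FeedforwardBijectorFunction (which is defined elsewhere in the repo) and assuming a TFP version that supports fraction_masked, as Example #3 already does; D, the layer sizes, and the names are illustrative:

    import tensorflow as tf
    import tensorflow_probability as tfp

    tfb = tfp.bijectors

    D = 4  # illustrative event size
    num_coupling_layers = 4
    flow_parts = []
    for i in range(num_coupling_layers):
        # Create the network once per layer so its variables are reused
        # across forward and inverse calls.
        net = tf.keras.Sequential([
            tf.keras.layers.Dense(64, activation='relu'),
            tf.keras.layers.Dense(2 * (D - D // 2)),
        ])

        def shift_and_log_scale_fn(x, output_units, net=net, **kwargs):
            # `net=net` binds this iteration's network, avoiding the
            # late-binding pitfall of closures defined in a loop.
            shift, log_scale = tf.split(net(x), 2, axis=-1)
            return shift, log_scale

        flow_parts.append(tfb.RealNVP(
            # Alternate which half is masked, as in Example #3.
            fraction_masked=0.5 if i % 2 == 0 else -0.5,
            shift_and_log_scale_fn=shift_and_log_scale_fn,
            name=f'real_nvp_{i}'))

    flow = tfb.Chain(flow_parts[::-1])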