def test_get_activation_fn(self):
        """Tests.
        """
        fn = layers.get_activation_fn()
        self.assertEqual(fn, tf.identity)

        fn = layers.get_activation_fn('relu')
        self.assertEqual(fn, tf.nn.relu)

        # Float inputs: leaky_relu scales negative values by a float ``alpha``.
        inputs = tf.random_uniform([64, 100], -5, 20, dtype=tf.float32)

        fn = layers.get_activation_fn('leaky_relu')
        fn_output = fn(inputs)
        ref_output = tf.nn.leaky_relu(inputs)
        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            fn_output_, ref_output_ = sess.run([fn_output, ref_output])
            np.testing.assert_array_equal(fn_output_, ref_output_)

        fn = layers.get_activation_fn('leaky_relu', kwargs={'alpha': 0.1})
        fn_output = fn(inputs)
        ref_output = tf.nn.leaky_relu(inputs, alpha=0.1)
        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            fn_output_, ref_output_ = sess.run([fn_output, ref_output])
            np.testing.assert_array_equal(fn_output_, ref_output_)
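The factory under test resolves a name such as 'leaky_relu' to the matching tf.nn function and binds any extra keyword arguments. The texar implementation is not reproduced here; the following is a minimal sketch of the idea using functools.partial (make_activation_fn is a hypothetical name):

import functools

import tensorflow as tf

def make_activation_fn(name=None, kwargs=None):
    # No name given: fall back to the identity pass-through, matching
    # the first assertion in the test above.
    if name is None:
        return tf.identity
    # Resolve e.g. 'relu' -> tf.nn.relu, 'leaky_relu' -> tf.nn.leaky_relu.
    fn = getattr(tf.nn, name)
    # Bind extra keyword arguments such as {'alpha': 0.1}.
    if kwargs:
        fn = functools.partial(fn, **kwargs)
    return fn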
Example 2
    def _build_dense_hparams(self):
        ndense = self._hparams.num_dense_layers
        dense_size = _to_list(self._hparams.dense_size, 'dense_size', ndense)

        other_kwargs = self._hparams.other_dense_kwargs or {}
        if isinstance(other_kwargs, HParams):
            other_kwargs = other_kwargs.todict()
        if not isinstance(other_kwargs, dict):
            raise ValueError("hparams['other_dense_kwargs'] must be a dict.")

        dense_hparams = []
        activation_fn = get_activation_fn(
            self._hparams.dense_activation,
            self._hparams.dense_activation_kwargs)
        for i in range(ndense):
            # The last layer uses the separately configured final activation.
            if i == ndense - 1:
                activation_fn = get_activation_fn(
                    self._hparams.final_dense_activation,
                    self._hparams.final_dense_activation_kwargs)

            kwargs_i = {"units": dense_size[i],
                        "activation": activation_fn,
                        "name": "dense_%d" % (i+1)}
            kwargs_i.update(other_kwargs)

            dense_hparams.append({"type": "Dense", "kwargs": kwargs_i})

        return dense_hparams
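For concreteness, with hypothetical hparams num_dense_layers=2, dense_size=[128, 10], dense_activation='relu', and the default final activation, the method returns a list shaped as below; each entry can be fed to texar's get_layer. The concrete callables are stand-ins:

import tensorflow as tf

# Illustrative return value of _build_dense_hparams for the
# hypothetical hparams above; only the last layer carries the
# separately resolved final activation (identity as a stand-in).
dense_hparams = [
    {"type": "Dense",
     "kwargs": {"units": 128, "activation": tf.nn.relu, "name": "dense_1"}},
    {"type": "Dense",
     "kwargs": {"units": 10, "activation": tf.identity, "name": "dense_2"}},
]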
Example 3
    def _build_conv1d_hparams(self, pool_hparams):
        """Creates the hparams for each of the conv layers usable for
        :func:`texar.core.layers.get_layer`.
        """
        nconv = self._hparams.num_conv_layers
        if len(pool_hparams) != nconv:
            raise ValueError("`pool_hparams` must be of length %d" % nconv)

        filters = _to_list(self._hparams.filters, 'filters', nconv)
        if nconv == 1:
            kernel_size = _to_list(self._hparams.kernel_size)
            if not isinstance(kernel_size[0], (list, tuple)):
                kernel_size = [kernel_size]
        elif nconv > 1:
            kernel_size = _to_list(self._hparams.kernel_size,
                                   'kernel_size', nconv)
            kernel_size = [_to_list(ks) for ks in kernel_size]

        other_kwargs = self._hparams.other_conv_kwargs or {}
        if isinstance(other_kwargs, HParams):
            other_kwargs = other_kwargs.todict()
        if not isinstance(other_kwargs, dict):
            raise ValueError("hparams['other_conv_kwargs'] must be a dict.")

        conv_pool_hparams = []
        activation_fn = get_activation_fn(
            self._hparams.conv_activation,
            self._hparams.conv_activation_kwargs)
        for i in range(nconv):
            hparams_i = []
            names = []
            for ks_ij in kernel_size[i]:
                name = uniquify_str("conv_%d" % (i+1), names)
                names.append(name)
                conv_kwargs_ij = {
                    "filters": filters[i],
                    "kernel_size": ks_ij,
                    "activation": activation_fn,
                    "name": name
                }
                conv_kwargs_ij.update(other_kwargs)
                hparams_i.append(
                    {"type": "Conv1D", "kwargs": conv_kwargs_ij})
            if len(hparams_i) == 1:
                conv_pool_hparams.append([hparams_i[0], pool_hparams[i]])
            else:  # creates MergeLayer
                mrg_kwargs_layers = []
                for hparams_ij in hparams_i:
                    seq_kwargs_j = {"layers": [hparams_ij, pool_hparams[i]]}
                    mrg_kwargs_layers.append(
                        {"type": "SequentialLayer", "kwargs": seq_kwargs_j})
                mrg_hparams = {"type": "MergeLayer",
                               "kwargs": {"layers": mrg_kwargs_layers,
                                          "name": "conv_pool_%d" % (i+1)}}
                conv_pool_hparams.append(mrg_hparams)

        return conv_pool_hparams
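To make the multi-kernel branch concrete, suppose (hypothetically) a single conv layer with filters=[100] and kernel sizes 3, 4 and 5. Each kernel size gets its own Conv1D chained with the shared pooling layer inside a SequentialLayer, and the branches are combined by a MergeLayer:

import tensorflow as tf

act_fn = tf.nn.relu  # stand-in for the resolved conv activation
pool_hp = {}         # stand-in for pool_hparams[0]

# Illustrative hparams for that layer; the branches for kernel sizes
# 4 and 5 are analogous, with names uniquified by uniquify_str.
conv_pool_hparams_0 = {
    "type": "MergeLayer",
    "kwargs": {
        "name": "conv_pool_1",
        "layers": [
            {"type": "SequentialLayer",
             "kwargs": {"layers": [
                 {"type": "Conv1D",
                  "kwargs": {"filters": 100, "kernel_size": 3,
                             "activation": act_fn, "name": "conv_1"}},
                 pool_hp,
             ]}},
            # ... two more SequentialLayer branches for kernel sizes 4, 5
        ],
    },
}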
Example 4
    def test_get_activation_fn(self):
        r"""Tests.
        """
        fn = layers.get_activation_fn()
        self.assertEqual(fn, None)

        fn = layers.get_activation_fn('relu')
        self.assertEqual(fn, F.relu)

        inputs = torch.randn(64, 100)

        fn = layers.get_activation_fn('leaky_relu')
        fn_output = fn(inputs)
        ref_output = F.leaky_relu(inputs)
        self.assertTrue(torch.equal(fn_output, ref_output))

        fn = layers.get_activation_fn('leaky_relu',
                                      kwargs={'negative_slope': 0.1})
        fn_output = fn(inputs)
        ref_output = F.leaky_relu(inputs, negative_slope=0.1)
        self.assertTrue(torch.equal(fn_output, ref_output))
Example 5
    def _build(self, inputs):
        """Transforms the inputs with an MLP layer and packs the results to have
        the same structure with the decoder state.

        Args:
            inputs: Input (structure of) tensors to be transformed and passed
                to the decoder. Must be a Tensor of shape `[batch_size, ...]`
                or a (nested) tuple of such Tensors.

        Returns:
            A Tensor or a (nested) tuple of Tensors with the same structure
            as the decoder state.
        """
        activation_fn = layers.get_activation_fn(self.hparams.activation_fn)

        output = _mlp_transform(inputs, self._output_size, activation_fn)

        if not self._built:
            self._add_internal_trainable_variables()
            self._built = True

        return output
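The helper _mlp_transform is not shown in this snippet. Conceptually it flattens each input tensor after the batch dimension, applies a single fully-connected layer, and reshapes the result to the requested size. A minimal sketch for the single-tensor, flat-shape case (the real helper also handles nested tuples of tensors; the name and details here are assumptions):

import numpy as np
import tensorflow as tf

def mlp_transform_flat(inputs, output_size, activation_fn):
    # Collapse everything after the batch dimension: [batch_size, dim].
    batch_size = tf.shape(inputs)[0]
    flat = tf.reshape(inputs, [batch_size, -1])
    # One dense layer whose units match the flattened target size.
    units = int(np.prod(output_size))
    outputs = tf.layers.dense(flat, units, activation=activation_fn)
    # Restore the requested trailing shape.
    return tf.reshape(outputs, [batch_size] + list(output_size))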
Example 6
    def _build(self, inputs):
        """Transforms inputs with an MLP layer and packs the results to have
        the same structure as specified by :attr:`output_size`.

        Args:
            inputs: Input (structure of) tensors to be transformed. Must be a
                Tensor of shape `[batch_size, ...]` or a (nested) tuple of
                such Tensors. That is, the first dimension of (each) tensor
                must be the batch dimension.

        Returns:
            A Tensor or a (nested) tuple of Tensors with the same structure
            as `output_size`.
        """
        activation_fn = layers.get_activation_fn(self.hparams.activation_fn)

        output = _mlp_transform(inputs, self._output_size, activation_fn)

        if not self._built:
            self._add_internal_trainable_variables()
            self._built = True

        return output