Example No. 1
    def test_uniquify_str(self):
        r"""Tests :func:`texar.torch.utils.uniquify_str`.
        """
        str_set = ['str']
        unique_str = utils.uniquify_str('str', str_set)
        self.assertEqual(unique_str, 'str_1')

        str_set.append('str_1')
        str_set.append('str_2')
        unique_str = utils.uniquify_str('str', str_set)
        self.assertEqual(unique_str, 'str_3')
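
The behavior exercised above can be sketched as follows (a minimal re-implementation under assumed semantics, not the library code): return the string unchanged if it is not taken, otherwise append the smallest "_n" suffix that makes it unique.

def uniquify_str_sketch(string: str, str_set) -> str:
    # Assumed semantics, matching the test: 'str' in {'str'} -> 'str_1';
    # 'str' in {'str', 'str_1', 'str_2'} -> 'str_3'.
    if string not in str_set:
        return string
    n = 1
    while "%s_%d" % (string, n) in str_set:
        n += 1
    return "%s_%d" % (string, n)
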
    def _build_layers(self,
                      layers: Optional[nn.ModuleList] = None,
                      layer_hparams: Optional[List[
                          Union[HParams, Dict[str, Any]]]] = None):
        r"""Builds layers.

        Either :attr:`layer_hparams` or :attr:`layers` must be
        provided. If both are given, :attr:`layers` will be used.

        Args:
            layers (optional): A list of layer instances supplied as an instance
                of :torch_nn:`ModuleList`.
            layer_hparams (optional): A list of layer hparams, each of which
                is fed to :func:`~texar.torch.core.layers.get_layer` to create
                the layer instance.
        """
        if layers is not None:
            self._layers = layers
        else:
            if layer_hparams is None:
                raise ValueError(
                    'Either `layers` or `layer_hparams` is required.')
            self._layers = nn.ModuleList()
            for hparams in layer_hparams:
                self._layers.append(get_layer(hparams=hparams))

        for layer in self._layers:
            layer_name = uniquify_str(layer.__class__.__name__,
                                      self._layer_names)
            self._layer_names.append(layer_name)
            self._layers_by_name[layer_name] = layer
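
A hypothetical call showing the hparams path (the network instance `net` and the layer choices are assumptions for illustration; each dict is fed to get_layer):

net._build_layers(layer_hparams=[
    {"type": "Linear", "kwargs": {"in_features": 128, "out_features": 64}},
    {"type": "ReLU", "kwargs": {}},
    {"type": "ReLU", "kwargs": {}},  # duplicate class name
])
# uniquify_str keeps the name registry distinct, so the layers are
# retrievable as "Linear", "ReLU", and "ReLU_1" in self._layers_by_name.
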
    def append_layer(self, layer: Union[nn.Module, HParams, Dict[str, Any]]):
        r"""Appends a layer to the end of the network.

        Args:
            layer: A subclass of :torch_nn:`Module`, or a dict of layer
                hyperparameters.
        """
        layer_ = layer
        if not isinstance(layer_, nn.Module):
            layer_ = get_layer(hparams=layer_)
        self._layers.append(layer_)
        layer_name = uniquify_str(layer_.__class__.__name__, self._layer_names)
        self._layer_names.append(layer_name)
        self._layers_by_name[layer_name] = layer_
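
Illustrative usage (assuming `net` is an instance of a network class exposing this method; both calls have the same effect once the hparams dict is built into a module):

import torch.nn as nn

net.append_layer(nn.Linear(64, 32))               # a ready module instance
net.append_layer({"type": "ReLU", "kwargs": {}})  # hparams, built via get_layer
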
Example No. 4
def get_layer(hparams: Union[HParams, Dict[str, Any]]) -> nn.Module:
    r"""Makes a layer instance.

    The layer must be an instance of :torch_nn:`Module`.

    Args:
        hparams (dict or HParams): Hyperparameters of the layer, with
            structure:

            .. code-block:: python

                {
                    "type": "LayerClass",
                    "kwargs": {
                        # Keyword arguments of the layer class
                        # ...
                    }
                }

            Here:

            `"type"`: str or layer class or layer instance
                The layer type. This can be

                - The string name or full module path of a layer class. If
                  the class name is provided, the class must be in module
                  :torch_nn:`Module`, :mod:`texar.torch.core`, or
                  :mod:`texar.torch.custom`.
                - A layer class.
                - An instance of a layer class.

                For example

                .. code-block:: python

                    "type": "Conv1D"                               # class name
                    "type": "texar.torch.core.MaxReducePooling1D"  # module path
                    "type": "my_module.MyLayer"                    # module path
                    "type": torch.nn.Module.Linear                 # class
                    "type": Conv1D(filters=10, kernel_size=2)  # cell instance
                    "type": MyLayer(...)                       # cell instance

            `"kwargs"`: dict
                A dictionary of keyword arguments for constructor of the
                layer class. Ignored if :attr:`"type"` is a layer instance.

                - Arguments named "activation" can be a callable, or a `str` of
                  the name or module path to the activation function.
                - Arguments named "\*_regularizer" and "\*_initializer" can be a
                  class instance, or a `dict` of hyperparameters of respective
                  regularizers and initializers. See
                - Arguments named "\*_constraint" can be a callable, or a `str`
                  of the name or full path to the constraint function.

    Returns:
        A layer instance. If ``hparams["type"]`` is a layer instance, returns it
        directly.

    Raises:
        ValueError: If :attr:`hparams` is `None`.
        ValueError: If the resulting layer is not an instance of
            :torch_nn:`Module`.
    """
    if hparams is None:
        raise ValueError("`hparams` must not be `None`.")

    layer_type = hparams["type"]
    if not is_str(layer_type) and not isinstance(layer_type, type):
        layer = layer_type
    else:
        layer_modules = ["torch.nn", "texar.torch.core", "texar.torch.custom"]
        layer_class: Type[nn.Module] = utils.check_or_get_class(
            layer_type, layer_modules)
        if isinstance(hparams, dict):
            if (layer_class.__name__ == "Linear" and
                    "in_features" not in hparams["kwargs"]):
                raise ValueError("\"in_features\" should be specified for "
                                 "\"torch.nn.{}\"".format(layer_class.__name__))
            elif (layer_class.__name__ in ["Conv1d", "Conv2d", "Conv3d"] and
                  "in_channels" not in hparams["kwargs"]):
                raise ValueError("\"in_channels\" should be specified for "
                                 "\"torch.nn.{}\"".format(layer_class.__name__))
            default_kwargs = _layer_class_to_default_kwargs_map.get(
                layer_class, {})
            default_hparams = {"type": layer_type, "kwargs": default_kwargs}
            hparams = HParams(hparams, default_hparams)

        # This case needs to be handled separately because
        # :torch_nn:`Sequential` does not accept kwargs.
        if layer_type == "Sequential":
            names: List[str] = []
            layer = nn.Sequential()
            sub_hparams = hparams.kwargs.layers
            for hparam in sub_hparams:
                sub_layer = get_layer(hparam)
                name = utils.uniquify_str(sub_layer._get_name(), names)
                names.append(name)
                layer.add_module(name=name, module=sub_layer)
        else:
            layer = utils.get_instance(layer_type, hparams.kwargs.todict(),
                                       layer_modules)

    if not isinstance(layer, nn.Module):
        raise ValueError("layer must be an instance of `torch.nn.Module`.")

    return layer
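
A short usage sketch (the layer choices are illustrative; note that "in_features"/"in_channels" must be given explicitly, as enforced above):

import torch.nn as nn

conv = get_layer({
    "type": "Conv1d",
    "kwargs": {"in_channels": 16, "out_channels": 32, "kernel_size": 3},
})
assert isinstance(conv, nn.Conv1d)

# "Sequential" takes a "layers" list instead of plain constructor kwargs,
# matching the special case handled above.
seq = get_layer({
    "type": "Sequential",
    "kwargs": {"layers": [
        {"type": "Linear", "kwargs": {"in_features": 16, "out_features": 8}},
        {"type": "ReLU", "kwargs": {}},
    ]},
})
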
Example No. 5
    def _build_conv1d_hparams(self, in_channels, pool_hparams):
        r"""Creates the hparams for each of the convolutional layers usable for
        :func:`texar.torch.core.layers.get_layer`.
        """
        nconv = self._hparams.num_conv_layers
        if len(pool_hparams) != nconv:
            raise ValueError("`pool_hparams` must be of length %d" % nconv)

        in_channels = [in_channels]
        out_channels = _to_list(self._hparams.out_channels, 'out_channels',
                                nconv)
        # because in_channels(i) = out_channels(i-1)
        in_channels.extend(out_channels[:-1])

        if nconv == 1:
            kernel_size = _to_list(self._hparams.kernel_size)
            if not isinstance(kernel_size[0], (list, tuple)):
                kernel_size = [kernel_size]
        elif nconv > 1:
            kernel_size = _to_list(self._hparams.kernel_size,
                                   'kernel_size', nconv)
            kernel_size = [_to_list(ks) for ks in kernel_size]

        other_kwargs = self._hparams.other_conv_kwargs
        if isinstance(other_kwargs, HParams):
            other_kwargs = other_kwargs.todict()
            other_kwargs = _to_list(other_kwargs, "other_conv_kwargs", nconv)
        elif isinstance(other_kwargs, (list, tuple)):
            if len(other_kwargs) != nconv:
                raise ValueError("The length of hparams['other_conv_kwargs'] "
                                 "must be equal to 'num_conv_layers'")
        else:
            raise ValueError("hparams['other_conv_kwargs'] must be a either "
                             "a dict or a list.")

        def _activation_hparams(name, kwargs=None):
            if kwargs is not None:
                return {"type": name, "kwargs": kwargs}
            else:
                return {"type": name, "kwargs": {}}

        conv_pool_hparams = []
        for i in range(nconv):
            hparams_i = []
            names = []
            if isinstance(other_kwargs[i], dict):
                other_kwargs[i] = _to_list(other_kwargs[i], "other_kwargs[i]",
                                           len(kernel_size[i]))
            elif (isinstance(other_kwargs[i], (list, tuple))
                  and len(other_kwargs[i]) != len(kernel_size[i])):
                raise ValueError("The length of hparams['other_conv_kwargs'][i]"
                                 " must be equal to the length of "
                                 "hparams['kernel_size'][i]")
            for idx, ks_ij in enumerate(kernel_size[i]):
                name = uniquify_str("conv_%d" % (i + 1), names)
                names.append(name)
                conv_kwargs_ij = {
                    "in_channels": in_channels[i],
                    "out_channels": out_channels[i],
                    "kernel_size": ks_ij
                }
                conv_kwargs_ij.update(other_kwargs[i][idx])
                hparams_i.append(
                    {"type": "Conv1d", "kwargs": conv_kwargs_ij})
            if len(hparams_i) == 1:
                if self._hparams.conv_activation:
                    layers = {
                        "layers": [hparams_i[0],
                                   _activation_hparams(
                                       self._hparams.conv_activation,
                                       self._hparams.conv_activation_kwargs)]}
                    sequential_layer = {"type": "Sequential", "kwargs": layers}
                    conv_pool_hparams.append([sequential_layer,
                                              pool_hparams[i]])
                else:
                    conv_pool_hparams.append([hparams_i[0], pool_hparams[i]])
            else:  # creates MergeLayer
                mrg_kwargs_layers = []
                for hparams_ij in hparams_i:
                    if self._hparams.conv_activation:
                        seq_kwargs_j = {
                            "layers": [
                                hparams_ij,
                                _activation_hparams(
                                    self._hparams.conv_activation,
                                    self._hparams.conv_activation_kwargs),
                                pool_hparams[i]
                            ]
                        }
                    else:
                        seq_kwargs_j = {"layers": [hparams_ij, pool_hparams[i]]}
                    mrg_kwargs_layers.append(
                        {"type": "Sequential", "kwargs": seq_kwargs_j})
                mrg_hparams = {"type": "MergeLayer",
                               "kwargs": {"layers": mrg_kwargs_layers}}
                conv_pool_hparams.append(mrg_hparams)

        return conv_pool_hparams
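
For orientation, an illustrative return value (all values are assumptions) for num_conv_layers=1, in_channels=128, out_channels=100, kernel_size=[3], and conv_activation="ReLU"; the single conv layer and its activation are wrapped in a Sequential and paired with the pooling hparams:

pool_hparams_0 = {"type": "MaxPool1d", "kwargs": {"kernel_size": 2}}  # assumed
conv_pool_hparams_example = [
    [
        {"type": "Sequential", "kwargs": {"layers": [
            {"type": "Conv1d", "kwargs": {"in_channels": 128,
                                          "out_channels": 100,
                                          "kernel_size": 3}},
            {"type": "ReLU", "kwargs": {}},
        ]}},
        pool_hparams_0,
    ]
]
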