    def _build_layers(self,
                      layers: Optional[nn.ModuleList] = None,
                      layer_hparams: Optional[List[
                          Union[HParams, Dict[str, Any]]]] = None):
        r"""Builds layers.

        Either :attr:`layer_hparams` or :attr:`layers` must be
        provided. If both are given, :attr:`layers` will be used.

        Args:
            layers (optional): A list of layer instances supplied as an instance
                of :torch_nn:`ModuleList`.
            layer_hparams (optional): A list of layer hparams, each of which
                is fed to :func:`~texar.torch.core.layers.get_layer` to create
                the layer instance.
        """
        if layers is not None:
            self._layers = layers
        else:
            if layer_hparams is None:
                raise ValueError(
                    'Either `layers` or `layer_hparams` is required.')
            self._layers = nn.ModuleList()
            for hparams in layer_hparams:
                self._layers.append(get_layer(hparams=hparams))

        for layer in self._layers:
            layer_name = uniquify_str(layer.__class__.__name__,
                                      self._layer_names)
            self._layer_names.append(layer_name)
            self._layers_by_name[layer_name] = layer
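# A minimal, hypothetical sketch of the `layer_hparams` path above: each dict
# is fed to texar's get_layer() to build the ModuleList. The layer types and
# sizes are illustrative assumptions, not defaults from the library.
import torch.nn as nn

from texar.torch.core.layers import get_layer

layer_hparams = [
    {"type": "Linear", "kwargs": {"in_features": 64, "out_features": 128}},
    {"type": "ReLU", "kwargs": {}},
    {"type": "Linear", "kwargs": {"in_features": 128, "out_features": 10}},
]

# Equivalent to what the `else` branch of `_build_layers` does.
layers_ = nn.ModuleList([get_layer(hparams=h) for h in layer_hparams])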
Example #2
def _build_dense_output_layer(cell_output_size: int,
                              hparams: HParams) -> Optional[nn.Sequential]:
    r"""Build the output layers.

    Args:
        cell_output_size: The output size of the rnn cell.
        hparams (dict or HParams): Hyperparameters. Missing hyperparameters
            will be set to default values. See
            :meth:`default_hparams` for the hyperparameter structure and
            default values.

    Returns:
        A :torch_nn:`Sequential` module containing the output layers, or
        ``None`` if ``hparams.num_layers`` is not positive.
    """
    nlayers = hparams.num_layers

    if nlayers <= 0:
        return None

    layer_size = _to_list(hparams.layer_size, 'output_layer.layer_size',
                          nlayers)

    dropout_layer_ids = _to_list(hparams.dropout_layer_ids)

    other_kwargs = hparams.other_dense_kwargs or {}
    if isinstance(other_kwargs, HParams):
        other_kwargs = other_kwargs.todict()
    if not isinstance(other_kwargs, dict):
        raise ValueError(
            "hparams 'output_layer.other_dense_kwargs' must be a dict.")

    output_layers: List[nn.Module] = []
    for i in range(nlayers):
        if i in dropout_layer_ids:
            # TODO: Variational dropout is not implemented.
            output_layers.append(nn.Dropout(p=hparams.dropout_rate))

        # The first dense layer consumes the cell output; subsequent layers
        # chain the preceding layer sizes.
        dense_layer = nn.Linear(
            in_features=(cell_output_size if i == 0 else layer_size[i - 1]),
            out_features=layer_size[i],
            **other_kwargs)

        output_layers.append(dense_layer)

        if i == nlayers - 1:
            activation = hparams.final_layer_activation
        else:
            activation = hparams.activation

        if activation is not None:
            layer_hparams = {"type": activation, "kwargs": {}}
            activation_layer = layers.get_layer(hparams=layer_hparams)
            output_layers.append(activation_layer)

    # A dropout id equal to ``num_layers`` places a dropout layer after the
    # final dense layer.
    if nlayers in dropout_layer_ids:
        output_layers.append(nn.Dropout(p=hparams.dropout_rate))

    return nn.Sequential(*output_layers)
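# A hedged usage sketch for `_build_dense_output_layer`: the hyperparameter
# names follow the function body above, but the concrete values (layer sizes,
# dropout ids, cell output size) are illustrative assumptions, not library
# defaults. The dict is wrapped in HParams because the function reads the
# fields via attribute access.
from texar.torch.hyperparams import HParams

output_layer_hparams = {
    "num_layers": 2,
    "layer_size": [256, 10],          # one size per dense layer
    "activation": "ReLU",
    "final_layer_activation": None,   # no activation after the last layer
    "other_dense_kwargs": None,
    "dropout_layer_ids": [0],         # dropout before the first dense layer
    "dropout_rate": 0.5,
}

output_layer = _build_dense_output_layer(
    cell_output_size=128,
    hparams=HParams(output_layer_hparams, output_layer_hparams))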
Example #3
    def test_get_layer(self):
        r"""Tests :func:`texar.torch.core.layers.get_layer`.
        """
        hparams = {"type": "Conv1d",
                   "kwargs": {"in_channels": 16,
                              "out_channels": 32,
                              "kernel_size": 2}
                   }

        layer = layers.get_layer(hparams)
        self.assertIsInstance(layer, nn.Conv1d)

        hparams = {
            "type": "MergeLayer",
            "kwargs": {
                "layers": [
                    {"type": "Conv1d",
                     "kwargs": {"in_channels": 16,
                                "out_channels": 32,
                                "kernel_size": 2}},
                    {"type": "Conv1d",
                     "kwargs": {"in_channels": 16,
                                "out_channels": 32,
                                "kernel_size": 2}}
                ]
            }
        }
        layer = layers.get_layer(hparams)
        self.assertIsInstance(layer, layers.MergeLayer)

        hparams = {
            "type": nn.Conv1d(in_channels=16, out_channels=32, kernel_size=2)
        }
        layer = layers.get_layer(hparams)
        self.assertIsInstance(layer, nn.Conv1d)

    def append_layer(self, layer: Union[nn.Module, HParams, Dict[str, Any]]):
        r"""Appends a layer to the end of the network.

        Args:
            layer: A :torch_nn:`Module` instance, or an
                :class:`~texar.torch.HParams` or dict of layer hyperparameters.
        """
        layer_ = layer
        if not isinstance(layer_, nn.Module):
            layer_ = get_layer(hparams=layer_)
        self._layers.append(layer_)
        layer_name = uniquify_str(layer_.__class__.__name__, self._layer_names)
        self._layer_names.append(layer_name)
        self._layers_by_name[layer_name] = layer_
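# A brief, hedged usage sketch: assuming `append_layer` is exposed by
# texar-pytorch's FeedForwardNetwork (which builds on the base class this
# method belongs to), layers can be appended either as module instances or as
# layer hparams dicts. The layer sizes below are illustrative.
import torch.nn as nn

from texar.torch.modules import FeedForwardNetwork

network = FeedForwardNetwork()
network.append_layer(nn.Linear(in_features=32, out_features=64))  # module instance
network.append_layer({"type": "ReLU", "kwargs": {}})              # hparams dict
network.append_layer({"type": "Linear",
                      "kwargs": {"in_features": 64, "out_features": 10}})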