Code Example #1
File: utils_test.py  Project: ml-lab/Text_Infilling
    def test_uniquify_str(self):
        """Tests :func:`texar.utils.uniquify_str`.
        """
        str_set = ['str']
        unique_str = utils.uniquify_str('str', str_set)
        self.assertEqual(unique_str, 'str_1')

        str_set.append('str_1')
        str_set.append('str_2')
        unique_str = utils.uniquify_str('str', str_set)
        self.assertEqual(unique_str, 'str_3')
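The contract the test exercises can be summarized in a minimal sketch,
inferred from the assertions above (an illustration, not the actual texar
implementation):

def uniquify_str_sketch(str_, str_set):
    # Return `str_` unchanged if it is not yet taken; otherwise append the
    # smallest integer suffix that produces a string not in `str_set`.
    if str_ not in str_set:
        return str_
    for i in range(1, len(str_set) + 2):
        candidate = '{}_{}'.format(str_, i)
        if candidate not in str_set:
            return candidate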
Code Example #2
File: network_base.py  Project: ml-lab/Text_Infilling
def _build_layers(network, layers=None, layer_hparams=None):
    """Builds layers.

    Either :attr:`layer_hparams` or :attr:`layers` must be
    provided. If both are given, :attr:`layers` will be used.

    Args:
        network: An instance of a subclass of
            :class:`~texar.modules.networks.network_base.FeedForwardNetworkBase`.
        layers (optional): A list of layer instances.
        layer_hparams (optional): A list of layer hparams, each of which
            is fed to :func:`~texar.core.layers.get_layer` to create a
            layer instance.
    """
    with tf.variable_scope(network.variable_scope):
        if layers is not None:
            network._layers = layers
        else:
            if layer_hparams is None:
                raise ValueError(
                    'Either `layers` or `layer_hparams` is required.')
            network._layers = []
            for hparams in layer_hparams:
                network._layers.append(get_layer(hparams=hparams))

    for layer in network._layers:
        layer_name = uniquify_str(layer.name, network._layer_names)
        network._layer_names.append(layer_name)
        network._layers_by_name[layer_name] = layer
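A hedged usage sketch for this TF variant (hypothetical values; assumes
`network` is an instance of a FeedForwardNetworkBase subclass and that
"Dense" resolves to tf.layers.Dense through get_layer):

layer_hparams = [
    {"type": "Dense", "kwargs": {"units": 128}},
    {"type": "Dense", "kwargs": {"units": 10}},
]
_build_layers(network, layer_hparams=layer_hparams)
# Each layer is then registered under a name made unique by uniquify_str,
# so colliding `.name` values come out as e.g. "dense" and "dense_1".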
Code Example #3
def _build_layers(network: FeedForwardNetworkBase,
                  layers: Optional[nn.ModuleList] = None,
                  layer_hparams: Optional[List[Union[HParams,
                                                     Dict[str, Any]]]] = None):
    r"""Builds layers.

    Either :attr:`layer_hparams` or :attr:`layers` must be
    provided. If both are given, :attr:`layers` will be used.

    Args:
        network: An instance of a subclass of
            :class:`~texar.modules.networks.FeedForwardNetworkBase`.
        layers (optional): A list of layer instances supplied as an instance of
            :torch_nn:`ModuleList`.
        layer_hparams (optional): A list of layer hparams, each of which
            is fed to :func:`~texar.core.layers.get_layer` to create a
            layer instance.
    """
    if layers is not None:
        network._layers = layers
    else:
        if layer_hparams is None:
            raise ValueError('Either `layers` or `layer_hparams` is required.')
        network._layers = nn.ModuleList()
        for hparams in layer_hparams:
            network._layers.append(get_layer(hparams=hparams))

    for layer in network._layers:
        layer_name = uniquify_str(layer._get_name(), network._layer_names)
        network._layer_names.append(layer_name)
        network._layers_by_name[layer_name] = layer
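The PyTorch variant can be exercised the same way (illustrative values;
"Linear" resolves to torch.nn.Linear through get_layer, which requires
"in_features" to be specified):

layer_hparams = [
    {"type": "Linear", "kwargs": {"in_features": 32, "out_features": 64}},
    {"type": "Linear", "kwargs": {"in_features": 64, "out_features": 10}},
]
_build_layers(network, layer_hparams=layer_hparams)
# Both layers report _get_name() == "Linear", so uniquify_str registers
# them as "Linear" and "Linear_1".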
Code Example #4
    def _build_conv1d_hparams(self, pool_hparams):
        """Creates the hparams for each of the conv layers usable for
        :func:`texar.core.layers.get_layer`.
        """
        nconv = self._hparams.num_conv_layers
        if len(pool_hparams) != nconv:
            raise ValueError("`pool_hparams` must be of length %d" % nconv)

        filters = _to_list(self._hparams.filters, 'filters', nconv)
        if nconv == 1:
            kernel_size = _to_list(self._hparams.kernel_size)
            if not isinstance(kernel_size[0], (list, tuple)):
                kernel_size = [kernel_size]
        elif nconv > 1:
            kernel_size = _to_list(self._hparams.kernel_size,
                                   'kernel_size', nconv)
            kernel_size = [_to_list(ks) for ks in kernel_size]

        other_kwargs = self._hparams.other_conv_kwargs or {}
        if isinstance(other_kwargs, HParams):
            other_kwargs = other_kwargs.todict()
        if not isinstance(other_kwargs, dict):
            raise ValueError("hparams['other_conv_kwargs'] must be a dict.")

        conv_pool_hparams = []
        activation_fn = get_activation_fn(
            self._hparams.conv_activation,
            self._hparams.conv_activation_kwargs)
        for i in range(nconv):
            hparams_i = []
            names = []
            for ks_ij in kernel_size[i]:
                name = uniquify_str("conv_%d" % (i+1), names)
                names.append(name)
                conv_kwargs_ij = {
                    "filters": filters[i],
                    "kernel_size": ks_ij,
                    "activation": activation_fn,
                    "name": name
                }
                conv_kwargs_ij.update(other_kwargs)
                hparams_i.append(
                    {"type": "Conv1D", "kwargs": conv_kwargs_ij})
            if len(hparams_i) == 1:
                conv_pool_hparams.append([hparams_i[0], pool_hparams[i]])
            else:  # creates MergeLayer
                mrg_kwargs_layers = []
                for hparams_ij in hparams_i:
                    seq_kwargs_j = {"layers": [hparams_ij, pool_hparams[i]]}
                    mrg_kwargs_layers.append(
                        {"type": "SequentialLayer", "kwargs": seq_kwargs_j})
                mrg_hparams = {"type": "MergeLayer",
                               "kwargs": {"layers": mrg_kwargs_layers,
                                          "name": "conv_pool_%d" % (i+1)}}
                conv_pool_hparams.append(mrg_hparams)

        return conv_pool_hparams
Code Example #5
    def append_layer(self, layer: Union[nn.Module, HParams, Dict[str, Any]]):
        r"""Appends a layer to the end of the network.

        Args:
            layer: An instance of :torch_nn:`Module`, or a dict of layer
                hyperparameters.
        """
        layer_ = layer
        if not isinstance(layer_, nn.Module):
            layer_ = get_layer(hparams=layer_)
        self._layers.append(layer_)
        layer_name = uniquify_str(layer_._get_name(), self._layer_names)
        self._layer_names.append(layer_name)
        self._layers_by_name[layer_name] = layer_
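A hedged usage sketch (hypothetical values; "Dropout" resolves to
torch.nn.Dropout through get_layer):

network.append_layer(nn.ReLU())
network.append_layer({"type": "Dropout", "kwargs": {"p": 0.5}})
# _get_name() returns the class name ("ReLU", "Dropout"), which is then
# uniquified against the names of previously appended layers.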
Code Example #6
File: network_base.py  Project: ml-lab/Text_Infilling
    def append_layer(self, layer):
        """Appends a layer to the end of the network. The method is only
        feasible before :attr:`_build` is called.

        Args:
            layer: A :tf_main:`tf.layers.Layer <layers/Layer>` instance, or
                a dict of layer hyperparameters.
        """
        if self._built:
            raise TexarError("`FeedForwardNetwork.append_layer` can be "
                             "called only before `_build` is called.")

        with tf.variable_scope(self.variable_scope):
            layer_ = layer
            if not isinstance(layer_, tf.layers.Layer):
                layer_ = get_layer(hparams=layer_)
            self._layers.append(layer_)
            layer_name = uniquify_str(layer_.name, self._layer_names)
            self._layer_names.append(layer_name)
            self._layers_by_name[layer_name] = layer_
Code Example #7
def get_layer(hparams: Union[HParams, Dict[str, Any]]) -> nn.Module:
    r"""Makes a layer instance.

    The layer must be an instance of :torch_nn:`Module`.

    Args:
        hparams (dict or HParams): Hyperparameters of the layer, with
            structure:

            .. code-block:: python

                {
                    "type": "LayerClass",
                    "kwargs": {
                        # Keyword arguments of the layer class
                        # ...
                    }
                }

            Here:

            `"type"`: str or layer class or layer instance
                The layer type. This can be

                - The string name or full module path of a layer class. If
                  the class name is provided, the class must be in module
                  :torch_nn:`Module`, :mod:`texar.core`, or :mod:`texar.custom`.
                - A layer class.
                - An instance of a layer class.

                For example

                .. code-block:: python

                    "type": "Conv1D"                 ..no         # class name
                    "type": "texar.core.MaxReducePooling1D"   # module path
                    "type": "my_module.MyLayer"               # module path
                    "type": torch.nn.Module.Linear            # class
                    "type": Conv1D(filters=10, kernel_size=2) # cell instance
                    "type": MyLayer(...)                      # cell instance

            `"kwargs"`: dict
                A dictionary of keyword arguments for constructor of the
                layer class. Ignored if :attr:`"type"` is a layer instance.

                - Arguments named "activation" can be a callable, or a `str` of
                  the name or module path to the activation function.
                - Arguments named "\*_regularizer" and "\*_initializer" can be
                  a class instance, or a `dict` of hyperparameters of the
                  respective regularizers and initializers.
                - Arguments named "\*_constraint" can be a callable, or a `str`
                  of the name or full path to the constraint function.

    Returns:
        A layer instance. If ``hparams["type"]`` is a layer instance, returns it
        directly.

    Raises:
        ValueError: If :attr:`hparams` is `None`.
        ValueError: If the resulting layer is not an instance of
            :torch_nn:`Module`.
    """
    if hparams is None:
        raise ValueError("`hparams` must not be `None`.")

    layer_type = hparams["type"]
    if not is_str(layer_type) and not isinstance(layer_type, type):
        layer = layer_type
    else:
        layer_modules = ["torch.nn", "texar.core", "texar.custom"]
        layer_class = utils.check_or_get_class(layer_type, layer_modules)
        if isinstance(hparams, dict):
            if (layer_class.__name__ == "Linear"
                    and "in_features" not in hparams["kwargs"]):
                raise ValueError("\"in_features\" should be specified for "
                                 "\"torch.nn.{}\"".format(
                                     layer_class.__name__))
            elif (layer_class.__name__ in ["Conv1d", "Conv2d", "Conv3d"]
                  and "in_channels" not in hparams["kwargs"]):
                raise ValueError("\"in_channels\" should be specified for "
                                 "\"torch.nn.{}\"".format(
                                     layer_class.__name__))
            default_kwargs = _layer_class_to_default_kwargs_map.get(
                layer_class, {})
            default_hparams = {"type": layer_type, "kwargs": default_kwargs}
            hparams = HParams(hparams, default_hparams)

        # This case needs to be handled separately because
        # :torch_nn:`Sequential` does not accept kwargs.
        if layer_type == "Sequential":
            names: List[str] = []
            layer = nn.Sequential()
            sub_hparams = hparams.kwargs.layers
            for hparam in sub_hparams:
                sub_layer = get_layer(hparam)
                name = utils.uniquify_str(sub_layer._get_name(), names)
                names.append(name)
                layer.add_module(name=name, module=sub_layer)
        else:
            layer = utils.get_instance(layer_type, hparams.kwargs.todict(),
                                       layer_modules)

    if not isinstance(layer, nn.Module):
        raise ValueError("layer must be an instance of `torch.nn.Module`.")

    return layer
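A hedged call example following the hparams format documented above
(illustrative values):

layer = get_layer({
    "type": "Sequential",
    "kwargs": {
        "layers": [
            {"type": "Linear",
             "kwargs": {"in_features": 16, "out_features": 8}},
            {"type": "ReLU", "kwargs": {}},
        ]
    }
})
# The two sub-layers are added under uniquified module names ("Linear",
# "ReLU"), and the result is a torch.nn.Sequential instance.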
Code Example #8
    def _build_conv1d_hparams(self, in_channels, pool_hparams):
        r"""Creates the hparams for each of the convolutional layers usable for
        :func:`texar.core.layers.get_layer`.
        """
        nconv = self._hparams.num_conv_layers
        if len(pool_hparams) != nconv:
            raise ValueError("`pool_hparams` must be of length %d" % nconv)

        in_channels = [in_channels]
        out_channels = _to_list(self._hparams.out_channels, 'out_channels',
                                nconv)
        # because in_channels(i) = out_channels(i-1)
        in_channels.extend(out_channels[:-1])

        if nconv == 1:
            kernel_size = _to_list(self._hparams.kernel_size)
            if not isinstance(kernel_size[0], (list, tuple)):
                kernel_size = [kernel_size]
        elif nconv > 1:
            kernel_size = _to_list(self._hparams.kernel_size, 'kernel_size',
                                   nconv)
            kernel_size = [_to_list(ks) for ks in kernel_size]

        other_kwargs = self._hparams.other_conv_kwargs or {}
        if isinstance(other_kwargs, HParams):
            other_kwargs = other_kwargs.todict()
        if not isinstance(other_kwargs, dict):
            raise ValueError("hparams['other_conv_kwargs'] must be a dict.")

        def _activation_hparams(name, kwargs=None):
            if kwargs is not None:
                return {"type": name, "kwargs": kwargs}
            else:
                return {"type": name, "kwargs": {}}

        conv_pool_hparams = []
        for i in range(nconv):
            hparams_i = []
            names = []
            for ks_ij in kernel_size[i]:
                name = uniquify_str("conv_%d" % (i + 1), names)
                names.append(name)
                conv_kwargs_ij = {
                    "in_channels": in_channels[i],
                    "out_channels": out_channels[i],
                    "kernel_size": ks_ij
                }
                conv_kwargs_ij.update(other_kwargs)
                hparams_i.append({"type": "Conv1d", "kwargs": conv_kwargs_ij})
            if len(hparams_i) == 1:
                if self._hparams.conv_activation:
                    layers = {
                        "layers": [
                            hparams_i[0],
                            _activation_hparams(
                                self._hparams.conv_activation,
                                self._hparams.conv_activation_kwargs)
                        ]
                    }
                    sequential_layer = {"type": "Sequential", "kwargs": layers}
                    conv_pool_hparams.append(
                        [sequential_layer, pool_hparams[i]])
                else:
                    conv_pool_hparams.append([hparams_i[0], pool_hparams[i]])
            else:  # creates MergeLayer
                mrg_kwargs_layers = []
                for hparams_ij in hparams_i:
                    if self._hparams.conv_activation:
                        seq_kwargs_j = {
                            "layers": [
                                hparams_ij,
                                _activation_hparams(
                                    self._hparams.conv_activation,
                                    self._hparams.conv_activation_kwargs),
                                pool_hparams[i]
                            ]
                        }
                    else:
                        seq_kwargs_j = {
                            "layers": [hparams_ij, pool_hparams[i]]
                        }
                    mrg_kwargs_layers.append({
                        "type": "Sequential",
                        "kwargs": seq_kwargs_j
                    })
                mrg_hparams = {
                    "type": "MergeLayer",
                    "kwargs": {
                        "layers": mrg_kwargs_layers
                    }
                }
                conv_pool_hparams.append(mrg_hparams)

        return conv_pool_hparams
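For intuition: with num_conv_layers == 1, a scalar kernel_size of 3,
out_channels == 100, in_channels == 128, and no conv_activation, the
returned structure is roughly the following (illustrative):

# conv_pool_hparams == [
#     [
#         {"type": "Conv1d",
#          "kwargs": {"in_channels": 128, "out_channels": 100,
#                     "kernel_size": 3}},
#         pool_hparams[0],
#     ]
# ]
# With multiple kernel sizes per conv layer, each conv/pool pair is wrapped
# in a "Sequential" and the pairs are combined by a "MergeLayer" instead.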