Example #1
    def __init__(self, hparams=None):
        ClassifierBase.__init__(self, hparams)

        with tf.variable_scope(self.variable_scope):
            encoder_hparams = utils.dict_fetch(
                hparams, Conv1DEncoder.default_hparams())
            self._encoder = Conv1DEncoder(hparams=encoder_hparams)

            # Add an additional dense layer if needed
            self._num_classes = self._hparams.num_classes
            if self._num_classes > 0:
                if self._hparams.num_dense_layers <= 0:
                    self._encoder.append_layer({"type": "Flatten"})

                logit_kwargs = self._hparams.logit_layer_kwargs
                if logit_kwargs is None:
                    logit_kwargs = {}
                elif not isinstance(logit_kwargs, HParams):
                    raise ValueError(
                        "hparams['logit_layer_kwargs'] must be a dict.")
                else:
                    logit_kwargs = logit_kwargs.todict()
                logit_kwargs.update({"units": self._num_classes})
                if 'name' not in logit_kwargs:
                    logit_kwargs['name'] = "logit_layer"

                self._encoder.append_layer(
                    {"type": "Dense", "kwargs": logit_kwargs})
Example #2
    def __init__(self, in_channels: int, in_features: Optional[int] = None,
                 hparams: Optional[Union[HParams, Dict[str, Any]]] = None):
        super().__init__(hparams=hparams)

        encoder_hparams = utils.dict_fetch(hparams,
                                           Conv1DEncoder.default_hparams())
        self._encoder = Conv1DEncoder(in_channels=in_channels,
                                      in_features=in_features,
                                      hparams=encoder_hparams)

        # Add an additional dense layer if needed
        self._num_classes = self._hparams.num_classes
        if self._num_classes > 0:
            if self._hparams.num_dense_layers <= 0:
                self._encoder.append_layer({"type": "Flatten"})

            logit_kwargs = self._hparams.logit_layer_kwargs
            if logit_kwargs is None:
                logit_kwargs = {}
            elif not isinstance(logit_kwargs, HParams):
                raise ValueError(
                    "hparams['logit_layer_kwargs'] must be a dict.")
            else:
                logit_kwargs = logit_kwargs.todict()
            logit_kwargs.update({"out_features": self._num_classes})

            self._encoder.append_layer({"type": "Linear",
                                        "kwargs": logit_kwargs})
Example #3
    def default_hparams() -> Dict[str, Any]:
        r"""Returns a dictionary of hyperparameters with default values.

        .. code-block:: python

            {
                # (1) Same hyperparameters as in Conv1DEncoder
                ...

                # (2) Additional hyperparameters
                "num_classes": 2,
                "logit_layer_kwargs": {
                    "use_bias": False
                },
                "name": "conv1d_classifier"
            }

        Here:

        1. Same hyperparameters as in :class:`~texar.modules.Conv1DEncoder`.
           See the :meth:`~texar.modules.Conv1DEncoder.default_hparams`.
           An instance of :class:`~texar.modules.Conv1DEncoder` is created for
           feature extraction.

        2. Additional hyperparameters:

           `"num_classes"`: int
               Number of classes:

               - If `> 0`, an additional :torch_nn:`Linear`
                 layer is appended to the encoder to compute the logits over
                 classes.

               - If `<= 0`, no dense layer is appended. The number of
                 classes is assumed to be equal to ``out_features`` of the
                 final dense layer of the encoder.

           `"logit_layer_kwargs"`: dict
               Keyword arguments for the logit :torch_nn:`Linear` layer
               constructor, except for argument ``out_features`` which is set
               to ``"num_classes"``. Ignored if no extra logit layer is
               appended.

           `"name"`: str
               Name of the classifier.
        """
        hparams = Conv1DEncoder.default_hparams()
        hparams.update({
            "name": "conv1d_classifier",
            "num_classes": 2,  # set to <=0 to avoid appending output layer
            "logit_layer_kwargs": {
                "in_features": hparams["out_features"],
                "bias": True
            }
        })
        return hparams
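These defaults can be partially overridden by passing an hparams dict to the constructor; keys that are omitted keep the values shown above. A hedged sketch with illustrative values:

custom_hparams = {
    "num_classes": 5,                       # instead of the default 2
    "logit_layer_kwargs": {"bias": False},  # other kwargs keep their defaults
}
clf = Conv1DClassifier(in_channels=300, in_features=20,
                       hparams=custom_hparams)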
Example #4
    def default_hparams():
        """Returns a dictionary of hyperparameters with default values.
        """
        hparams = Conv1DEncoder.default_hparams()
        hparams.update({
            "name": "conv1d_classifier",
            "num_classes": 2,  # set to <=0 to avoid appending output layer
            "logit_layer_kwargs": {"use_bias": False}
        })
        return hparams
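As the inline comment says, a non-positive "num_classes" disables the extra output layer. A brief sketch of that case for the TF variant, with the same import assumption as in the earlier sketch:

import texar as tx

clf = tx.modules.Conv1DClassifier(hparams={"num_classes": 0})
# No "logit_layer" Dense layer is appended (see the constructor in Example #1);
# the encoder's own final dense layer produces the class scores.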
Example #5
    def default_hparams():
        """Returns a dictionary of hyperparameters with default values.

        .. code-block:: python

            {
                # (1) Same hyperparameters as in Conv1DEncoder
                ...

                # (2) Additional hyperparameters
                "num_classes": 2,
                "logit_layer_kwargs": {
                    "use_bias": False
                },
                "name": "conv1d_classifier"
            }

        Here:

        1. Same hyperparameters as in :class:`~texar.modules.Conv1DEncoder`.
           See the :meth:`~texar.modules.Conv1DEncoder.default_hparams`.
           An instance of Conv1DEncoder is created for feature extraction.

        2. Additional hyperparameters:

            "num_classes" : int
                Number of classes:

                - If **`> 0`**, an additional :tf_main:`Dense <layers/Dense>` \
                layer is appended to the encoder to compute the logits over \
                classes.
                - If **`<= 0`**, no dense layer is appended. The number of \
                classes is assumed to be the final dense layer size of the \
                encoder.

            "logit_layer_kwargs" : dict
                Keyword arguments for the logit Dense layer constructor,
                except for argument "units" which is set to "num_classes".
                Ignored if no extra logit layer is appended.

            "name" : str
                Name of the classifier.
        """
        hparams = Conv1DEncoder.default_hparams()
        hparams.update({
            "name": "conv1d_classifier",
            "num_classes": 2,  #set to <=0 to avoid appending output layer
            "logit_layer_kwargs": {
                "use_bias": False
            }
        })
        return hparams
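A hedged sketch of overriding these TF defaults; note that, per the constructor in Example #1, "units" is always forced to "num_classes", so only the other Dense keyword arguments are worth customizing here. The values below are illustrative.

import texar as tx

clf = tx.modules.Conv1DClassifier(hparams={
    "num_classes": 4,
    "logit_layer_kwargs": {"use_bias": True, "name": "output_logits"},
})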