Example #1
    def __init__(self,
                 output_size: OutputSize,
                 mlp_input_size: Union[torch.Size, MaybeTuple[int], int],
                 distribution: Union[Distribution, str] = 'MultivariateNormal',
                 distribution_kwargs: Optional[Dict[str, Any]] = None,
                 hparams: Optional[HParams] = None):
        super().__init__(output_size, hparams)
        if distribution_kwargs is None:
            distribution_kwargs = {}
        self._dstr_type = distribution
        self._dstr_kwargs = distribution_kwargs

        # Tensor-valued distribution arguments (e.g. a `loc` tensor) are
        # wrapped in nn.Parameter and registered, so they are trained
        # along with the rest of the module.
        for dstr_attr, dstr_val in distribution_kwargs.items():
            if isinstance(dstr_val, torch.Tensor):
                dstr_param = nn.Parameter(dstr_val)
                distribution_kwargs[dstr_attr] = dstr_param
                self.register_parameter(dstr_attr, dstr_param)
        # Flatten a possibly multi-dimensional input size into a single
        # feature count for the linear layer.
        if isinstance(mlp_input_size, int):
            input_feature = mlp_input_size
        else:
            input_feature = np.prod(mlp_input_size)
        self._linear_layer = nn.Linear(
            input_feature, _sum_output_size(output_size))

        self._activation_fn = get_activation_fn(
            self.hparams.activation_fn)
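
This constructor appears to be Texar-PyTorch's ReparameterizedStochasticConnector, which draws reparameterized samples from the distribution and transforms them with the linear layer. A minimal construction sketch under that assumption; the import path and all sizes are illustrative, not taken from the snippet:

    import torch
    from texar.torch.modules import ReparameterizedStochasticConnector

    # Hypothetical sizes: a 16-dim latent drawn from a trainable
    # MultivariateNormal, transformed to a 32-dim output. The tensor-valued
    # kwargs become registered nn.Parameters via the loop above.
    connector = ReparameterizedStochasticConnector(
        output_size=32,
        mlp_input_size=16,
        distribution='MultivariateNormal',
        distribution_kwargs={'loc': torch.zeros(16),
                             'scale_tril': torch.eye(16)})

Because `loc` and `scale_tril` are registered as parameters, the distribution itself is learned during training.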
Example #2
    def __init__(self,
                 output_size: OutputSize,
                 mlp_input_size: Union[torch.Size, MaybeTuple[int], int],
                 distribution: Union[Distribution, str] = 'Categorical',
                 distribution_kwargs: Optional[Dict[str, Any]] = None,
                 hparams: Optional[HParams] = None):
        super().__init__(output_size, hparams)
        if distribution_kwargs is None:
            distribution_kwargs = {}
        self._dstr_kwargs = distribution_kwargs
        # A string name is resolved to a Distribution class from the listed
        # modules and instantiated with the given kwargs.
        if isinstance(distribution, str):
            self._dstr: Distribution = utils.check_or_get_instance(
                distribution, self._dstr_kwargs,
                ["torch.distributions", "texar.custom"])
        else:
            self._dstr = distribution

        # Samples from this connector are non-differentiable, so reject
        # distributions that support rsample(); reparameterizable
        # distributions belong in the connector from Example #1.
        if self._dstr.has_rsample:
            raise ValueError("Distribution should not be reparameterizable")

        if isinstance(mlp_input_size, int):
            input_feature = mlp_input_size
        else:
            input_feature = np.prod(mlp_input_size)
        self._linear_layer = nn.Linear(
            input_feature, _sum_output_size(output_size))

        self._activation_fn = get_activation_fn(
            self.hparams.activation_fn)
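
The `has_rsample` guard marks this as Texar-PyTorch's StochasticConnector, whose samples are non-differentiable. Note that the default distribution would always fail the guard if it were reparameterizable, so a non-reparameterizable one must be used. A construction sketch under that assumption; the import path and sizes are illustrative:

    import torch
    from texar.torch.modules import StochasticConnector

    # 'Bernoulli' has has_rsample == False, so it passes the guard above;
    # a reparameterizable choice such as 'MultivariateNormal' would raise
    # ValueError in __init__.
    connector = StochasticConnector(
        output_size=32,
        mlp_input_size=10,
        distribution='Bernoulli',
        distribution_kwargs={'logits': torch.zeros(10)})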
Example #3
    def __init__(self,
                 output_size: OutputSize,
                 linear_layer_dim: int,
                 hparams: Optional[HParams] = None):
        super().__init__(output_size, hparams)
        self._linear_layer = nn.Linear(
            linear_layer_dim, _sum_output_size(output_size))
        self._activation_fn = get_activation_fn(
            self.hparams.activation_fn)
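
This last variant matches Texar-PyTorch's MLPTransformConnector: a plain linear transform plus activation, with no sampling. A usage sketch; the import path, sizes, and batch dimension are assumptions:

    import torch
    from texar.torch.modules import MLPTransformConnector

    # Hypothetical sizes: map a 256-dim encoder state to a 128-dim output.
    connector = MLPTransformConnector(output_size=128, linear_layer_dim=256)
    output = connector(torch.randn(8, 256))  # expected shape: [8, 128]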