    def __init__(self, dims: Tuple[int, Tuple[int, ...], int],
                 activation_fn: Callable[[Tensor], Tensor]=tr.relu,
                 output_activation: Opt[Callable[[Tensor], Tensor]]=tr.sigmoid):
        """
        Generative network

        Generates samples from the original distribution
        p(x) by transforming a latent representation,
        i.e. by modelling p_θ(x|z).

        :param dims: dimensions of the network,
            given as the number of neurons per layer
            in the form [latent_dim, [hidden_dims], input_dim].
        """
        super(Decoder, self).__init__()

        z_dim, h_dim, x_dim = dims

        neurons = [z_dim, *h_dim]
        linear_layers = [nn.Linear(neurons[i - 1], neurons[i]) for i in range(1, len(neurons))]
        # noinspection PyTypeChecker
        self.hidden = nn.ModuleList(linear_layers)
        self.reconstruction = nn.Linear(h_dim[-1], x_dim)
        self.output_activation = Act(output_activation) if (output_activation is not None) else None
        self.activation_fn = Act(activation_fn)
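A minimal sketch of a forward pass consistent with the constructor above, assuming only the attributes it sets (self.hidden, self.activation_fn, self.reconstruction, self.output_activation); the repository's actual forward method is not part of this listing:

    def forward(self, z: Tensor) -> Tensor:
        # Pass the latent code through each hidden layer followed by the activation
        for layer in self.hidden:
            z = self.activation_fn(layer(z))
        # Project back to input space; apply the output activation (e.g. sigmoid) if one was given
        x = self.reconstruction(z)
        return self.output_activation(x) if self.output_activation is not None else x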
 def __init__(self, in_dim: int, out_dim: int, hidden_dim: int):
     super().__init__()
     self.network = nn.Sequential(
         nn.Linear(in_dim, hidden_dim),
         nn.Tanh(),
         nn.Linear(hidden_dim, hidden_dim),
         nn.Tanh(),
         nn.Linear(hidden_dim, out_dim),
     )
Example #3
 def __init__(self, dims: Tuple[int, int, int], activation_fn: Callable[[Tensor], Tensor]=tr.relu):
     """
     Single hidden layer classifier
     with softmax output.
     """
     super(Classifier, self).__init__()
     x_dim, h_dim, y_dim = dims
     self.dense = nn.Linear(x_dim, h_dim)
     self.logits = nn.Linear(h_dim, y_dim)
     self.activation_fn = Act(activation_fn)
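A sketch of the forward pass implied by the docstring (one hidden layer, softmax over the class logits); the actual forward method is not shown in this listing:

 def forward(self, x: Tensor) -> Tensor:
     # Hidden layer with the configured activation, then class probabilities via softmax
     h = self.activation_fn(self.dense(x))
     return tr.softmax(self.logits(h), dim=-1)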
Example #4
 def __init__(self, in_features: int, out_features: int):
     """
     Precision-weighted merging of two Gaussian
     distributions.
     Merges information from z into the given
     mean and log variance and produces
     a sample from this new distribution.
     """
     super(GaussianMerge, self).__init__()
     self.in_features = in_features
     self.out_features = out_features
     self.μ = nn.Linear(in_features, out_features)
     self.log_σ = nn.Linear(in_features, out_features)
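The precision-weighted merge described above combines two Gaussians by weighting each mean with its precision (inverse variance). A small standalone illustration of that formula (not the class's actual forward, which additionally passes z through the μ/log_σ layers defined above):

import torch as tr
from torch import Tensor

def merge_gaussians(μ1: Tensor, log_var1: Tensor, μ2: Tensor, log_var2: Tensor):
    # Precision = 1 / variance of each Gaussian
    prec1, prec2 = tr.exp(-log_var1), tr.exp(-log_var2)
    # Precision-weighted mean and combined variance of the merged distribution
    μ = (μ1 * prec1 + μ2 * prec2) / (prec1 + prec2)
    var = 1.0 / (prec1 + prec2)
    return μ, tr.log(var + 1e-8)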
Example #5
    def __init__(self, in_features: int, out_features: int):
        """
        Layer that represents a sample from a
        Gaussian distribution.

        Base stochastic layer that uses the
        reparametrization trick [Kingma 2013]
        to draw a sample from a distribution
        parametrised by μ and log_σ.
        """
        super(GaussianSample, self).__init__(in_features, out_features)
        self.μ = nn.Linear(in_features, out_features)
        self.log_σ = nn.Linear(in_features, out_features)
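The reparametrization trick [Kingma 2013] referenced in the docstring draws z = μ + σ·ε with ε ~ N(0, I), so gradients flow through μ and log σ. A minimal sketch of such a forward pass, treating log_σ as the log standard deviation (the class's actual forward and its exact parametrisation are not shown here):

    def forward(self, x: Tensor):
        μ, log_σ = self.μ(x), self.log_σ(x)
        # ε ~ N(0, I); z = μ + σ·ε is differentiable w.r.t. μ and log_σ
        ε = tr.randn_like(μ)
        return μ + tr.exp(log_σ) * ε, μ, log_σ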
Example #6
 def __init__(self,
              in_features: int,
              out_features: int,
              dist: Distrib = Normal()):
     super(Sample, self).__init__(in_features, out_features)
     self.params = nn.Linear(in_features, out_features * dist.nparams)
     self.dist = dist
    def __init__(self, dims: Tuple[int, Tuple[int, ...], int],
                 sample_layer: Type[BaseSample]=GaussianSample,
                 activation_fn: Callable[[Tensor], Tensor]=tr.relu):
        """
        Inference network

        Attempts to infer the probability distribution
        p(z|x) from the data by fitting a variational
        distribution q_φ(z|x). Returns the two parameters
        of the distribution (µ, log σ²).

        :param dims: dimensions of the network,
            given as the number of neurons per layer
            in the form [input_dim, [hidden_dims], latent_dim].
        :param sample_layer: subclass of BaseSample to use as the stochastic output layer
        """
        super(Encoder, self).__init__()

        x_dim, h_dim, z_dim = dims
        neurons = [x_dim, *h_dim]
        linear_layers = [nn.Linear(neurons[i - 1], neurons[i]) for i in range(1, len(neurons))]
        # noinspection PyTypeChecker
        self.hidden = nn.ModuleList(linear_layers)
        self.activation_fn = Act(activation_fn)
        self.sample = sample_layer(h_dim[-1], z_dim)
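A minimal sketch of a forward pass consistent with this constructor: the input is passed through the hidden layers and then through the stochastic sample layer, which returns the latent code together with its distribution parameters (sketch only; the actual forward is not shown):

    def forward(self, x: Tensor):
        # Hidden layers with the configured activation
        for layer in self.hidden:
            x = self.activation_fn(layer(x))
        # Stochastic output layer, e.g. (z, μ, log_σ) for GaussianSample
        return self.sample(x)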
 def __init__(self, dims: Seq[int], activation_fn: Callable[[Tensor], Tensor]=tr.relu,
              output_activation: Opt[Callable[[Tensor], Tensor]]=None):
     super(Perceptron, self).__init__()
     self.dims = dims
     self.activation_fn = Act(activation_fn)
     self.output_activation = Act(output_activation) if (output_activation is not None) else None
     # noinspection PyTypeChecker
     self.layers = nn.ModuleList([nn.Linear(d_in, d_out) for d_in, d_out in zip(dims, dims[1:])])
    def __init__(self, dims: Tuple[int, int, int]):
        """
        The ladder decoder differs from the standard decoder
        by using batch-normalization and LReLU activation.
        It also returns the transformation x.

        :param dims: dimensions of the network,
            given as the number of neurons per layer
            in the form (latent_dim, hidden_dim, input_dim).
        """
        super(LadderDecoder, self).__init__()

        self.z_dim, h_dim, x_dim = dims

        self.linear1 = nn.Linear(x_dim, h_dim)
        self.batchnorm1 = nn.BatchNorm1d(h_dim)
        self.merge = GaussianMerge(h_dim, self.z_dim)

        self.linear2 = nn.Linear(x_dim, h_dim)
        self.batchnorm2 = nn.BatchNorm1d(h_dim)
        self.sample = GaussianSample(h_dim, self.z_dim)
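A structural sketch of how the two branches above could be used: the first branch (linear1/batchnorm1/merge) combines top-down information with the bottom-up posterior, while the second (linear2/batchnorm2/sample) parametrises the generative distribution at this rung of the ladder. The leaky-ReLU slope, the use of F as torch.nn.functional, and the merge/sample call signatures are assumptions; this is an illustration, not the repository's actual forward method:

    def forward(self, x: Tensor, l_μ: Tensor, l_log_σ: Tensor):
        # Inference branch: merge with the statistics (l_μ, l_log_σ) coming from the encoder below
        h = F.leaky_relu(self.batchnorm1(self.linear1(x)), 0.1)
        q_z, q_μ, q_log_σ = self.merge(h, l_μ, l_log_σ)
        # Generative branch: sample and the parameters of p(z) at this layer
        h = F.leaky_relu(self.batchnorm2(self.linear2(x)), 0.1)
        z, p_μ, p_log_σ = self.sample(h)
        return z, (q_z, (q_μ, q_log_σ), (p_μ, p_log_σ))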
Example #10
 def __init__(self,
              in_features: int,
              out_features: int,
              n_distributions: int,
              τ: float = 1.0):
     """
     Layer that represents a sample from a categorical
     distribution. Enables sampling and stochastic
     backpropagation using the Gumbel-Softmax trick.
     """
     super(GumbelSoftmax, self).__init__(in_features=in_features,
                                         out_features=out_features)
     self.n_distributions = n_distributions
     self.logits = nn.Linear(in_features, n_distributions * out_features)
     self.τ = τ
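The Gumbel-Softmax trick referenced in the docstring perturbs logits with Gumbel(0, 1) noise g = -log(-log(u)), u ~ Uniform(0, 1), and relaxes the categorical sample through a temperature-controlled softmax. A small standalone illustration of the sampling step (not the class's actual forward):

import torch as tr
from torch import Tensor

def gumbel_softmax_sample(logits: Tensor, τ: float = 1.0) -> Tensor:
    # Gumbel(0, 1) noise: g = -log(-log(u)), u ~ Uniform(0, 1)
    u = tr.rand_like(logits)
    g = -tr.log(-tr.log(u + 1e-20) + 1e-20)
    # Temperature τ controls how close the relaxation is to a one-hot sample
    return tr.softmax((logits + g) / τ, dim=-1)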
Example #11
    def __init__(self, dims: Tuple[int, int, int]):
        """
        The ladder encoder differs from the standard encoder
        by using batch-normalization and LReLU activation.
        It also returns the transformation x.

        :param dims: dimensions (input_dim, hidden_dim, latent_dim).
        """
        super(LadderEncoder, self).__init__()

        x_dim, h_dim, self.z_dim = dims
        self.in_features = x_dim
        self.out_features = h_dim

        self.linear = nn.Linear(x_dim, h_dim)
        self.batchnorm = nn.BatchNorm1d(h_dim)
        self.sample = GaussianSample(h_dim, self.z_dim)
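A sketch of the forward pass described by the docstring: linear → batch-normalization → leaky ReLU, returning both the transformation x and the stochastic sample. The leaky-ReLU slope and the use of F as torch.nn.functional are assumptions; the actual forward is not shown here:

    def forward(self, x: Tensor):
        x = F.leaky_relu(self.batchnorm(self.linear(x)), 0.1)
        # Return the deterministic transformation together with the stochastic sample
        return x, self.sample(x)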
Example #12
    def __init__(self, dims: Tuple[int, int, int, Tuple[int, ...]], features: VariationalAutoencoder):
        """
        M1+M2 model as described in [Kingma 2014].

        Initialise a new stacked generative model.

        :param dims: dimensions of x, y, z and the hidden layers
        :param features: a pretrained M1 model of class `VariationalAutoencoder`
            trained on the same dataset.
        """
        x_dim, y_dim, z_dim, h_dim = dims
        super(StackedDeepGenerativeModel, self).__init__((features.z_dim, y_dim, z_dim, h_dim))

        # Be sure to reconstruct with the same dimensions
        in_features = self.decoder.reconstruction.in_features
        self.decoder.reconstruction = nn.Linear(in_features, x_dim)

        # Make the VAE feature model untrainable by freezing its parameters
        self.features = features
        self.features.train(False)

        param: nn.Parameter
        for param in self.features.parameters():
            param.requires_grad = False
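A hypothetical usage of the stacked model, assuming m1 is a VariationalAutoencoder already trained as M1 on the same dataset (the dimensions below are placeholders, not values from this listing):

# Hypothetical dimensions: 784-dim inputs (e.g. MNIST), 10 classes, 32-dim latent, one 256-unit hidden layer
m2 = StackedDeepGenerativeModel((784, 10, 32, (256,)), features=m1)
# The M1 parameters are frozen in __init__, so only the M2 networks receive gradients during training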