Example #1
    def __init__(
        self,
        in_features: int,
        out_channels: int,
        cardinality: int,
        num_repetitions: int = 1,
        dropout: float = 0.0,
        leaf_base_class: Leaf = RatNormal,
        leaf_base_kwargs: Dict = None,
    ):
        """
        Create multivariate distribution that only has non zero values in the covariance matrix on the diagonal.

        Args:
            out_channels: Number of parallel representations for each input feature.
            cardinality: Number of variables per gauss.
            in_features: Number of input features.
            dropout: Dropout probabilities.
            leaf_base_class (Leaf): The encapsulating base leaf layer class.

        """
        super().__init__(in_features, out_channels, num_repetitions, dropout)
        if leaf_base_kwargs is None:
            leaf_base_kwargs = {}

        self.base_leaf = leaf_base_class(
            out_channels=out_channels,
            in_features=in_features,
            dropout=dropout,
            num_repetitions=num_repetitions,
            **leaf_base_kwargs,
        )
        self.cardinality = check_valid(cardinality, int, 1, in_features + 1)

        # Pad the input so that in_features becomes a multiple of cardinality
        self._pad = (cardinality -
                     self.in_features % cardinality) % cardinality
        # Number of input features for the product needs to be extended depending on the padding applied here
        prod_in_features = in_features + self._pad
        self.prod = Product(in_features=prod_in_features,
                            cardinality=cardinality,
                            num_repetitions=num_repetitions)

        self.out_shape = f"(N, {self.prod._out_features}, {out_channels}, {self.num_repetitions})"
Example #2
    def __init__(self, multiplicity, in_features, cardinality, dropout=0.0):
        """
        Create multivariate normal that only has non zero values in the covariance matrix on the diagonal.

        Args:
            multiplicity: Number of parallel representations for each input feature.
            cardinality: Number of variables per gauss.
            in_features: Number of input features.
            droptout: Dropout probabilities.
        """
        super().__init__(multiplicity, in_features, dropout)
        self.gauss = RatNormal(multiplicity=multiplicity,
                               in_features=in_features,
                               dropout=dropout)
        self.prod = Product(in_features=in_features, cardinality=cardinality)
        self._pad = (cardinality -
                     self.in_features % cardinality) % cardinality

        self.cardinality = cardinality
        self.out_shape = f"(N, {self.prod._out_features}, {multiplicity})"
Example #3

if __name__ == "__main__":
    from spn.algorithms.layerwise.layers import Product, Sum

    # Setup
    I = 3
    in_features = 2
    num_repetitions = 1
    batch_size = 1

    # Leaf layer: DistributionsMixture
    dists = [Gamma, Beta, Chi2, Cauchy]
    leaf = Mixture(distributions=dists, in_features=in_features, out_channels=I, num_repetitions=num_repetitions)
    # Add further layers
    pro1 = Product(in_features=in_features, cardinality=in_features, num_repetitions=num_repetitions)
    sum1 = Sum(in_features=1, in_channels=I, out_channels=1, num_repetitions=1)

    # Random input
    x = torch.randn(batch_size, in_features)

    # Pass through leaf mixture layer
    x = leaf(x)

    # Check dimensions
    n, d, c, r = x.shape
    assert n == batch_size
    assert d == in_features
    assert c == I
    assert r == num_repetitions
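
    # The pro1 and sum1 layers defined above can complete the pass; the shapes
    # below follow from the constructor arguments (a sketch, not part of the
    # original snippet).
    x = pro1(x)  # product over all in_features: (N, 1, I, R)
    x = sum1(x)  # mixture over the I channels:  (N, 1, 1, R)
    print(x.shape)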
Example #4
# Excerpt, truncated at both ends. Assumed imports (not shown in the original):
#   import torch
#   import torch.nn as nn
#   from spn.algorithms.layerwise.distributions import Normal
#   from spn.algorithms.layerwise.layers import Product, Sum
#   from spn.algorithms.layerwise.clipper import DistributionClipper
        return tr_err

    AE_tr_err = train_ae(model_f, model_de, train_xs, train_xt, drift_num)

    def test_ae(model_f, model_de, test_x):
        model_f.eval()
        model_de.eval()
        cri = torch.nn.MSELoss()
        test_x = test_x.cuda()
        feature = model_f(test_x)
        output = model_de(feature)
        loss = cri(output, test_x)
        return loss.item()

    # Small SPN over the 50-dimensional autoencoder features:
    # Normal leaves -> products over scopes of 5 -> sums -> root product.
    gauss = Normal(multiplicity=5, in_features=50)             # (N, 50, 5)
    prod1 = Product(in_features=50, cardinality=5)             # (N, 10, 5)
    sum1 = Sum(in_features=10, in_channels=5, out_channels=1)  # (N, 10, 1)
    prod2 = Product(in_features=10, cardinality=10)            # (N, 1, 1)
    spn = nn.Sequential(gauss, prod1, sum1, prod2).cuda()
    clipper = DistributionClipper()
    optimizer_spn = torch.optim.Adam(spn.parameters(), lr=0.001)
    optimizer_spn.zero_grad()

    def train_spn(model_f, spn, train_x):
        model_f.eval()
        spn.train()
        for t in range(200):
            for i in range(len(train_x)):
                data = train_x[i]
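                # --- Hypothetical continuation: the original snippet is
                # --- truncated here. The NLL objective and the clipper usage
                # --- are assumptions, not the original code.
                data = data.cuda()
                feature = model_f(data)
                nll = -spn(feature).mean()  # negative log-likelihood
                optimizer_spn.zero_grad()
                nll.backward()
                optimizer_spn.step()
                spn.apply(clipper)          # keep distribution parameters valid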
Example #5
class IndependentMultivariate(Leaf):
    def __init__(
        self,
        in_features: int,
        out_channels: int,
        cardinality: int,
        num_repetitions: int = 1,
        dropout: float = 0.0,
        leaf_base_class: Leaf = RatNormal,
        leaf_base_kwargs: Dict = None,
    ):
        """
        Create multivariate distribution that only has non zero values in the covariance matrix on the diagonal.

        Args:
            out_channels: Number of parallel representations for each input feature.
            cardinality: Number of variables per gauss.
            in_features: Number of input features.
            dropout: Dropout probabilities.
            leaf_base_class (Leaf): The encapsulating base leaf layer class.

        """
        super().__init__(in_features, out_channels, num_repetitions, dropout)
        if leaf_base_kwargs is None:
            leaf_base_kwargs = {}

        self.base_leaf = leaf_base_class(
            out_channels=out_channels,
            in_features=in_features,
            dropout=dropout,
            num_repetitions=num_repetitions,
            **leaf_base_kwargs,
        )
        self.cardinality = check_valid(cardinality, int, 1, in_features + 1)

        # Pad the input so that in_features becomes a multiple of cardinality
        self._pad = (cardinality -
                     self.in_features % cardinality) % cardinality
        # Number of input features for the product needs to be extended depending on the padding applied here
        prod_in_features = in_features + self._pad
        self.prod = Product(in_features=prod_in_features,
                            cardinality=cardinality,
                            num_repetitions=num_repetitions)

        self.out_shape = f"(N, {self.prod._out_features}, {out_channels}, {self.num_repetitions})"

    def _init_weights(self):
        if isinstance(self.base_leaf, RatNormal):
            truncated_normal_(self.base_leaf.stds, std=0.5)

    def forward(self, x: torch.Tensor):
        # Pass through base leaf
        x = self.base_leaf(x)

        if self._pad:
            # Pad with log-likelihood 0.0 (= probability 1), i.e. the padded
            # nodes are marginalized out in the product below
            x = F.pad(x,
                      pad=[0, 0, 0, 0, 0, self._pad],
                      mode="constant",
                      value=0.0)

        # Pass through product layer
        x = self.prod(x)
        return x

    def _get_base_distribution(self):
        raise NotImplementedError(
            "IndependentMultivariate does not have an explicit PyTorch base distribution."
        )

    def sample(self,
               n: int = None,
               context: SamplingContext = None) -> torch.Tensor:
        context = self.prod.sample(context=context)

        # Remove padding
        if self._pad:
            context.parent_indices = context.parent_indices[:, :-self._pad]

        samples = self.base_leaf.sample(context=context)
        return samples

    def __repr__(self):
        return f"IndependentMultivariate(in_features={self.in_features}, out_channels={self.out_channels}, dropout={self.dropout}, cardinality={self.cardinality}, out_shape={self.out_shape})"