Example #1
    def forward(self, x, m):
        """
        NOTE: this method works only with subclasses that initialize main_net
        """
        out = m * self.main_net(x)
        if isinstance(self, BaseClassifier):
            out = Sigmoid()(out)
        out = out.view(-1, 1)
        return None, out
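As the NOTE says, this forward() only works when a subclass supplies main_net. Below is a minimal sketch of such a subclass; ToyInstanceClassifier, the feature size, and the plain nn.Linear main_net are hypothetical stand-ins, and only BaseClassifier, Sigmoid, and the forward body come from the snippet above.

# Minimal sketch, assuming a PyTorch-style base class; names are hypothetical.
import torch
from torch import nn
from torch.nn import Sigmoid

class BaseClassifier(nn.Module):  # stand-in for the library's base class
    pass

class ToyInstanceClassifier(BaseClassifier):
    def __init__(self, ndim=3):
        super().__init__()
        # main_net maps each instance's ndim features to a single score
        self.main_net = nn.Linear(ndim, 1)

    def forward(self, x, m):
        out = m * self.main_net(x)      # mask padded instances
        if isinstance(self, BaseClassifier):
            out = Sigmoid()(out)        # instance-level probabilities
        out = out.view(-1, 1)
        return None, out

x = torch.randn(2, 5, 3)   # (bags, instances, features)
m = torch.ones(2, 5, 1)    # padding mask
_, y = ToyInstanceClassifier()(x, m)
print(y.shape)             # torch.Size([10, 1])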
Example #2
    def forward(self, x, m):
        x = self.main_net(x)
        x_det = torch.transpose(m * self.detector(x), 2, 1)

        w = Softmax(dim=2)(x_det)
        w = WeightsDropout(p=self.dropout)(w)

        x = torch.bmm(w, x)
        out = self.estimator(x)
        if isinstance(self, BaseClassifier):
            out = Sigmoid()(out)
        out = out.view(-1, 1)
        return w, out
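The attention block above normalizes the per-instance detector scores with a Softmax over instances and then pools the instance embeddings with a batch matrix multiply. A shape-only sketch of that pooling step follows; WeightsDropout is library-specific and omitted, and the dimensions are assumptions consistent with the snippet.

import torch
from torch.nn import Softmax

bags, instances, hidden = 2, 5, 4
x = torch.randn(bags, instances, hidden)           # instance embeddings from main_net
x_det = torch.randn(bags, instances, 1)            # detector score per instance
w = Softmax(dim=2)(torch.transpose(x_det, 2, 1))   # (bags, 1, instances), rows sum to 1
pooled = torch.bmm(w, x)                           # (bags, 1, hidden) weighted average
print(pooled.shape)                                # torch.Size([2, 1, 4])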
Example #3
    def forward(self, x: torch.Tensor,
                m: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        
        Feed forward input data.
        Parameters
        ----------
        x: torch.Tensor
        m: torch.Tensor
        Returns
        --------
        Tuple with weights of conformers and tensor
        of shape Nmol*1, where Nmol is the number of molecules. The tensor is final output y, but it needs to be passed
        to sigmoid to obtain final class probabilities in case of classification (recall, this classs shouldnt be called directly,
        call regressor/classifier subclass to obtain final y).

        Examples
        --------
        >> > import torch
        >> > import numpy as np
        >> > from torch import randn
        >> > from miqsar.estimators.attention_nets import AttentionNet
        >> > x_train = randn((3, 3, 3))
        >> > at_net = AttentionNet(ndim=(x_train[0].shape[-1], 4, 6, 4), det_ndim = (4,4), init_cuda=False)
        >> > _, m = at_net.add_padding(x_train)
        >> > m = torch.from_numpy(m.astype('float32'))
        >> > _ = at_net.forward(x_train, m)  # (assign result to a variable to supress std output)

        """
        x = self.main_net(x)
        x_det = torch.transpose(m * self.detector(x), 2, 1)

        w = nn.functional.gumbel_softmax(x_det,
                                         tau=self.instance_dropout,
                                         dim=2)

        x = torch.bmm(w, x)
        out = self.estimator(x)
        if isinstance(self, BaseClassifier):
            out = Sigmoid()(out)
        out = out.view(-1, 1)
        return w, out
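Example #3 differs from Example #2 only in how the attention weights are computed: torch.nn.functional.gumbel_softmax replaces the Softmax/WeightsDropout pair, with self.instance_dropout acting as the temperature tau. A small sketch of that call in isolation; the shapes and the tau value here are assumptions.

import torch
import torch.nn.functional as F

x_det = torch.randn(2, 1, 5)                 # (bags, 1, instances) detector scores
w = F.gumbel_softmax(x_det, tau=0.5, dim=2)  # noisy but normalized attention weights
print(w.sum(dim=2))                          # each row sums to ~1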
Example #4
    def forward(self, x, m):
        out = m * self.main_net(x)
        if isinstance(self, BaseClassifier):
            out = Sigmoid()(out)
        out = out.view(-1, 1)
        return None, out