Example #1
    def __init__(self,
                 dim,
                 window,
                 activation='relu',
                 num_layers=2,
                 pooling_dim=1):
        """

        :param dim: int
            final dimension of the protein representation
        :param activation:
            non-linearity to apply to logits
        :param window:
            max size of grouped amino acids
        :param num_layers: int
            Number of convolution layers
        :param pooling_dim: int
            The dimension to be used in reducing protein segments to form a vector representation of the protein.
        """
        super(ProteinCNN, self).__init__()
        self.activation = get_activation_func(activation)
        self.pooling_dim = pooling_dim
        # stack of linear "kernels" over windowed segments; the final layer
        # projects each segment down to the target representation size
        self.lin_kernels = nn.ModuleList([
            nn.Linear(dim * window, dim * window)
            for _ in range(num_layers - 1)
        ])
        self.lin_kernels.append(nn.Linear(dim * window, dim))
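A quick sketch of how these stacked kernels could be applied, assuming the protein is already reshaped into segments of dim * window features and torch.relu stands in for get_activation_func (the forward pass and the pooling step are not shown above, so both are assumptions):

import torch
import torch.nn as nn

dim, window, num_layers = 8, 5, 2
lin_kernels = nn.ModuleList(
    [nn.Linear(dim * window, dim * window) for _ in range(num_layers - 1)]
    + [nn.Linear(dim * window, dim)])

x = torch.randn(4, 10, dim * window)  # (batch, segments, dim * window)
for layer in lin_kernels:
    x = torch.relu(layer(x))
x = x.mean(dim=1)                     # pool over segments (pooling_dim=1) -> (batch, dim)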
Example #2
    def __init__(self, activation=None):
        super(Layer, self).__init__()

        if activation and isinstance(activation, str):
            self.activation = get_activation_func(activation)
        else:
            self.activation = activation
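Since the constructor accepts either a string or a callable, both forms work (a usage sketch, assuming Layer subclasses nn.Module and torch is imported):

layer_a = Layer(activation='relu')      # string resolved via get_activation_func
layer_b = Layer(activation=torch.tanh)  # callables are stored as-is
layer_c = Layer()                       # no non-linearity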
Example #3
 def __init__(self, in_features, out_features, dprob, activation='relu'):
     super(ResBlock, self).__init__()
     self.activation = get_activation_func(activation)
     self.net1 = nn.Sequential(nn.Linear(in_features, out_features),
                               nn.BatchNorm1d(out_features), nn.ReLU(),
                               nn.Dropout(dprob))
     self.net2 = nn.Sequential(nn.Linear(out_features, out_features),
                               nn.BatchNorm1d(out_features), nn.ReLU(),
                               nn.Dropout(dprob))
     self.bn_out = nn.BatchNorm1d(out_features)
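The forward pass is not part of the excerpt; one plausible residual wiring, assuming in_features == out_features so the skip connection lines up (illustrative only):

 def forward(self, x):
     # two dropout-regularised sub-nets, then the residual skip
     out = self.net2(self.net1(x))
     return self.activation(self.bn_out(out + x))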
Example #4
 def __init__(self,
              num_fingerprints,
              embedding_dim,
              num_layers=3,
              update='mean',
              activation='relu'):
     super(GraphNeuralNet, self).__init__()
     self.output = update  # node-aggregation mode; 'mean' by default
     self.activation = get_activation_func(activation)
     self.embed_fingerprint = nn.Embedding(num_fingerprints,
                                           int(embedding_dim))
     self.W_gnn = nn.ModuleList([
         nn.Linear(embedding_dim, embedding_dim) for _ in range(num_layers)
     ])
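The update logic is not shown; a hedged sketch of the per-layer step this layout suggests, where fingerprints are embedded and then repeatedly mixed with their neighbours through W_gnn and an adjacency matrix (adj, the residual form, and the 'mean' readout are assumptions, not from the source):

 def gnn_update(self, fingerprints, adj):
     # fingerprints: (num_nodes,) long tensor; adj: (num_nodes, num_nodes)
     h = self.embed_fingerprint(fingerprints)
     for linear in self.W_gnn:
         h = h + torch.matmul(adj, self.activation(linear(h)))
     if self.output == 'mean':  # readout: average node vectors
         return h.mean(dim=0)
     return h.sum(dim=0)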
Example #5
 def __init__(self,
              in_dim,
              hidden_dim,
              dropout,
              num_layers=1,
              bidirectional=False,
              activation='relu',
              batch_first=False):
     super(ProteinRNN, self).__init__()
     self.hidden_dim = int(hidden_dim)
     self.num_layers = int(num_layers)
     self.directions = max(1, int(bidirectional) + 1)
     self.activation = get_activation_func(activation)
     self.batch_first = batch_first
     if num_layers == 1:
         dropout = 0  # nn.LSTM warns if dropout is set with a single layer
     self.model = nn.LSTM(input_size=int(in_dim),
                          hidden_size=int(self.hidden_dim),
                          num_layers=int(self.num_layers),
                          batch_first=batch_first,
                          dropout=dropout,
                          bidirectional=bidirectional)
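Instantiating the wrapper and pushing a batch through the underlying LSTM, as a quick shape check (the surrounding forward logic is not shown, so this only exercises self.model directly):

import torch

rnn = ProteinRNN(in_dim=32, hidden_dim=64, dropout=0.2,
                 num_layers=2, bidirectional=True, batch_first=True)
x = torch.randn(8, 100, 32)   # (batch, seq_len, in_dim)
out, (h_n, c_n) = rnn.model(x)
print(out.shape)              # torch.Size([8, 100, 128]) = hidden_dim * directions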
Example #6
    def __init__(self,
                 protein_profile,
                 vocab_size,
                 embedding_dim,
                 activation='relu',
                 batch_first=False):
        """

        :param protein_profile:
        :param vocab_size: int
            The number of ngrams formed (excluding padding index). An additional row is added to the end of the
            embedding table constructed as a padding index.
        :param embedding_dim:
        :param activation:
        :param batch_first:
        """
        super(Prot2Vec, self).__init__()
        self._batch_first = batch_first
        self.protein_profile = protein_profile
        self.embedding = nn.Embedding(vocab_size + 1,
                                      int(embedding_dim),
                                      padding_idx=vocab_size)
        self.activation = get_activation_func(activation)
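The padding scheme described in the docstring can be verified directly: nn.Embedding initialises the padding row to zeros and keeps its gradient at zero, so row vocab_size stays a true padding vector:

import torch
import torch.nn as nn

vocab_size, embedding_dim = 1000, 64
embedding = nn.Embedding(vocab_size + 1, embedding_dim,
                         padding_idx=vocab_size)
assert torch.all(embedding.weight[vocab_size] == 0)  # padding row is zeros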
Example #7
 def __init__(self, activation='relu'):
     super(SiameseNonlinearity, self).__init__()
     self._func = get_activation_func(activation)
Example #8
 def __init__(self, dim1, dim2, out_dim, activation='relu'):
     super(PreSiameseLinear, self).__init__()
     self.linear1 = nn.Linear(dim1, out_features=out_dim)
     self.linear2 = nn.Linear(dim2, out_features=out_dim)
     self.activation = get_activation_func(activation)
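Typical use would project two inputs of different widths into a shared space before the siamese branches compare them (a usage sketch, assuming get_activation_func returns an element-wise callable):

import torch

pre = PreSiameseLinear(dim1=75, dim2=200, out_dim=128)
a, b = torch.randn(16, 75), torch.randn(16, 200)
za = pre.activation(pre.linear1(a))   # (16, 128)
zb = pre.activation(pre.linear2(b))   # (16, 128)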