Example #1
 def __init__(self,
              n_input_feat,
              n_output_feat,
              n_size,
              init='glorot_uniform',
              activation='relu',
              activation_first=True,
              **kwargs):
     """
 Parameters
 ----------
 n_input_feat: int
   Number of input channels
 n_output_feat: int
   Number of output channels
 n_size: int
   Filter size (full length)
 init: str, optional
   Weight initialization for filters.
 activation: str, optional
   Activation function applied
 activation_first: bool, optional
   Whether to apply the activation before the convolution
 """
     self.init = initializations.get(init)  # Set weight initialization
     self.activation = activations.get(activation)  # Get activations
     self.n_input_feat = n_input_feat
     self.n_output_feat = n_output_feat
     self.n_size = n_size
     self.activation_first = activation_first
     super(Conv2DUp, self).__init__(**kwargs)
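A minimal instantiation sketch for the constructor above; the import path and the concrete argument values are assumptions for illustration only, not part of the original example.

from pnet.models.layers import Conv2DUp  # assumed import path

# Layer with 64 input channels, 32 output channels, a filter of size 3,
# applying the activation before the convolution.
up_layer = Conv2DUp(n_input_feat=64, n_output_feat=32, n_size=3,
                    init='glorot_uniform', activation='relu',
                    activation_first=True)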
Example #2
    def __init__(self,
                 n_input_feat,
                 n_output=2,
                 init='glorot_uniform',
                 activation='relu',
                 **kwargs):
        """
    Parameters
    ----------
    n_input_feat: int
      Number of input channels
    n_output: int, optional
      Number of output channels: 2 for classification, 1 for regression
    init: str, optional
      Weight initialization for filters.
    activation: str, optional
      Activation function applied

    """

        self.n_input_feat = n_input_feat
        self.n_output = n_output
        self.init = initializations.get(init)  # Set weight initialization
        self.activation = activations.get(activation)  # Get activations
        super(ContactMapGather, self).__init__(**kwargs)
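A short usage sketch; the import path is an assumption. Per the docstring above, the default n_output=2 corresponds to classification, while n_output=1 would be used for regression.

from pnet.models.layers import ContactMapGather  # assumed import path

# Gather layer configured for regression (single output channel)
gather = ContactMapGather(n_input_feat=32, n_output=1,
                          init='glorot_uniform', activation='relu')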
Example #3
    def __init__(self,
                 n_input_feat,
                 n_output_feat,
                 n_size=3,
                 rate=[6, 12, 18, 24],
                 init='glorot_uniform',
                 activation='relu',
                 **kwargs):
        """
    Parameters
    ----------
    n_input_feat: int
      Number of input channels
    n_output_feat: int
      Number of output channels for each Atrous component
    n_size: int
      Filter size (full length)
    rate: list of int, optional
      Rate of each atrous convolution
    init: str, optional
      Weight initialization for filters.
    activation: str, optional
      Activation function applied

    """
        self.init = initializations.get(init)  # Set weight initialization
        self.activation = activations.get(activation)  # Get activations
        self.n_input_feat = n_input_feat
        self.n_output_feat = n_output_feat
        self.n_size = n_size
        self.rate = rate
        super(Conv2DASPP, self).__init__(**kwargs)
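A hedged instantiation sketch showing how the rate list parameterizes the atrous (dilated) branches; the import path and argument values are assumptions.

from pnet.models.layers import Conv2DASPP  # assumed import path

# ASPP block with four dilated branches at rates 6, 12, 18 and 24,
# each producing 16 output channels from a 64-channel input.
aspp = Conv2DASPP(n_input_feat=64, n_output_feat=16, n_size=3,
                  rate=[6, 12, 18, 24])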
Example #4
 def __init__(self,
              pos_start=0,
              pos_end=25,
              embedding_length=50,
              init='glorot_uniform',
              activation='relu',
              **kwargs):
     """
 Parameters
 ----------
 pos_start: int, optional
   Starting position of raw features that need embedding
 pos_end: int, optional
   Ending position of raw features that need embedding
 embedding_length: int, optional
   Length of the embedding
 init: str, optional
   Weight initialization for filters.
 activation: str, optional
   Activation function applied
 """
     self.init = initializations.get(init)  # Set weight initialization
     self.activation = activations.get(activation)  # Get activations
     self.pos_start = pos_start
     self.pos_end = pos_end
     self.embedding_length = embedding_length
     super(ResidueEmbedding, self).__init__(**kwargs)
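A usage sketch; the import path is assumed. Per the docstring, raw feature columns from pos_start to pos_end are embedded into a vector of the given length.

from pnet.models.layers import ResidueEmbedding  # assumed import path

# Embed raw feature columns 0-25 into a 50-dimensional representation
embedding = ResidueEmbedding(pos_start=0, pos_end=25,
                             embedding_length=50)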
Example #5
    def __init__(self,
                 n_input_feat,
                 n_output_feat,
                 n_size,
                 init='glorot_uniform',
                 activation='relu',
                 **kwargs):
        """
    Parameters
    ----------
    n_input_feat: int
      Number of input channels
    n_output_feat: int
      Number of output channels
    n_size: int
      Filter size (full length)
    init: str, optional
      Weight initialization for filters.
    activation: str, optional
      Activation function applied

    """
        self.init = initializations.get(init)  # Set weight initialization
        self.activation = activations.get(activation)  # Get activations
        self.n_input_feat = n_input_feat
        self.n_output_feat = n_output_feat
        self.n_size = n_size
        super(Conv2DLayer_RaptorX, self).__init__(**kwargs)
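The same instantiation pattern applies to this plain convolution layer; the import path and values are assumptions.

from pnet.models.layers import Conv2DLayer_RaptorX  # assumed import path

# 3x3 convolution keeping 64 channels in and out
conv = Conv2DLayer_RaptorX(n_input_feat=64, n_output_feat=64, n_size=3)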
Example #6
    def __init__(self,
                 n_input_feat,
                 n_output_feat,
                 n_size,
                 rate,
                 init='glorot_uniform',
                 activation='relu',
                 activation_first=True,
                 dropout=None,
                 **kwargs):
        """
    Parameters
    ----------
    n_input_feat: int
      Number of input channels
    n_output_feat: int
      Number of output channels
    n_size: int
      Filter size (full length)
    rate: int
      Rate of atrous convolution
    init: str, optional
      Weight initialization for filters.
    activation: str, optional
      Activation function applied
    activation_first: bool, optional
      Whether to apply the activation before the convolution
    dropout: float, optional
      Dropout probability (not supported in this layer)

    """
        self.init = initializations.get(init)  # Set weight initialization
        self.activation = activations.get(activation)  # Get activations
        self.n_input_feat = n_input_feat
        self.n_output_feat = n_output_feat
        self.n_size = n_size
        self.rate = rate
        self.activation_first = activation_first
        super(DiagConv2DAtrous, self).__init__(**kwargs)
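A hedged sketch for the atrous variant; the import path and values are assumptions. Unlike Conv2DASPP, rate here is a single dilation rate rather than a list.

from pnet.models.layers import DiagConv2DAtrous  # assumed import path

# Dilated convolution at rate 2 with the activation applied first;
# dropout is accepted by the signature but not supported here.
atrous = DiagConv2DAtrous(n_input_feat=64, n_output_feat=64, n_size=3,
                          rate=2, activation_first=True, dropout=None)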