Example 1
def forward(self, input):
    """
    Forward pass of the function.
    """
    return Func.sineReLU(input, self.epsilon)
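For context, here is a self-contained sketch of what an activation of this shape might compute, assuming the common SineReLU definition (identity for positive inputs, epsilon * (sin(x) - cos(x)) otherwise). The class name, the epsilon default, and the formula are assumptions; the actual Func.sineReLU may differ:

import torch
import torch.nn as nn

class SineReLU(nn.Module):
    """Minimal sketch of a SineReLU module matching the forward() above (assumed definition)."""
    def __init__(self, epsilon=0.01):
        super().__init__()
        self.epsilon = epsilon

    def forward(self, input):
        # identity for positive inputs, epsilon-scaled damped sinusoid otherwise
        return torch.where(input > 0, input,
                           self.epsilon * (torch.sin(input) - torch.cos(input)))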
Example 2
    def forward(self, x):
        # make sure the input tensor is flattened
        x = x.view(x.shape[0], -1)

        # apply custom activation function
        if self.activation == WEIGHTED_TANH:
            x = self.fc1(x)
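            # the in-place variant mutates x directly, so its result is not reassigned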
            if self.inplace:
                Func.weighted_tanh(x, weight=1, inplace=self.inplace)
            else:
                x = Func.weighted_tanh(x, weight=1, inplace=self.inplace)

        if self.activation == MISH:
            x = self.fc1(x)
            if self.inplace:
                Func.mish(x, inplace=self.inplace)
            else:
                x = Func.mish(x, inplace=self.inplace)

        if self.activation == SILU:
            x = self.fc1(x)
            if self.inplace:
                Func.silu(x, inplace=self.inplace)
            else:
                x = Func.silu(x, inplace=self.inplace)

        if self.activation == ARIA2:
            x = Func.aria2(self.fc1(x))

        if self.activation == ESWISH:
            x = Func.eswish(self.fc1(x))

        if self.activation == SWISH:
            x = Func.swish(self.fc1(x))

        if self.activation == BMISH:
            x = Func.beta_mish(self.fc1(x))

        if self.activation == ELISH:
            x = Func.elish(self.fc1(x))

        if self.activation == HELISH:
            x = Func.hard_elish(self.fc1(x))

        if self.activation == MILA:
            x = Func.mila(self.fc1(x))

        if self.activation == SINERELU:
            x = Func.sineReLU(self.fc1(x))

        if self.activation == FTS:
            x = Func.fts(self.fc1(x))

        if self.activation == SQNL:
            x = Func.sqnl(self.fc1(x))

        if self.activation == ISRU:
            x = Func.isru(self.fc1(x))

        if self.activation == ISRLU:
            x = Func.isrlu(self.fc1(x))

        if self.activation == BENTID:
            x = Func.bent_id(self.fc1(x))

        if self.activation == SC:
            x = Func.soft_clipping(self.fc1(x))

        x = F.relu(self.fc2(x))
        x = F.relu(self.fc3(x))
        x = F.log_softmax(self.fc4(x), dim=1)

        return x
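Several of the activations dispatched above have simple closed forms; Mish, for instance, is x * tanh(softplus(x)). A minimal plain-PyTorch sketch for reference (the library's Func.mish may differ in details such as its inplace handling):

import torch
import torch.nn.functional as F

def mish(x):
    # Mish(x) = x * tanh(softplus(x))
    return x * torch.tanh(F.softplus(x))

x = torch.randn(4, 8)
y = mish(x)  # same shape as the input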