def forward(self, input):
    """Apply the Mish activation: mish(x) = x * tanh(softplus(x)).

    Uses torch's native ``F.mish`` (added in torch 1.9); older torch
    versions fall back to the project-local ``Func.mish``.

    Args:
        input: input tensor of any shape.

    Returns:
        Tensor of the same shape with Mish applied elementwise.
    """
    # BUG FIX: the original compared version STRINGS ("1.10" >= "1.9" is
    # False lexicographically). Compare numeric (major, minor) instead,
    # stripping any local-version suffix such as "+cu118" first.
    major, minor = (int(p) for p in torch.__version__.split('+')[0].split('.')[:2])
    if (major, minor) >= (1, 9):
        return F.mish(input)
    else:
        return Func.mish(input)
def forward(self, x):
    """Run the classifier head: flatten, fc1 + activation, fc2..fc4.

    Args:
        x: input tensor; flattened to (batch, -1) before the linear stack.

    Returns:
        Log-probabilities over classes (output of ``log_softmax``),
        shape (batch, n_classes) — suitable for ``nn.NLLLoss``.
    """
    # Make sure the input tensor is flattened to (batch, features).
    x = x.view(x.shape[0], -1)
    # First layer with the configured activation.
    # BUG FIX: the original had no else branch, so any activation other
    # than MISH skipped fc1 entirely and fed the raw flattened input into
    # fc2 (a shape mismatch). Fall back to ReLU, matching fc2/fc3.
    if self.activation == MISH:
        x = Func.mish(self.fc1(x))
    else:
        x = F.relu(self.fc1(x))
    x = F.relu(self.fc2(x))
    x = F.relu(self.fc3(x))
    # Log-softmax over the class dimension (dim=1).
    x = F.log_softmax(self.fc4(x), dim=1)
    return x
def forward(self, input):
    """Forward pass: apply the Mish activation elementwise.

    Delegates to ``Func.mish``; mish(x) = x * tanh(softplus(x)).

    Args:
        input: input tensor of any shape.

    Returns:
        Tensor of the same shape with Mish applied.
    """
    activated = Func.mish(input)
    return activated