def forward(self, x):
    # conv -> batch norm -> optional ReLU / ReLU6, selected by self.act
    x = self._conv(x)
    x = self._batch_norm(x)
    if self.act == "relu":
        x = F.relu(x)
    elif self.act == "relu6":
        x = F.relu6(x)
    return x
def forward(self, x):
    # conv -> batch norm -> activation chosen by the `act` string
    x = self._conv(x)
    x = self._batch_norm(x)
    if self.act == "relu":
        x = F.relu(x)
    elif self.act == "relu6":
        x = F.relu6(x)
    elif self.act == "leaky":
        x = F.leaky_relu(x)
    elif self.act == "hard_swish":
        x = hard_swish(x)
    return x
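These forward() snippets rely on self._conv, self._batch_norm, and self.act being set up in the layer's constructor, which is not shown here. Below is a minimal sketch of such a module in PaddlePaddle; the class name, arguments, and defaults are assumptions for illustration, not taken from the original code.

import paddle.nn as nn
import paddle.nn.functional as F

class ConvBNLayer(nn.Layer):
    # hypothetical constructor matching the forward() snippets above
    def __init__(self, in_channels, out_channels, kernel_size,
                 stride=1, groups=1, act=None):
        super().__init__()
        self._conv = nn.Conv2D(
            in_channels, out_channels, kernel_size, stride=stride,
            padding=(kernel_size - 1) // 2, groups=groups, bias_attr=False)
        self._batch_norm = nn.BatchNorm2D(out_channels)
        self.act = act  # "relu", "relu6", "leaky", "hard_swish", or None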
def forward(self, x):
    # conv -> batch norm -> activation; unsupported names raise an error
    x = self.conv(x)
    x = self.bn(x)
    if self.act is not None:
        if self.act == "relu":
            x = F.relu(x)
        elif self.act == "relu6":
            x = F.relu6(x)
        elif self.act == "hard_swish":
            x = F.hardswish(x)
        else:
            raise NotImplementedError(
                "The activation function is selected incorrectly.")
    return x
Example #4
def forward(self, inputs, if_act=True):
    # conv -> batch norm -> optional ReLU6, gated by `if_act`
    y = self._conv(inputs)
    y = self._batch_norm(y)
    if if_act:
        y = F.relu6(y)
    return y
def hard_swish(x):
    # hard-swish activation (MobileNetV3): x * ReLU6(x + 3) / 6
    return x * F.relu6(x + 3) / 6.
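This manual definition matches the built-in paddle.nn.functional.hardswish used in the third snippet above. A quick sanity check, assuming a PaddlePaddle 2.x install:

import paddle
import paddle.nn.functional as F

x = paddle.linspace(-6.0, 6.0, 13)
manual = x * F.relu6(x + 3) / 6.0
builtin = F.hardswish(x)
print(bool(paddle.allclose(manual, builtin)))  # expected: True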
def forward(self, x):
    # depthwise conv -> batch norm + ReLU6 -> pointwise (1x1) conv
    x = self.dw_conv(x)
    x = F.relu6(self.bn(x))
    x = self.pw_conv(x)
    return x
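This is the forward pass of a depthwise-separable block: a depthwise convolution (dw_conv), BatchNorm plus ReLU6, then a 1x1 pointwise convolution (pw_conv). Below is a sketch of how those layers are typically constructed; the class name and parameters are assumptions, not taken from the original.

import paddle.nn as nn

class DepthwiseSeparable(nn.Layer):
    # hypothetical constructor for the dw_conv / bn / pw_conv attributes above
    def __init__(self, in_channels, out_channels, stride=1):
        super().__init__()
        # depthwise: one filter per input channel (groups == in_channels)
        self.dw_conv = nn.Conv2D(
            in_channels, in_channels, kernel_size=3, stride=stride,
            padding=1, groups=in_channels, bias_attr=False)
        self.bn = nn.BatchNorm2D(in_channels)
        # pointwise: 1x1 conv mixes channels and sets the output width
        self.pw_conv = nn.Conv2D(
            in_channels, out_channels, kernel_size=1, bias_attr=False)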
Example #7
def forward(self, inputs: paddle.Tensor, if_act: bool = True):
    # same as Example #4, with type hints: conv -> batch norm -> optional ReLU6
    y = self._conv(inputs)
    y = self._batch_norm(y)
    if if_act:
        y = F.relu6(y)
    return y
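For illustration, a self-contained usage sketch of this if_act pattern: the wrapper class below is hypothetical, but its forward() mirrors Example #7, and passing if_act=False yields a conv + BN projection with no activation.

import paddle
import paddle.nn as nn
import paddle.nn.functional as F

class ConvBNReLU6(nn.Layer):
    # hypothetical module whose forward() mirrors Example #7
    def __init__(self, in_channels, out_channels):
        super().__init__()
        self._conv = nn.Conv2D(in_channels, out_channels, kernel_size=1, bias_attr=False)
        self._batch_norm = nn.BatchNorm2D(out_channels)

    def forward(self, inputs: paddle.Tensor, if_act: bool = True):
        y = self._conv(inputs)
        y = self._batch_norm(y)
        if if_act:
            y = F.relu6(y)
        return y

x = paddle.randn([1, 32, 56, 56])   # NCHW input
layer = ConvBNReLU6(32, 64)
y = layer(x, if_act=False)          # conv + BN only, no ReLU6
print(y.shape)                      # [1, 64, 56, 56]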