Example 1
import mindspore.nn as nn

def conv_1x1_bn(inp, oup):
    """1x1 pointwise convolution -> BatchNorm -> HSwish."""
    weight = weight_variable()  # weight-initializer helper defined elsewhere in the module
    return nn.SequentialCell([
        nn.Conv2d(in_channels=inp, out_channels=oup, kernel_size=1, stride=1, padding=0,
                  weight_init=weight, has_bias=False),
        nn.BatchNorm2d(oup, eps=0.001),
        nn.HSwish()])
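
A minimal usage sketch, assuming weight_variable is the module's weight-initializer helper (approximated here with a TruncatedNormal; the channel sizes and input shape are made up for illustration):

import numpy as np
import mindspore
from mindspore import Tensor
from mindspore.common.initializer import TruncatedNormal

def weight_variable():
    # Hypothetical stand-in for the module's helper: a truncated-normal initializer.
    return TruncatedNormal(0.02)

block = conv_1x1_bn(inp=16, oup=32)
x = Tensor(np.ones((1, 16, 56, 56)), mindspore.float32)
y = block(x)  # shape (1, 32, 56, 56): a 1x1 conv changes channels, not spatial size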
Example 2
import mindspore.nn as nn

def conv_3x3_bn(inp, oup, stride):
    """3x3 convolution with configurable stride -> BatchNorm -> HSwish."""
    weight = weight_variable()  # weight-initializer helper defined elsewhere in the module
    return nn.SequentialCell([
        nn.Conv2d(in_channels=inp, out_channels=oup, kernel_size=3, stride=stride, padding=1,
                  weight_init=weight, has_bias=False, pad_mode='pad'),
        nn.BatchNorm2d(oup, eps=0.001),  # momentum=0.1 left commented out in the source
        nn.HSwish()])
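
The same kind of sketch, reusing the hypothetical weight_variable from the Example 1 sketch; with stride=2, pad_mode='pad', and padding=1 the block halves the spatial resolution, the typical stem of a MobileNet-style network:

import numpy as np
import mindspore
from mindspore import Tensor
from mindspore.common.initializer import TruncatedNormal

def weight_variable():
    # Same hypothetical stand-in as in the Example 1 sketch.
    return TruncatedNormal(0.02)

stem = conv_3x3_bn(inp=3, oup=16, stride=2)
x = Tensor(np.ones((1, 3, 224, 224)), mindspore.float32)
y = stem(x)  # shape (1, 16, 112, 112): floor((224 + 2*1 - 3) / 2) + 1 = 112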
Example 3
import mindspore.nn as nn

class Activation(nn.Cell):
    """Wraps a named activation function as an nn.Cell."""

    def __init__(self, act_func):
        super(Activation, self).__init__()
        if act_func == 'relu':
            self.act = nn.ReLU()
        elif act_func == 'relu6':
            self.act = nn.ReLU6()
        elif act_func in ('hsigmoid', 'hard_sigmoid'):
            self.act = MyHSigmoid()  # custom hard-sigmoid cell defined elsewhere, used in place of nn.HSigmoid()
        elif act_func in ('hswish', 'hard_swish'):
            self.act = nn.HSwish()
        else:
            raise NotImplementedError("Unsupported activation: %s" % act_func)

    def construct(self, x):
        return self.act(x)
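
A usage sketch, with a hypothetical MyHSigmoid filled in using the usual hard-sigmoid definition relu6(x + 3) / 6 (the real class lives elsewhere in the source module):

import numpy as np
import mindspore
import mindspore.nn as nn
from mindspore import Tensor

class MyHSigmoid(nn.Cell):
    # Hypothetical reconstruction: hard sigmoid as ReLU6(x + 3) / 6.
    def __init__(self):
        super(MyHSigmoid, self).__init__()
        self.relu6 = nn.ReLU6()

    def construct(self, x):
        return self.relu6(x + 3.) * (1. / 6.)

x = Tensor(np.ones((1, 16, 7, 7)), mindspore.float32)
act = Activation('hard_sigmoid')
y = act(x)  # elementwise activation, shape preserved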
Example 4
import mindspore.nn as nn

def Act(type='default'):
    """Factory mapping an activation name to an nn.Cell instance."""
    if type in ('default', 'def'):
        # Resolve the configured default name, then recurse once to build it.
        return Act(DEFAULTS['activation'])  # DEFAULTS is a module-level config dict defined elsewhere
    if type == 'relu':
        return nn.ReLU()
    elif type == 'sigmoid':
        return nn.Sigmoid()
    elif type == 'hswish':
        return nn.HSwish()
    elif type == 'leaky_relu':
        return nn.LeakyReLU(alpha=DEFAULTS['leaky_relu']['alpha'])
    else:
        raise ValueError("Unsupported activation type: %s" % type)
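
A sketch of the assumed DEFAULTS table and two calls; the key names are inferred from the lookups above, and the values are placeholders:

DEFAULTS = {
    'activation': 'relu',          # assumed default activation name
    'leaky_relu': {'alpha': 0.2},  # assumed negative-slope value
}

act = Act()                # 'default' resolves to DEFAULTS['activation'], giving nn.ReLU()
leaky = Act('leaky_relu')  # nn.LeakyReLU(alpha=0.2)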