def __init__(self, inp, oup, stride, kernel=3, expand_ratio=1):
    """Construct an InvertedConv (inverted-residual) convolution stack.

    :param inp: input channel count
    :param oup: output channel count
    :param stride: stride of the depthwise convolution
    :param kernel: depthwise kernel size
    :param expand_ratio: channel expansion multiplier for the hidden layer
    """
    super(InvertedConv, self).__init__()
    hidden_dim = round(inp * expand_ratio)
    layers = []
    # Pointwise expansion is only emitted when the block actually widens
    # the channels (expand_ratio > 1); otherwise hidden_dim == inp.
    if expand_ratio > 1:
        layers.extend([
            ops.Conv2d(in_channels=inp, out_channels=hidden_dim, kernel_size=1,
                       stride=1, padding=0, bias=False),
            ops.BatchNorm2d(num_features=hidden_dim),
            ops.Relu6(inplace=True),
        ])
    # Depthwise conv, then a linear (no activation) pointwise projection.
    layers.extend([
        ops.Conv2d(in_channels=hidden_dim, out_channels=hidden_dim,
                   kernel_size=kernel, stride=stride, padding=kernel // 2,
                   groups=hidden_dim, bias=False, depthwise=True),
        ops.BatchNorm2d(num_features=hidden_dim),
        ops.Relu6(inplace=True),
        ops.Conv2d(in_channels=hidden_dim, out_channels=oup, kernel_size=1,
                   stride=1, padding=0, bias=False),
        ops.BatchNorm2d(num_features=oup),
    ])
    self.models = Sequential(*layers)
def __init__(self, C_in, C_out, kernel_size, stride, padding, affine=True, activation='relu', inplace=False):
    """Construct ConvBnAct class.

    Builds Conv2d -> BatchNorm2d -> activation, where the activation is
    selected by name ('hswish', 'hsigmoid', 'relu6'); any other name falls
    back to plain Relu.
    """
    super(ConvBnAct, self).__init__()
    self.conv2d = ops.Conv2d(C_in, C_out, kernel_size, stride, padding, bias=False)
    self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
    # Map the activation name to its op class; only the chosen one is built.
    act_cls = {
        'hswish': ops.Hswish,
        'hsigmoid': ops.Hsigmoid,
        'relu6': ops.Relu6,
    }.get(activation, ops.Relu)
    self.act = act_cls(inplace=inplace)
def __init__(self, C_in, C_out, kernel_size, stride, padding, affine=True, use_relu6=False):
    """Construct ConvBnRelu class: Conv2d -> BatchNorm2d -> Relu/Relu6."""
    super(ConvBnRelu, self).__init__()
    self.conv2d = ops.Conv2d(C_in, C_out, kernel_size, stride=stride,
                             padding=padding, bias=False)
    self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
    # Relu6 clamps activations to [0, 6]; selected when use_relu6 is set.
    relu_cls = ops.Relu6 if use_relu6 else ops.Relu
    self.relu = relu_cls(inplace=False)
def __init__(self, C_in, C_out, kernel_size, stride, padding, Conv2d='Conv2d',
             affine=True, use_relu6=False, norm_layer='BN', has_bn=True,
             has_relu=True, **kwargs):
    """Construct ConvBnRelu class (configurable conv/norm/activation stack).

    :param C_in: input channel count
    :param C_out: output channel count
    :param kernel_size: convolution kernel size
    :param stride: convolution stride
    :param padding: convolution padding
    :param Conv2d: convolution flavour, 'Conv2d' or 'ConvWS2d'
    :param affine: whether the norm layer has learnable affine parameters
    :param use_relu6: use Relu6 instead of Relu when has_relu is True
    :param norm_layer: normalization flavour, 'BN', 'GN' or 'Sync'
    :param has_bn: whether to append a normalization layer
    :param has_relu: whether to append an activation layer
    :param kwargs: extra options; 'num_groups' is required when norm_layer == 'GN'
    :raises ValueError: if Conv2d or norm_layer names an unsupported flavour
    """
    super(ConvBnRelu, self).__init__()
    if Conv2d == 'Conv2d':
        self.conv2d = ops.Conv2d(C_in, C_out, kernel_size, stride=stride,
                                 padding=padding, bias=False)
    elif Conv2d == 'ConvWS2d':
        self.conv2d = ops.ConvWS2d(C_in, C_out, kernel_size, stride=stride,
                                   padding=padding, bias=False)
    else:
        # Previously an unknown name silently left self.conv2d unset and the
        # failure surfaced later as an AttributeError; fail fast instead.
        raise ValueError("unsupported Conv2d type: {}".format(Conv2d))
    if has_bn:
        if norm_layer == 'BN':
            self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
        elif norm_layer == 'GN':
            num_groups = kwargs.pop('num_groups')
            self.batch_norm2d = ops.GroupNorm(num_groups, C_out, affine=affine)
        elif norm_layer == 'Sync':
            self.batch_norm2d = ops.SyncBatchNorm(C_out, affine=affine)
        else:
            # Previously an unknown name silently skipped normalization even
            # though the caller asked for it (has_bn=True).
            raise ValueError("unsupported norm_layer type: {}".format(norm_layer))
    if has_relu:
        self.relu = ops.Relu6(inplace=False) if use_relu6 else ops.Relu(inplace=False)