def __init__(self, in_features, kernel_size, padding):
    super(ResBlock2d, self).__init__()
    self.conv1 = nn.Conv2d(in_channels=in_features, out_channels=in_features,
                           kernel_size=kernel_size, padding=padding)
    self.conv2 = nn.Conv2d(in_channels=in_features, out_channels=in_features,
                           kernel_size=kernel_size, padding=padding)
    self.norm1 = nn.BatchNorm2d(in_features)
    self.norm2 = nn.BatchNorm2d(in_features)
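
# Hedged sketch, not taken from the original source: a plausible forward pass for
# ResBlock2d, assuming the common norm -> ReLU -> conv ordering with an additive
# skip connection (F refers to torch.nn.functional).
def forward(self, x):
    out = self.norm1(x)
    out = F.relu(out)
    out = self.conv1(out)
    out = self.norm2(out)
    out = F.relu(out)
    out = self.conv2(out)
    out = out + x  # residual connection; channel count and spatial size are unchanged
    return out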

# ConvBNLayer (PaddlePaddle API: nn is paddle.nn, ParamAttr comes from paddle,
# L2Decay from paddle.regularizer; the weight_attr / bias_attr arguments require
# the Paddle classes nn.Conv2D and nn.BatchNorm2D).
def __init__(self, ch_in, ch_out, filter_size=3, stride=1, groups=1,
             padding=0, act="leaky", name=None):
    super(ConvBNLayer, self).__init__()
    self.conv = nn.Conv2D(
        in_channels=ch_in,
        out_channels=ch_out,
        kernel_size=filter_size,
        stride=stride,
        padding=padding,
        groups=groups,
        weight_attr=ParamAttr(name=name + '.conv.weights'),
        bias_attr=False)
    bn_name = name + '.bn'
    self.batch_norm = nn.BatchNorm2D(
        ch_out,
        weight_attr=ParamAttr(name=bn_name + '.scale', regularizer=L2Decay(0.)),
        bias_attr=ParamAttr(name=bn_name + '.offset', regularizer=L2Decay(0.)))
    self.act = act
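
# Hedged sketch, not taken from the original source: a plausible forward pass for
# ConvBNLayer, assuming act == "leaky" selects a leaky ReLU and that F is
# paddle.nn.functional. The 0.1 negative slope is an assumption.
def forward(self, inputs):
    out = self.conv(inputs)
    out = self.batch_norm(out)
    if self.act == "leaky":
        out = F.leaky_relu(out, negative_slope=0.1)
    return out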

def __init__(self, in_channels, out_channels, **kwargs):
    super(BasicConv2d, self).__init__()
    self.conv = nn.Conv2d(in_channels, out_channels, bias=False, **kwargs)
    self.bn = nn.BatchNorm2d(out_channels, eps=0.001)
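
# Hedged sketch, not taken from the original source: a plausible forward pass for
# BasicConv2d in the usual conv -> BN -> ReLU order (F is torch.nn.functional).
def forward(self, x):
    x = self.conv(x)
    x = self.bn(x)
    return F.relu(x, inplace=True)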

def __init__(self, in_features, out_features, groups=1, kernel_size=3, padding=1):
    super(SameBlock2d, self).__init__()
    self.conv = nn.Conv2d(in_channels=in_features, out_channels=out_features,
                          kernel_size=kernel_size, padding=padding, groups=groups)
    self.norm = nn.BatchNorm2d(out_features)
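
# Hedged sketch, not taken from the original source: a plausible forward pass for
# SameBlock2d; with the default kernel_size=3 and padding=1 the spatial
# resolution is preserved (F is torch.nn.functional).
def forward(self, x):
    out = self.conv(x)
    out = self.norm(out)
    out = F.relu(out)
    return out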

def __init__(self, in_features, out_features, kernel_size=3, padding=1, groups=1):
    super(DownBlock2d, self).__init__()
    self.conv = nn.Conv2d(in_channels=in_features, out_channels=out_features,
                          kernel_size=kernel_size, padding=padding, groups=groups)
    self.norm = nn.BatchNorm2d(out_features)
    self.pool = nn.AvgPool2d(kernel_size=(2, 2))
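
# Hedged sketch, not taken from the original source: a plausible forward pass for
# DownBlock2d, assuming conv -> BN -> ReLU followed by the 2x2 average pooling,
# which halves the spatial resolution (F is torch.nn.functional).
def forward(self, x):
    out = self.conv(x)
    out = self.norm(out)
    out = F.relu(out)
    out = self.pool(out)
    return out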

# ConvBNReLU (PaddlePaddle API: nn is paddle.nn; bias_attr is a Paddle-only
# argument, so the Paddle classes nn.Conv2D and nn.BatchNorm2D are required).
def __init__(self, in_chan, out_chan, ks=3, stride=1, padding=1, *args, **kwargs):
    super(ConvBNReLU, self).__init__()
    self.conv = nn.Conv2D(in_chan, out_chan, kernel_size=ks, stride=stride,
                          padding=padding, bias_attr=False)
    self.bn = nn.BatchNorm2D(out_chan)
    self.relu = nn.ReLU()
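
# Hedged sketch, not taken from the original source: a plausible forward pass that
# simply chains the three sub-layers defined above.
def forward(self, x):
    x = self.conv(x)
    x = self.bn(x)
    x = self.relu(x)
    return x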
def __init__(self, ch_in, ch_out, filter_size=3, stride=1, groups=1, padding=0, act="leaky"): super(ConvBNLayer, self).__init__() self.conv = nn.Conv2d(in_channels=ch_in, out_channels=ch_out, kernel_size=filter_size, stride=stride, padding=padding, groups=groups, bias_attr=False) self.batch_norm = nn.BatchNorm2d( ch_out, weight_attr=ParamAttr(regularizer=L2Decay(0.)), bias_attr=ParamAttr(regularizer=L2Decay(0.))) self.act = act