Code Example #1
 def __init__(self, in_chan, out_chan, stride=1):
     super(BasicBlock, self).__init__()
     self.conv1 = conv3x3(in_chan, out_chan, stride)
     self.bn1 = BatchNorm2d(out_chan)
     self.conv2 = conv3x3(out_chan, out_chan)
     self.bn2 = BatchNorm2d(out_chan, activation='none')
     self.relu = nn.ReLU(inplace=True)
     self.downsample = None
     # 1x1 projection shortcut, used when the channel count or stride changes
     if in_chan != out_chan or stride != 1:
         self.downsample = nn.Sequential(
             nn.Conv2d(in_chan, out_chan,
                       kernel_size=1, stride=stride, bias=False),
             BatchNorm2d(out_chan, activation='none'),
             )
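
The listing above shows only the constructor (`nn` is `torch.nn`; the `activation='none'` keyword suggests `BatchNorm2d` is a project-specific ABN-style layer rather than `torch.nn.BatchNorm2d`). As a hedged sketch, the matching forward pass would follow the standard residual pattern; the project's actual forward() may differ in detail:

 def forward(self, x):
     # main branch: conv -> BN -> conv -> BN (the second BN is built with activation='none')
     out = self.conv1(x)
     out = self.bn1(out)
     out = self.conv2(out)
     out = self.bn2(out)
     # shortcut branch: identity, or the 1x1 projection when shape or stride changes
     shortcut = x if self.downsample is None else self.downsample(x)
     # sum the branches and apply the final ReLU
     return self.relu(out + shortcut)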
Code Example #2
File: model.py  Project: zhixuanli/BiSeNet-1
 def __init__(self, in_chan, out_chan, ks=3, stride=1, padding=1, *args, **kwargs):
     super(ConvBNReLU, self).__init__()
     self.conv = nn.Conv2d(in_chan,
                           out_chan,
                           kernel_size=ks,
                           stride=stride,
                           padding=padding,
                           bias=False)
     self.bn = BatchNorm2d(out_chan)
     self.init_weight()
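
The ConvBNReLU constructor stops at the BN layer and a weight-initialization call, so the activation is not visible in this excerpt. A minimal sketch of an assumed forward pass (the project may instead fuse the ReLU into its custom BatchNorm2d):

 def forward(self, x):
     # assumed order: conv -> BN -> ReLU; the explicit ReLU is a guess,
     # since the excerpt does not include the module's forward()
     x = self.conv(x)
     x = self.bn(x)
     return nn.functional.relu(x, inplace=True)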
Code Example #3
 def __init__(self, in_chan, out_chan, *args, **kwargs):
     super(AttentionRefinementModule, self).__init__()
     self.conv = ConvBNReLU(in_chan, out_chan, ks=3, stride=1, padding=1)
     self.conv_atten = nn.Conv2d(out_chan,
                                 out_chan,
                                 kernel_size=1,
                                 bias=False)
     self.bn_atten = BatchNorm2d(out_chan, activation='none')
     self.sigmoid_atten = nn.Sigmoid()
     self.init_weight()
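
For context, the Attention Refinement Module described in the BiSeNet paper reweights a feature map with channel attention derived from a global average pool. A hedged sketch of a forward pass matching the layers declared above (the project's exact implementation may vary):

 def forward(self, x):
     feat = self.conv(x)
     # squeeze to 1x1, then 1x1 conv + BN + sigmoid to get per-channel weights
     atten = nn.functional.adaptive_avg_pool2d(feat, 1)
     atten = self.conv_atten(atten)
     atten = self.bn_atten(atten)
     atten = self.sigmoid_atten(atten)
     # broadcast-multiply the channel weights back onto the feature map
     return feat * atten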
Code Example #4
 def __init__(self):
     super(Resnet18, self).__init__()
     self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
                            bias=False)
     self.bn1 = BatchNorm2d(64)
     self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
     self.layer1 = create_layer_basic(64, 64, bnum=2, stride=1)
     self.layer2 = create_layer_basic(64, 128, bnum=2, stride=2)
     self.layer3 = create_layer_basic(128, 256, bnum=2, stride=2)
     self.layer4 = create_layer_basic(256, 512, bnum=2, stride=2)
     self.init_weight()
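
This backbone builds a standard ResNet-18 stem plus four basic-block stages (`create_layer_basic` is a project helper not shown here). BiSeNet's context path usually consumes features at several strides, so a plausible (assumed) forward pass looks like this:

 def forward(self, x):
     x = self.conv1(x)             # stride 2
     x = self.bn1(x)
     x = self.maxpool(x)           # stride 4
     x = self.layer1(x)            # stride 4
     feat8 = self.layer2(x)        # stride 8
     feat16 = self.layer3(feat8)   # stride 16
     feat32 = self.layer4(feat16)  # stride 32
     # returning multiple scales is an assumption based on how BiSeNet uses its backbone
     return feat8, feat16, feat32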
Code Example #5
 def __init__(self,
              in_planes,
              out_planes,
              ksize,
              stride,
              pad,
              dilation=1,
              groups=1,
              has_bias=False):
     super(ConvBnRelu, self).__init__()
     self.conv = nn.Conv2d(in_planes,
                           out_planes,
                           kernel_size=ksize,
                           stride=stride,
                           padding=pad,
                           dilation=dilation,
                           groups=groups,
                           bias=has_bias)
     self.bn = BatchNorm2d(out_planes)
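
This more general ConvBnRelu variant (with dilation and groups) is also cut off after the BN layer. A small sketch of the assumed forward pass:

 def forward(self, x):
     # assumed order: conv -> BN -> ReLU (the excerpt stops after self.bn)
     x = self.conv(x)
     x = self.bn(x)
     return nn.functional.relu(x, inplace=True)

With ksize=3, stride=1, pad=1 the spatial size is preserved; with stride=2 it is halved, which is how such blocks are typically used for downsampling.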