Code example #1
0
    def __init__(self, maxdisp=192):
        """Build the GANet stereo-matching network.

        Args:
            maxdisp (int): maximum disparity the network predicts.
                The cost volume is constructed with ``maxdisp // 3``
                levels (presumably because matching runs at 1/3
                resolution — confirm against GetCostVolume/Feature).
        """
        super(GANet, self).__init__()
        self.maxdisp = maxdisp
        # Stem: two 3x3 convs lifting the RGB input 3 -> 16 -> 32 channels.
        self.conv_start = nn.Sequential(
            BasicConv(3, 16, kernel_size=3, padding=1),
            BasicConv(16, 32, kernel_size=3, padding=1))

        self.conv_x = BasicConv(32, 32, kernel_size=3, padding=1)
        self.conv_y = BasicConv(32, 32, kernel_size=3, padding=1)
        self.conv_refine = nn.Conv2d(32,
                                     32, (3, 3), (1, 1), (1, 1),
                                     bias=False)
        self.bn_relu = nn.Sequential(BatchNorm2d(32), nn.ReLU(inplace=True))
        self.feature = Feature()
        self.guidance = Guidance()
        self.cost_agg = CostAggregation(self.maxdisp)
        # Integer floor division instead of int(maxdisp / 3): identical for
        # the positive disparities used here, but avoids the float round
        # trip and its rounding hazards for large values.
        self.cv = GetCostVolume(self.maxdisp // 3)

        # Kaiming (He) init for all convs; BN affine params start as the
        # identity transform (weight=1, bias=0).
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.Conv3d)):
                nn.init.kaiming_normal_(m.weight,
                                        mode='fan_out',
                                        nonlinearity='relu')
            elif isinstance(m, (BatchNorm2d, BatchNorm3d)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
Code example #2
0
 def __init__(self,
              in_channels,
              out_channels,
              deconv=False,
              is_3d=False,
              bn=True,
              relu=True,
              **kwargs):
     """(Transposed) convolution with companion batch-norm layer.

     Args:
         in_channels (int): input channel count.
         out_channels (int): output channel count.
         deconv (bool): use a transposed convolution instead of a
             regular one.
         is_3d (bool): use the 3-D conv/BN variants instead of 2-D.
         bn (bool): stored on ``self.use_bn``; note the BN layer is
             created unconditionally — presumably forward() consults
             this flag (confirm in the forward pass).
         relu (bool): stored on ``self.relu`` for use by forward().
         **kwargs: forwarded to the conv constructor (kernel_size,
             stride, padding, ...).
     """
     super(BasicConv, self).__init__()
     self.relu = relu
     self.use_bn = bn
     # Pick the conv and norm classes from the two flags; the conv never
     # carries its own bias since BN supplies the affine shift.
     if is_3d:
         conv_cls = nn.ConvTranspose3d if deconv else nn.Conv3d
         norm_cls = BatchNorm3d
     else:
         conv_cls = nn.ConvTranspose2d if deconv else nn.Conv2d
         norm_cls = BatchNorm2d
     self.conv = conv_cls(in_channels, out_channels, bias=False, **kwargs)
     self.bn = norm_cls(out_channels)