def __init__(self, maxdisp=192):
    """Set up the disparity head: softmin over the cost volume, then regression.

    Args:
        maxdisp: maximum disparity handled by the regression module.
    """
    super(Disp, self).__init__()
    self.maxdisp = maxdisp
    # Softmin converts matching costs (lower = better) into probabilities over disparities.
    self.softmax = nn.Softmin(dim=1)
    self.disparity = DisparityRegression(maxdisp=int(self.maxdisp))
    # Collapse the 32 cost-volume feature channels down to a single channel.
    self.conv32x1 = nn.Conv3d(
        32, 1,
        kernel_size=(3, 3, 3),
        stride=(1, 1, 1),
        padding=(1, 1, 1),
        bias=False,
    )
def __init__(self, maxdisp=192):
    """Set up the aggregated disparity head: LGA filtering, softmin, regression.

    Args:
        maxdisp: maximum disparity handled by the regression module.
    """
    super(DispAgg, self).__init__()
    self.maxdisp = maxdisp
    # Local guided-aggregation filters (all radius 2) applied to the cost volume.
    self.LGA3 = LGA3(radius=2)
    self.LGA2 = LGA2(radius=2)
    self.LGA = LGA(radius=2)
    # Softmin converts matching costs (lower = better) into probabilities over disparities.
    self.softmax = nn.Softmin(dim=1)
    # int(...) for consistency with the sibling constructors, which coerce
    # maxdisp before handing it to DisparityRegression.
    self.disparity = DisparityRegression(maxdisp=int(self.maxdisp))
    # Collapse the 32 cost-volume feature channels down to a single channel.
    self.conv32x1 = nn.Conv3d(32, 1, (3, 3, 3), (1, 1, 1), (1, 1, 1), bias=False)
def __init__(self, maxdisp=192):
    """Set up the aggregated disparity head: LGA filtering, softmin, regression.

    Args:
        maxdisp: maximum disparity handled by the regression module.
    """
    super(DispAgg, self).__init__()
    self.maxdisp = maxdisp
    # Three local guided-aggregation filters, each with radius 2.
    self.LGA3 = LGA3(radius=2)
    self.LGA2 = LGA2(radius=2)
    self.LGA = LGA(radius=2)
    # Softmin turns matching costs (lower = better) into a probability volume.
    self.softmax = nn.Softmin(dim=1)
    self.disparity = DisparityRegression(maxdisp=int(self.maxdisp))
    # Reduce the 64 cost-volume feature channels to a single channel.
    self.conv64x1 = nn.Conv3d(
        64, 1,
        kernel_size=(3, 3, 3),
        stride=(1, 1, 1),
        padding=(1, 1, 1),
        bias=False,
    )
def __init__(self, maxdisp=192, InChannel=32):
    """Set up the disparity head with a configurable cost-volume channel count.

    Args:
        maxdisp: maximum disparity handled by the regression module.
        InChannel: number of feature channels in the incoming cost volume;
            64 selects a pointwise (1x1x1) reduction, otherwise a 3x3x3 one.
    """
    super(Disp, self).__init__()
    self.maxdisp = maxdisp
    # Softmin converts matching costs (lower = better) into probabilities over disparities.
    self.softmax = nn.Softmin(dim=1)
    # int(...) for consistency with the sibling constructors.
    self.disparity = DisparityRegression(maxdisp=int(self.maxdisp))
    if InChannel == 64:
        # BUGFIX: the original used padding (1, 1, 1) with a 1x1x1 kernel,
        # which inflates every spatial dimension by 2 instead of preserving
        # shape like the 3x3x3/padding-1 branch. A pointwise conv must use
        # zero padding to be shape-preserving.
        self.conv3d_2d = nn.Conv3d(InChannel, 1, (1, 1, 1), (1, 1, 1), (0, 0, 0), bias=False)
    else:
        self.conv3d_2d = nn.Conv3d(InChannel, 1, (3, 3, 3), (1, 1, 1), (1, 1, 1), bias=False)