Example #1
class _2LayerScale3(nn.Module):
    def __init__(self):
        super(_2LayerScale3, self).__init__()
        ## depth rescaler: -1~1 -> min_depth~max_depth
        feature = 12
        self.layer_num = 2

        # a side branch that predicts from the original image with rectangular kernels
        self.side_branch1 = nn.ModuleList()
        self.side_branch1.append(baseM.conv_dv_2(3, feature))              # 256x256 -> 128x128

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature))  # 128x128 -> 64x128
        feature = feature * 2

        self.side_branch1.append(baseM.conv_dv_2(feature, 2 * feature))    # 64x128 -> 32x64
        feature = feature * 2

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature))  # 32x64 -> 16x64
        feature = feature * 2

        self.side_branch1.append(baseM.conv_dv_2(feature, 2 * feature))    # 16x64 -> 8x32
        feature = feature * 2

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature))  # 8x32 -> 4x32
        feature = feature * 2

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature, k=(3, 1), s=(1, 1), p=(0, 0)))  # 4x32 -> 2x32
        feature = feature * 2

        # full-resolution head: transposed conv upsamples the width 8x (2x32 -> 2x256)
        self.fullout = nn.Sequential(
            nn.ConvTranspose2d(feature, 1, (1, 8), (1, 8), (0, 0), bias=False))

        # low-scale head: 1x1 transposed conv, spatial size unchanged (2x32)
        self.low_scale_out = nn.Sequential(
            nn.ConvTranspose2d(feature, 1, (1, 1), (1, 1), (0, 0), bias=False))
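The helpers baseM.conv_dv_2 and baseM.conv_keep_W come from a module that is not shown in these examples. Judging from the size annotations, conv_dv_2 halves both height and width while conv_keep_W halves only the height; the sketch below is a minimal guess under that assumption (the kernel sizes, normalization, and activation are assumptions, not the original baseM code).

import torch.nn as nn

def conv_dv_2(in_ch, out_ch, k=(4, 4), s=(2, 2), p=(1, 1)):
    # Assumed helper: strided conv block that halves both H and W (e.g. 256x256 -> 128x128).
    return nn.Sequential(
        nn.Conv2d(in_ch, out_ch, k, s, p, bias=False),
        nn.BatchNorm2d(out_ch),
        nn.LeakyReLU(0.1, inplace=True),
    )

def conv_keep_W(in_ch, out_ch, k=(4, 3), s=(2, 1), p=(1, 1)):
    # Assumed helper: rectangular strided conv that halves H but keeps W (e.g. 128x128 -> 64x128).
    return nn.Sequential(
        nn.Conv2d(in_ch, out_ch, k, s, p, bias=False),
        nn.BatchNorm2d(out_ch),
        nn.LeakyReLU(0.1, inplace=True),
    )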
Example #2
class _2LayerScale1(nn.Module):
    def __init__(self):
        super(_2LayerScale1, self).__init__()
        ## depth rescaler: -1~1 -> min_depth~max_depth
        feature = 8
        self.layer_num = 2

        # a side branch that predicts from the original image with rectangular kernels
        self.side_branch1 = nn.ModuleList()
        self.side_branch1.append(baseM.conv_keep_W(3, feature))            # 256x256 -> 128x256

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature))  # 128x256 -> 64x256
        feature = feature * 2

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature))  # 64x256 -> 32x256
        feature = feature * 2

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature))  # 32x256 -> 16x256
        feature = feature * 2

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature))  # 16x256 -> 8x256
        feature = feature * 2

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature))  # 8x256 -> 4x256
        feature = feature * 2

        self.side_branch1.append(baseM.conv_keep_W(feature, 2 * feature, k=(4, 1), s=(1, 1), p=(0, 0)))  # 4x256 -> 2x256
        feature = feature * 2

        # unused alternative full-resolution head:
        # self.fullout = nn.Sequential(
        #     nn.Conv2d(feature, 256, (1, 1), (1, 1), (0, 0), bias=False))  # 2x256

        # low-scale head: 1x1 conv to 2 output channels, spatial size unchanged
        self.low_scale_out = nn.Sequential(
            nn.Conv2d(feature, 2, (1, 1), (1, 1), (0, 0), bias=False))
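Both examples define only __init__; the forward pass is not shown. Assuming it simply chains the side_branch1 blocks and then applies the low-scale head, a usage sketch for _2LayerScale1 would look like the following (run_2layer_scale1 is a hypothetical helper, not part of the original class):

import torch

def run_2layer_scale1(model, x):
    # Assumed forward: pass x through each side-branch block in order, then the 1x1 head.
    # The real forward method may combine the fullout / low_scale_out heads differently.
    for block in model.side_branch1:
        x = block(x)
    return model.low_scale_out(x)

# x = torch.randn(1, 3, 256, 256)              # one RGB image at the 256x256 input size used above
# out = run_2layer_scale1(_2LayerScale1(), x)  # roughly (1, 2, 2, 256), per the size comments in __init__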