Example #1
0
 def __init__(self, model="ProgressiveDilated", model_path=None, kernel_size=3, num_layers=1,
              train_unet_decoder=False, train_unet=False):
     """Build a UNet encoder followed by bidirectional ConvLSTM heads.

     Args:
         model: Which UNet variant to use as the backbone. One of
             "ProgressiveDilated", "Dilated", "Baseline",
             "Original_with_BatchNorm"; any other value falls back to
             UNet_Original.
         model_path: Optional path to a state dict to pre-load into the UNet.
         kernel_size: Spatial kernel size for the ConvLSTM cells.
         num_layers: Number of stacked ConvLSTM layers per direction.
         train_unet_decoder: Flag stored for the training loop (whether to
             fine-tune the UNet decoder).
         train_unet: Flag stored for the training loop (whether to fine-tune
             the whole UNet).
     """
     super().__init__()

     if model == "ProgressiveDilated":
         self.unet = UNet_ProgressiveDilated(in_channels=1, out_channels=3)
     elif model == "Dilated":
         self.unet = UNet_Dilated(in_channels=1, out_channels=3)
     # BUG FIX: this branch previously re-tested model == "Dilated", making
     # the UNet_Baseline backbone unreachable. "Baseline" matches the
     # sibling constructor's dispatch.
     elif model == "Baseline":
         self.unet = UNet_Baseline(in_channels=1, out_channels=3)
     elif model == "Original_with_BatchNorm":
         self.unet = UNet_Original_with_BatchNorm(in_channels=1, out_channels=3)
     else:
         self.unet = UNet_Original(in_channels=1, out_channels=3)

     if model_path:
         print("load model -- mode: {}".format(model))
         self.unet.load_state_dict(torch.load(model_path))

     self.train_unet_decoder = train_unet_decoder
     self.train_unet = train_unet

     # Two independent ConvLSTM stacks process the sequence in opposite
     # temporal directions; their 32-channel outputs are concatenated
     # (hence 32*2 input channels in the final conv).
     self.convlstm_forward = ConvLSTM(input_size=(256, 256), input_dim=32,
                                      hidden_dim=32, kernel_size=(kernel_size, kernel_size),
                                      num_layers=num_layers, batch_first=False,
                                      bias=True, return_all_layers=False)

     self.convlstm_backward = ConvLSTM(input_size=(256, 256), input_dim=32,
                                      hidden_dim=32, kernel_size=(kernel_size, kernel_size),
                                      num_layers=num_layers, batch_first=False,
                                      bias=True, return_all_layers=False)

     self.last_conv = nn.Conv2d(in_channels=32*2, out_channels=3, kernel_size=3, padding=1)
Example #2
0
 def __init__(self, t_shift, model="ProgressiveDilated", model_path=None, kernel_size=3,
              train_unet_decoder=False, train_unet=False):
     """Build a UNet backbone followed by a 3D temporal convolution block.

     Args:
         t_shift: Temporal extent of the Conv3d kernels.
         model: UNet variant name; unknown values fall back to UNet_Original.
         model_path: Optional state-dict path to pre-load into the UNet.
         kernel_size: Spatial kernel size for the Conv3d layers.
         train_unet_decoder: Flag stored for the training loop.
         train_unet: Flag stored for the training loop.
     """
     super().__init__()

     # Backbone selection via dispatch table; missing keys default to
     # the original UNet, mirroring the else-branch of an if/elif chain.
     backbones = {
         "ProgressiveDilated": UNet_ProgressiveDilated,
         "Dilated": UNet_Dilated,
         "Baseline": UNet_Baseline,
         "Original_with_BatchNorm": UNet_Original_with_BatchNorm,
     }
     backbone_cls = backbones.get(model, UNet_Original)
     self.unet = backbone_cls(in_channels=1, out_channels=3)

     if model_path:
         print("load model -- mode: {}".format(model))
         self.unet.load_state_dict(torch.load(model_path))

     self.train_unet_decoder = train_unet_decoder
     self.train_unet = train_unet

     # "Same" padding along each axis: kernel extents are assumed odd so
     # int((k - 1) / 2) preserves the temporal and spatial dimensions.
     pad_t = int((t_shift - 1) / 2)
     pad_s = int((kernel_size - 1) / 2)
     conv3d_kwargs = dict(
         in_channels=32,
         out_channels=32,
         kernel_size=(t_shift, kernel_size, kernel_size),
         padding=(pad_t, pad_s, pad_s),
     )

     self.conv_temp_block = nn.Sequential(
         nn.Conv3d(**conv3d_kwargs),
         nn.BatchNorm3d(32),
         nn.PReLU(),
         nn.Conv3d(**conv3d_kwargs),
     )
     self.last_conv = nn.Conv2d(in_channels=32, out_channels=3, kernel_size=3, padding=1)