Example 1
0
 def forward(self, conv_high, conv_low):
     """Reweight `conv_low` by a global attention map derived from `conv_high`,
     crop/reshape it to match, and return the residual sum.

     Args:
         conv_high: 5-D tensor (N, C, A, B, C') from the higher-level branch.
         conv_low:  5-D tensor from the lower-level branch; spatially larger,
                    cropped down to `conv_high`'s spatial size before the sum.

     Returns:
         Tensor with the same shape as `conv_high`.
     """
     _, _, A, B, C = conv_high.size()
     # NOTE: the original called `.cuda()` here, which hard-codes a GPU device
     # and crashes on CPU-only machines. The pooled tensor is already on the
     # same device as `conv_high`, so only the dtype cast is needed.
     global_att = self.avg_pool(conv_high).to(dtype=torch.float)
     global_att = self.sigmoid(self.conv1x1x1(global_att))
     reweighted_conv_low = conv_low * global_att
     # Crop the (larger) low-level map to the high-level spatial size.
     reweighted_conv_low = Crop(reweighted_conv_low, [A, B, C])
     # presumably `self.reshape` matches channel counts — TODO confirm
     reweighted_conv_low = self.reshape(reweighted_conv_low)
     return conv_high + reweighted_conv_low
Example 2
0
 def forward(self, input):
     """Fuse a dilated-convolution path with a normal-convolution path.

     The normal path's output is cropped to the (smaller) spatial size of
     the dilated path's output, then the two are summed element-wise.

     Args:
         input: 5-D tensor (N, C, D, H, W) fed to both convolution paths.

     Returns:
         Tensor with the spatial size of the dilated path's output.
     """
     x_dilated = self.dilated_conv(input)
     x_normal = self.normal_conv(input)
     _, _, a, b, c = x_dilated.size()
     # Crop the normal path to match the dilated path's spatial extent.
     x_normal = Crop(x_normal, [a, b, c])
     # (removed a no-op `x_normal = x_normal` self-assignment)
     out = x_dilated + x_normal
     return out
Example 3
0
 def forward(self, x):
     """Residual block: conv path plus a cropped/reshaped identity shortcut.

     The shortcut is cropped to the conv path's spatial size and passed
     through `self.reshape` (presumably to align channels — TODO confirm)
     before the residual addition and final ReLU.
     """
     shortcut = x
     # Main path: conv+ReLU followed by a plain conv.
     main = self.conv(self.conv_relu(x))
     # Match the shortcut's spatial extent to the main path's output.
     _, _, d, h, w = main.size()
     shortcut = self.reshape(Crop(shortcut, [d, h, w]))
     main = main + shortcut
     return self.relu(main)
Example 4
0
 def forward(self, x):
     """Conditional residual block.

     When `self.out_channels_2` is non-zero, runs the second conv, adds a
     cropped 1x1-projected identity shortcut, and applies ReLU. When it is
     zero, the conv+ReLU output is returned unchanged (no residual path) —
     NOTE(review): this asymmetry looks intentional but is worth confirming.
     """
     shortcut = x
     out = self.conv_relu(x)
     if self.out_channels_2 == 0:
         return out
     out = self.conv(out)
     # Crop the shortcut to the conv output's spatial size, then project
     # channels with the 1x1 conv before the residual addition.
     _, _, d, h, w = out.size()
     shortcut = self.conv1x1(Crop(shortcut, [d, h, w]))
     out = out + shortcut
     return self.relu(out)
Example 5
0
    def forward(self, x, args, triple=False):
        """Two-path classifier head: a normal path on a central crop and a
        dilated path on the full input, concatenated and classified.

        Args:
            x: input tensor fed to both paths.
            args: unused here — NOTE(review): kept only for interface
                compatibility; verify callers before removing.
            triple: when True, apply an AdaDropout attention layer to the
                merged features before the FC layer.

        Returns:
            Classification output tensor.
        """
        # Normal path sees only a fixed 27^3 central crop of the input.
        normal_out = self.normal_path(Crop(x, [27, 27, 27]))

        # Dilated path sees the full input.
        dilated_out = self.dilated_path(x)

        # Concatenate along the channel dimension.
        merged = torch.cat((normal_out, dilated_out), 1)

        if not triple:
            fc_out = self.FC(merged)
        else:
            # NOTE(review): AdaDropout is instantiated on every forward call,
            # so any parameters/state it holds are not registered on this
            # module — consider constructing it once in __init__.
            attention = AdaDropout(100, self.wrs_ratio, self.test_state)
            fc_out = self.FC(attention(merged))

        return self.classification(fc_out)