def __init__(self, in_channels=3, withSkipConnections=True):
    """Build the encoder half of a SegNet: five VGG16-style down blocks.

    Fix: the original docstring documented a ``pretrained`` parameter that
    does not exist in this signature; it has been removed.

    :param in_channels: number of channels in the input image (default 3).
    :param withSkipConnections: when True, each down block is asked to also
        return its feature map (``withFeatureMap``) so a decoder can consume
        skip connections.
    """
    super().__init__()
    self.in_channels = in_channels
    self.withSkipConnections = withSkipConnections
    # VGG16 channel progression: 64 -> 128 -> 256 -> 512 -> 512.
    self.down1 = segnetDown2(self.in_channels, 64, withFeatureMap=self.withSkipConnections)
    self.down2 = segnetDown2(64, 128, withFeatureMap=self.withSkipConnections)
    self.down3 = segnetDown3(128, 256, withFeatureMap=self.withSkipConnections)
    self.down4 = segnetDown3(256, 512, withFeatureMap=self.withSkipConnections)
    self.down5 = segnetDown3(512, 512, withFeatureMap=self.withSkipConnections)
def __init__(self, n_classes=21, in_channels=3, is_unpooling=True, pretrained=True, withSkipConnections=False, enablePermEq=True):
    """Build a full SegNet encoder/decoder for set-structured inputs.

    :param n_classes: number of output classes produced by the last up block.
    :param in_channels: number of channels in the input image (default 3).
    :param is_unpooling: stored on the instance; presumably controls the
        decoder's unpooling behavior elsewhere — TODO confirm at call sites.
    :param pretrained: when True, load VGG16 weights into the encoder via
        ``init_vgg16_params``.
    :param withSkipConnections: when True, down blocks return feature maps
        and up blocks consume them as skip connections.
    :param enablePermEq: stored on the instance; toggles permutation
        equivariant layers (only affects the startup log message here).
    """
    super().__init__()
    self.in_channels = in_channels
    self.is_unpooling = is_unpooling
    self.withSkipConnections = withSkipConnections
    self.enablePermEq = enablePermEq

    skip = self.withSkipConnections
    # Encoder: VGG16-style channel progression.
    self.down1 = segnetDown2(self.in_channels, 64, withFeatureMap=skip)
    self.down2 = segnetDown2(64, 128, withFeatureMap=skip)
    self.down3 = segnetDown3(128, 256, withFeatureMap=skip)
    self.down4 = segnetDown3(256, 512, withFeatureMap=skip)
    self.down5 = segnetDown3(512, 512, withFeatureMap=skip)
    # Decoder: mirror of the encoder, ending at n_classes channels.
    self.up5 = segnetUp3(512, 512, withSkipConnections=skip)
    self.up4 = segnetUp3(512, 256, withSkipConnections=skip)
    self.up3 = segnetUp3(256, 128, withSkipConnections=skip)
    self.up2 = segnetUp2(128, 64, withSkipConnections=skip)
    self.up1 = segnetUp2(64, n_classes, withSkipConnections=skip)

    if pretrained:
        # NOTE(review): torchvision's `pretrained=` flag is deprecated in
        # newer releases in favor of `weights=` — confirm target version.
        backbone = models.vgg16(pretrained=True)
        Arch = 'SetSegNetSkip' if self.withSkipConnections else 'SetSegNet'
        print(
            '[ INFO ]: Using pre-trained weights from VGG16 with {}. Permutation equivariant layers are {}.'
            .format(Arch, 'ENABLED' if self.enablePermEq else 'DISABLED'))
        self.init_vgg16_params(backbone)
def __init__(self, out_channels=8, in_channels=3, pretrained=True, withSkipConnections=True):
    """Build a SegNet encoder/decoder producing ``out_channels`` maps.

    :param out_channels: number of channels emitted by the final up block.
    :param in_channels: number of channels in the input image (default 3).
    :param pretrained: when True, load VGG16 weights into the encoder via
        ``init_vgg16_params``.
    :param withSkipConnections: when True, down blocks return feature maps
        and up blocks consume them as skip connections.
    """
    super().__init__()
    self.in_channels = in_channels
    self.withSkipConnections = withSkipConnections

    skip = self.withSkipConnections
    # Encoder: VGG16-style channel progression.
    self.down1 = segnetDown2(self.in_channels, 64, withFeatureMap=skip)
    self.down2 = segnetDown2(64, 128, withFeatureMap=skip)
    self.down3 = segnetDown3(128, 256, withFeatureMap=skip)
    self.down4 = segnetDown3(256, 512, withFeatureMap=skip)
    self.down5 = segnetDown3(512, 512, withFeatureMap=skip)
    # Decoder: mirror of the encoder, ending at out_channels.
    self.up5 = segnetUp3(512, 512, withSkipConnections=skip)
    self.up4 = segnetUp3(512, 256, withSkipConnections=skip)
    self.up3 = segnetUp3(256, 128, withSkipConnections=skip)
    self.up2 = segnetUp2(128, 64, withSkipConnections=skip)
    self.up1 = segnetUp2(64, out_channels, withSkipConnections=skip)

    if pretrained:
        backbone = models.vgg16(pretrained=True)
        Arch = 'SegNetSkip' if self.withSkipConnections else 'SegNet'
        print('[ INFO ]: Using pre-trained weights from VGG16 with {}.'.
            format(Arch))
        self.init_vgg16_params(backbone)
def __init__(self, out_channels=8, in_channels=3, pretrained=True, withSkipConnections=True, new_version=False, additional=None):
    """Build a SegNet encoder/decoder with an optional second output head.

    :param out_channels: number of channels emitted by the final up block.
    :param in_channels: number of channels in the input image (default 3).
    :param pretrained: when True, load VGG16 weights into the encoder via
        ``init_vgg16_params``.
    :param withSkipConnections: when True, down blocks return feature maps
        and up blocks consume them as skip connections.
    :param new_version: when truthy, the main output head is built with
        ``last_layer=True``.
    :param additional: channel count for an extra output head; all
        additional output layers are built new-version (``last_layer=True``).
        ``None`` disables the extra head.
    """
    super().__init__()
    self.in_channels = in_channels
    self.withSkipConnections = withSkipConnections

    skip = self.withSkipConnections
    # Encoder: VGG16-style channel progression.
    self.down1 = segnetDown2(self.in_channels, 64, withFeatureMap=skip)
    self.down2 = segnetDown2(64, 128, withFeatureMap=skip)
    self.down3 = segnetDown3(128, 256, withFeatureMap=skip)
    self.down4 = segnetDown3(256, 512, withFeatureMap=skip)
    self.down5 = segnetDown3(512, 512, withFeatureMap=skip)
    # Decoder: mirror of the encoder, ending at out_channels.
    self.up5 = segnetUp3(512, 512, withSkipConnections=skip)
    self.up4 = segnetUp3(512, 256, withSkipConnections=skip)
    self.up3 = segnetUp3(256, 128, withSkipConnections=skip)
    self.up2 = segnetUp2(128, 64, withSkipConnections=skip)
    self.up1 = segnetUp2(64, out_channels, last_layer=bool(new_version), withSkipConnections=skip)

    # Flag records whether the extra head exists.
    self.additional = additional is not None
    if self.additional:
        self.additional_last_layer = segnetUp2(
            64, additional, last_layer=True, withSkipConnections=skip)

    if pretrained:
        backbone = models.vgg16(pretrained=True)
        Arch = 'SegNetSkip' if self.withSkipConnections else 'SegNet'
        print('[ INFO ]: Using pre-trained weights from VGG16 with {}.'.
            format(Arch))
        self.init_vgg16_params(backbone)