Example #1
0
    def __init__(self, backbone='resnet101', output_stride=16, num_classes=21,
                    bn='bn', freeze_bn=False, modal_num=3):
        """DeepLab with one ASPP + decoder branch per input modality.

        Args:
            backbone: backbone name passed to ``build_backbone``; 'drn'
                forces ``output_stride`` to 8.
            output_stride: stride of the backbone's output feature map.
            num_classes: number of segmentation classes.
            bn: normalization choice: 'sync_bn', 'bn' or 'gn'.
            freeze_bn: if True, freeze all normalization layers.
            modal_num: number of modalities (one ASPP/decoder pair each).

        Raises:
            NotImplementedError: if ``bn`` is not a recognized choice.
        """
        super(DeepLab, self).__init__()
        if backbone == 'drn':
            output_stride = 8
        self.best_iou = 0
        if bn == 'sync_bn':
            BatchNorm = SynchronizedBatchNorm2d
        elif bn == 'bn':
            BatchNorm = nn.BatchNorm2d
        elif bn == 'gn':
            BatchNorm = nn.GroupNorm
        else:
            raise NotImplementedError('batch norm choice {} is not implemented'.format(bn))

        self.backbone = build_backbone(backbone, output_stride, BatchNorm)

        # aspp/decoder-branches: build the ModuleLists directly instead of
        # appending to plain lists first; '_' marks the unused loop variable.
        self.modal_num = modal_num
        self.aspps = nn.ModuleList(
            build_aspp(backbone, output_stride, BatchNorm)
            for _ in range(modal_num))
        self.decoders = nn.ModuleList(
            build_decoder(num_classes, backbone, BatchNorm)
            for _ in range(modal_num))

        # attention-branch fusing the per-modality outputs
        self.attention_decoder = build_attention_decoder(num_classes, modal_num, backbone, BatchNorm)

        if freeze_bn:
            self.freeze_bn()
Example #2
0
    def __init__(self,
                 backbone='resnet101',
                 output_stride=16,
                 num_classes=21,
                 bn='bn',
                 freeze_bn=False):
        """Standard DeepLab pipeline: backbone -> ASPP -> decoder.

        Args:
            backbone: backbone name; 'drn' forces output_stride to 8.
            output_stride: stride of the backbone's output feature map.
            num_classes: number of segmentation classes.
            bn: normalization choice: 'sync_bn', 'bn' or 'gn'.
            freeze_bn: if True, freeze all normalization layers.

        Raises:
            NotImplementedError: if ``bn`` is not a recognized choice.
        """
        super(DeepLab, self).__init__()
        if backbone == 'drn':
            output_stride = 8
        self.best_iou = 0

        # Map the configuration string to a normalization layer class.
        norm_layers = {
            'sync_bn': SynchronizedBatchNorm2d,
            'bn': nn.BatchNorm2d,
            'gn': nn.GroupNorm,
        }
        if bn not in norm_layers:
            raise NotImplementedError(
                'batch norm choice {} is not implemented'.format(bn))
        BatchNorm = norm_layers[bn]

        self.backbone = build_backbone(backbone, output_stride, BatchNorm)
        self.aspp = build_aspp(backbone, output_stride, BatchNorm)
        self.decoder = build_decoder(num_classes, backbone, BatchNorm)

        if freeze_bn:
            self.freeze_bn()
Example #3
0
    def __init__(self,
                 backbone='resnet',
                 n_in_channels=1,
                 output_stride=16,
                 num_classes=1,
                 n_bottleneck_channels=1,
                 sync_bn=True,
                 freeze_bn=False,
                 pretrained_backbone=False):
        """DeepLab variant with a bottleneck decoder and tanh/sigmoid heads.

        Args:
            backbone: backbone name; 'drn' forces output_stride to 8.
            n_in_channels: number of input image channels.
            output_stride: stride of the backbone's output feature map.
            num_classes: number of output channels of the decoder.
            n_bottleneck_channels: bottleneck width passed to the decoder.
            sync_bn: use SynchronizedBatchNorm2d instead of nn.BatchNorm2d.
            freeze_bn: flag stored on the instance (not acted on here).
            pretrained_backbone: load pretrained backbone weights.
        """
        super(DeepLabBottleNeck, self).__init__()
        if backbone == 'drn':
            output_stride = 8

        # Idiomatic truthiness test instead of comparing to True.
        BatchNorm = SynchronizedBatchNorm2d if sync_bn else nn.BatchNorm2d

        self.backbone = build_backbone(backbone, n_in_channels, output_stride,
                                       BatchNorm, pretrained_backbone)
        self.aspp = build_aspp(backbone, output_stride, BatchNorm)
        self.decoder = build_decoder(num_classes, backbone, BatchNorm,
                                     n_bottleneck_channels)
        self.activate_tanh = nn.Tanh()
        self.activate_sigmoid = nn.Sigmoid()

        # NOTE(review): unlike sibling models this stores the flag rather than
        # calling a freeze method — confirm callers expect an attribute here.
        self.freeze_bn = freeze_bn
Example #4
0
    def __init__(self,
                 backbone='mobilenet',
                 output_stride=8,
                 num_classes=1,
                 sync_bn=True,
                 freeze_bn=False):
        """Shadow-detection network: backbone + ASPP + DSC module + decoder.

        Args:
            backbone: backbone name; 'drn' forces output_stride to 8.
            output_stride: stride of the backbone's output feature map.
            num_classes: number of output channels of the decoder.
            sync_bn: use SynchronizedBatchNorm2d instead of nn.BatchNorm2d.
            freeze_bn: if True, freeze all normalization layers.
        """
        super(ShadowNet2, self).__init__()
        if backbone == 'drn':
            output_stride = 8

        # Idiomatic truthiness test instead of comparing to True.
        BatchNorm = SynchronizedBatchNorm2d if sync_bn else nn.BatchNorm2d

        self.backbone = build_backbone(backbone, output_stride, BatchNorm)
        self.aspp = build_aspp(backbone, output_stride, BatchNorm)

        # 1x1 conv reducing the 320-ch backbone features to 256 channels.
        self.reduce1 = LayerConv(320, 256, 1, 1, 0, False)

        self.dsc = DSC_Module(256, 256)

        # 1x1 conv reducing the concatenated 512-ch features back to 256.
        self.reduce2 = LayerConv(512, 256, 1, 1, 0, False)

        self.decoder = build_decoder(num_classes, backbone, BatchNorm)

        if freeze_bn:
            self.freeze_bn()
    def __init__(self,
                 backbone='resnet18',
                 in_channels=3,
                 output_stride=16,
                 num_classes=1,
                 aux_classes=3,
                 sync_bn=True,
                 freeze_bn=False,
                 pretrained=False,
                 fusion_type='fusion',
                 is_concat=False,
                 **kwargs):
        """Pairwise DeepLab with a segmentation branch and a fusion branch.

        Args:
            backbone: backbone name; 'drn' forces output_stride to 8.
            in_channels: number of input image channels.
            output_stride: stride of the backbone's output feature map.
            num_classes: classes for the segmentation branch.
            aux_classes: classes for the auxiliary fusion branch.
            sync_bn: use SynchronizedBatchNorm2d instead of nn.BatchNorm2d.
            freeze_bn: if True, freeze all normalization layers.
            pretrained: load pretrained backbone weights.
            fusion_type: 'fusion' or 'attention_fusion'.
            is_concat: concatenation flag forwarded to the fusion builder.

        Raises:
            NotImplementedError: if ``fusion_type`` is not recognized.
        """
        super(PairwiseDeepLab, self).__init__()
        if backbone == 'drn':
            output_stride = 8

        # Idiomatic truthiness test instead of comparing to True.
        BatchNorm = SynchronizedBatchNorm2d if sync_bn else nn.BatchNorm2d

        self.backbone = build_backbone(backbone, in_channels, output_stride,
                                       BatchNorm, pretrained)

        ## branch1: standard ASPP + decoder segmentation head
        self.aspp = build_aspp(backbone, output_stride, BatchNorm)
        self.decoder = build_decoder(num_classes, backbone, BatchNorm)

        ## fusion branch: combines the pairwise features
        self.fusion_type = fusion_type
        if self.fusion_type == 'attention_fusion':
            print('fusion_type is attention_fusion')
            self.fusion = build_attention_fusion(aux_classes,
                                                 backbone,
                                                 BatchNorm,
                                                 is_concat=is_concat)
        elif self.fusion_type == 'fusion':
            print('init fusion_type')
            self.fusion = build_fusion(aux_classes,
                                       backbone,
                                       BatchNorm,
                                       is_concat=is_concat)
        else:
            raise NotImplementedError

        if freeze_bn:
            self.freeze_bn()
Example #6
0
    def __init__(self,
                 backbone='mobilenet',
                 output_stride=8,
                 num_classes=1,
                 sync_bn=True,
                 freeze_bn=False):
        """Shadow network with an auxiliary prediction/uncertainty guide.

        Args:
            backbone: backbone name; 'drn' forces output_stride to 8.
            output_stride: stride of the backbone's output feature map.
            num_classes: number of output channels.
            sync_bn: use SynchronizedBatchNorm2d instead of nn.BatchNorm2d.
            freeze_bn: if True, freeze all normalization layers.
        """
        super(ShadowNetUncertaintyGuide, self).__init__()
        if backbone == 'drn':
            output_stride = 8

        # Idiomatic truthiness test instead of comparing to True.
        BatchNorm = SynchronizedBatchNorm2d if sync_bn else nn.BatchNorm2d

        self.backbone = build_backbone(backbone, output_stride, BatchNorm)

        def _make_head():
            # 3x3 -> 3x3 -> 1x1 head over the 320-channel backbone features;
            # both guide heads share this exact architecture.
            return nn.Sequential(
                nn.Conv2d(320, 256, kernel_size=3, stride=1, padding=1,
                          bias=False), BatchNorm(256), nn.ReLU(),
                nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1,
                          bias=False), BatchNorm(256), nn.ReLU(),
                nn.Conv2d(256, num_classes, kernel_size=1, stride=1))

        self.temp_predict = _make_head()
        self.temp_uncertainty = _make_head()

        self.aspp = build_aspp(backbone, output_stride, BatchNorm)

        # 1x1 conv reducing the 320-ch backbone features to 256 channels.
        self.reduce1 = LayerConv(320, 256, 1, 1, 0, False)

        self.dsc = DSC_Module(256, 256)

        # 1x1 conv reducing the concatenated 512-ch features back to 256.
        self.reduce2 = LayerConv(512, 256, 1, 1, 0, False)

        self.decoder = build_decoder(num_classes, backbone, BatchNorm)

        if freeze_bn:
            self.freeze_bn()
Example #7
0
    def __init__(self,
                 backbone='resnet',
                 n_in_channels=1,
                 output_stride=16,
                 num_classes=1,
                 pretrained_backbone=False):
        """Minimal DeepLab: backbone -> ASPP -> decoder, plain BatchNorm only.

        Args:
            backbone: backbone name; 'drn' forces output_stride to 8.
            n_in_channels: number of input image channels.
            output_stride: stride of the backbone's output feature map.
            num_classes: number of output channels of the decoder.
            pretrained_backbone: load pretrained backbone weights.
        """
        super(DeepLab, self).__init__()
        if backbone == 'drn':
            output_stride = 8

        BatchNorm = nn.BatchNorm2d
        self.backbone = build_backbone(backbone, n_in_channels, output_stride,
                                       BatchNorm, pretrained_backbone)
        self.aspp = build_aspp(backbone, output_stride, BatchNorm)
        self.decoder = build_decoder(num_classes, backbone, BatchNorm)
        # Dropped the redundant trailing `return`: __init__ returns None anyway.
Example #8
0
    def __init__(self,
                 backbone='resnet',
                 output_stride=16,
                 num_class=21,
                 sync_bn=True,
                 freeze_bn=False):
        """Build the DeepLab modules: backbone, ASPP and decoder.

        Args:
            backbone: backbone name forwarded to the builders.
            output_stride: stride of the backbone's output feature map.
            num_class: number of segmentation classes.
            sync_bn: use SynchronizedBatchNorm2d instead of nn.BatchNorm2d.
            freeze_bn: flag stored on the instance (not acted on here).
        """
        super(DeepLab, self).__init__()
        # Select the normalization layer class via a conditional expression.
        batch_norm = SynchronizedBatchNorm2d if sync_bn else nn.BatchNorm2d

        self.backbone = build_backbone(backbone, output_stride, batch_norm)
        self.aspp = build_aspp(backbone, output_stride, batch_norm)
        self.decoder = build_decoder(num_class, backbone, batch_norm)
        self.freeze_bn = freeze_bn
Example #9
0
    def __init__(self, backbone='resnet18', in_channels=3, output_stride=8, num_classes=1,
                 sync_bn=True, freeze_bn=False, pretrained=False, **kwargs):
        """Consistency-trained DeepLab: backbone -> ASPP -> decoder.

        Args:
            backbone: backbone name; shallow backbones ('drn', 'resnet18',
                'resnet34') force output_stride to 8.
            in_channels: number of input image channels.
            output_stride: stride of the backbone's output feature map.
            num_classes: number of output channels of the decoder.
            sync_bn: use SynchronizedBatchNorm2d instead of nn.BatchNorm2d.
            freeze_bn: if True, freeze all normalization layers.
            pretrained: load pretrained backbone weights.
        """
        super(ConsistentDeepLab, self).__init__()
        if backbone in ['drn', 'resnet18', 'resnet34']:
            output_stride = 8

        # Idiomatic truthiness test instead of comparing to True.
        BatchNorm = SynchronizedBatchNorm2d if sync_bn else nn.BatchNorm2d

        self.backbone = build_backbone(backbone, in_channels, output_stride, BatchNorm, pretrained)
        self.aspp = build_aspp(backbone, output_stride, BatchNorm)
        self.decoder = build_decoder(num_classes, backbone, BatchNorm)

        if freeze_bn:
            self.freeze_bn()
Example #10
0
    def __init__(self, backbone='resnet', output_stride=16, num_classes=21,
                 sync_bn=True, freeze_bn=False, pretrained=False):
        """DeepLab: backbone -> ASPP -> decoder, optional pretrained weights.

        Args:
            backbone: backbone name; 'drn' forces output_stride to 8.
            output_stride: stride of the backbone's output feature map.
            num_classes: number of segmentation classes.
            sync_bn: use SynchronizedBatchNorm2d instead of nn.BatchNorm2d.
            freeze_bn: if True, freeze all normalization layers.
            pretrained: load pretrained backbone weights.
        """
        super(DeepLab, self).__init__()
        if backbone == 'drn':
            output_stride = 8

        # Idiomatic truthiness test instead of comparing to True.
        BatchNorm = SynchronizedBatchNorm2d if sync_bn else nn.BatchNorm2d

        self.backbone = build_backbone(backbone, output_stride, BatchNorm,
                                       pretrained=pretrained)
        self.aspp = build_aspp(backbone, output_stride, BatchNorm)
        self.decoder = build_decoder(num_classes, backbone, BatchNorm)

        if freeze_bn:
            self.freeze_bn()
    def __init__(self, n_vocab, max_seq_len, z_dim, c_dim, emb_dim,
                 pretrained_emb, freeze_embeddings, flow, flow_type, E_args,
                 G_args, C_args):
        """RNN-VAE with a GRU encoder, conditional decoder and CNN classifier.

        Args:
            n_vocab: vocabulary size.
            max_seq_len: maximum sequence length (stored as MAX_SEQ_LEN).
            z_dim: latent-code dimensionality.
            c_dim: attribute/condition code dimensionality.
            emb_dim: word-embedding dimensionality.
            pretrained_emb: optional (n_vocab, emb_dim) tensor of embeddings.
            freeze_embeddings: if True, embedding weights are not trained.
            flow: number of flow steps; 0 disables the flow.
            flow_type: flow architecture name passed to build_flow.
            E_args, G_args, C_args: extra kwargs for encoder/decoder/classifier.
        """
        super(RNN_VAE, self).__init__()
        self.MAX_SEQ_LEN = max_seq_len
        self.n_vocab = n_vocab
        self.z_dim = z_dim
        self.c_dim = c_dim
        self.device = torch.device('cuda')

        # Word embeddings layer.  Fixed: the original constructed the
        # nn.Embedding a second time inside the pretrained branch before
        # copying weights — one construction suffices.
        self.emb_dim = emb_dim
        self.word_emb = nn.Embedding(n_vocab, self.emb_dim, PAD_IDX)
        if pretrained_emb is not None:
            assert self.emb_dim == pretrained_emb.size(
                1), 'emb dim dont match with pretrained'
            # Set pretrained embeddings
            self.word_emb.weight.data.copy_(pretrained_emb)
        if freeze_embeddings:
            self.word_emb.weight.requires_grad = False

        # Initialize all the modules.
        self.encoder = build_encoder('gru',
                                     emb_dim=self.emb_dim,
                                     z_dim=z_dim,
                                     **E_args)
        self.decoder = build_decoder(embedding=self.word_emb,
                                     emb_dim=self.emb_dim + z_dim + c_dim,
                                     output_dim=n_vocab,
                                     h_dim=z_dim + c_dim,
                                     **G_args)
        self.classifier = build_classifier('cnn', self.emb_dim, **C_args)

        # Initialize flow (enabled when the step count is positive).
        self.use_flow = flow > 0
        if self.use_flow:
            self.flow_model = build_flow(flow_type, flow, z_dim)
Example #12
0
    def __init__(self, config):
        """RNN-Transducer: encoder + prediction network + joint network.

        Args:
            config: configuration object; reads config.alpha, config.enc,
                config.fir_enc, config.dec, config.max_target_length,
                config.joint.*, config.vocab_size, config.share_embedding
                and config.fir_enc_or_not.
        """
        super(Transducer, self).__init__()
        # build cnn (front-end conv layers, currently disabled)
        # self.conv1 = nn.Sequential(
        #     nn.Conv2d(in_channels=1,out_channels=1,kernel_size=5,stride=1,padding=(2,2)),
        #     nn.ReLU(),
        #     nn.MaxPool2d(kernel_size=2,stride=2)
        # )
        # self.conv2 = nn.Sequential(
        #     nn.Conv2d(in_channels=1, out_channels=1, kernel_size=5, stride=1, padding=(2, 2)),
        #     nn.ReLU(),
        #     nn.MaxPool2d(kernel_size=2,stride=2)
        # )
        self.config = config
        self.alpha = config.alpha  # weighting factor for the auxiliary loss
        # define encoder
        self.encoder = build_encoder(config.enc)
        self.fir_enc = buildFir_enc(config.fir_enc)
        # define decoder (prediction network)
        self.decoder = build_decoder(config.dec)
        self.max_target_length = config.max_target_length
        # define JointNet combining encoder and decoder outputs
        self.joint = JointNet(input_size=config.joint.input_size,
                              inner_dim=config.joint.inner_size,
                              vocab_size=config.vocab_size)

        # Tie decoder embedding and joint projection weights.
        # NOTE(review): the assert compares full size() tuples but the message
        # formats only size(1) of each — message may under-report a mismatch.
        if config.share_embedding:
            assert self.decoder.embedding.weight.size(
            ) == self.joint.project_layer.weight.size(), '%d != %d' % (
                self.decoder.embedding.weight.size(1),
                self.joint.project_layer.weight.size(1))
            self.joint.project_layer.weight = self.decoder.embedding.weight

        self.rnnt = RNNTLoss()
        self.crit = nn.CrossEntropyLoss()

        # whether to use the hierarchical (first-stage) encoder
        self.fir_enc_or_not = config.fir_enc_or_not
Example #13
0
 def __init__(self, backbone='resnet', output_stride=16, num_classes=21):
     """DeepLab-style model: backbone -> ASPP -> decoder, then weight init.

     Args:
         backbone: backbone name forwarded to the builders.
         output_stride: stride of the backbone's output feature map.
         num_classes: number of segmentation classes.
     """
     super().__init__()
     self.backbone = build_backbone(backbone, output_stride)
     self.aspp = build_ASPP(backbone, output_stride)
     self.decoder = build_decoder(backbone, num_classes)
     self._init_weight()