Exemplo n.º 1
0
    def __init__(self,
                 num_classes=0,
                 loss='softmax',
                 neck=False,
                 drop_height_ratio=0.33,
                 drop_width_ratio=1.0,
                 **kwargs):
        """Build a two-branch (global + regularization) re-ID head on ResNet-50.

        Args:
            num_classes: size of the identity classification layers.
            loss: loss identifier stored for use by the caller.
            neck: if True, add BN-neck layers with the bias (shift) frozen.
            drop_height_ratio / drop_width_ratio: accepted but not used in
                this constructor -- presumably kept so the signature matches
                a batch-drop variant of this model (TODO confirm).
        """
        super(NoDropNet, self).__init__()
        self.loss = loss
        if neck:
            # BN-neck trick: BatchNorm before each classifier, bias frozen
            # at its initial value (no shift).
            self.bottleneck_global = nn.BatchNorm1d(512)
            self.bottleneck_global.bias.requires_grad_(False)  # no shift
            self.bottleneck_regularization = nn.BatchNorm1d(2048)
            self.bottleneck_regularization.bias.requires_grad_(
                False)  # no shift
        else:
            self.bottleneck_global = None
            self.bottleneck_regularization = None

        # Global branch: 1x1 conv reduces 2048 -> 512 before pooling.
        self.reduction_global = nn.Sequential(nn.Conv2d(2048, 512, 1),
                                              nn.BatchNorm2d(512), nn.ReLU())
        # Regularization branch keeps 2048 channels through two Bottlenecks.
        self.reduction_regularization = nn.Sequential(Bottleneck(2048, 512),
                                                      Bottleneck(2048, 512))
        self.avgpool_global = nn.AdaptiveAvgPool2d((1, 1))
        self.avgpool_regularization = nn.AdaptiveAvgPool2d((1, 1))
        self.classifier_global = nn.Linear(512, num_classes)
        self.classifier_regularization = nn.Linear(2048, num_classes)
        # NOTE(review): _init_params() runs before self.base is assigned, so
        # the pretrained backbone below is presumably not re-initialized by
        # it -- confirm against _init_params().
        self._init_params()

        resnet = resnet50_ls(num_classes, pretrained=True)
        # Backbone: all ResNet children except avgpool and fc.
        self.base = nn.Sequential(*list(resnet.children())[:-2])
Exemplo n.º 2
0
    def __init__(self, args):
        """Multi-channel network: ResNet-50 stages plus a global BNNeck
        branch and one BNNeck head per channel group.

        Reads ``args.parts`` (number of channel groups) and
        ``args.num_classes``.
        """
        super(MCN, self).__init__()
        self.n_c = args.parts
        # Channels per group when the 2048-dim feature is split n_c ways.
        self.chs = 2048 // self.n_c

        resnet_ = resnet50(pretrained=True)

        self.layer0 = nn.Sequential(
            resnet_.conv1,
            resnet_.bn1,
            resnet_.relu,
            resnet_.maxpool)
        self.layer1 = resnet_.layer1
        self.layer2 = resnet_.layer2
        self.layer3 = resnet_.layer3
        # Rebuild layer4 with a 1x1 projection shortcut (presumably to keep
        # stride 1 / full spatial resolution), then copy in the pretrained
        # layer4 weights.
        self.layer4 = nn.Sequential(
            Bottleneck(1024, 512, downsample=nn.Sequential(
                nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        self.layer4.load_state_dict(resnet_.layer4.state_dict())

        self.gap = nn.AdaptiveAvgPool2d((1, 1))
        self.global_branch = new_BNNeck(2048, args.num_classes, 256,return_f=True)

        # Shared 1x1 reduction applied to each channel group.
        self.shared = nn.Sequential(nn.Conv2d(
            self.chs, 256, 1, bias=False), nn.BatchNorm2d(256), nn.ReLU(True))
        self.weights_init_kaiming(self.shared)

        # One BNNeck classifier head per channel group: bnneck_0..bnneck_{n_c-1}.
        for i in range(self.n_c):
            name = 'bnneck_' + str(i)
            setattr(self, name, BNNeck(256, args.num_classes, return_f=True))
Exemplo n.º 3
0
    def __init__(self):
        """Strong-baseline re-ID model: ResNet-50 backbone with a rebuilt
        res_conv5 stage, plus global (avg-pool) and local (max-pool)
        BN-neck heads.

        Reads ``opt.num_classes`` for the classifier output size.
        """
        super(Baseline, self).__init__()
        num_classes = opt.num_classes
        resnet = resnet50()
        # FIX: the original stack used Bottleneck(1024, 2048, ...) whose
        # downsample conv expected a 2048-channel input, while layer3 emits
        # 1024 channels; with the usual expansion factor of 4 its output
        # (2048 * 4 = 8192) also mismatched the 2048-dim heads below.
        # Rebuilt to the pattern used by the other models in this file:
        # 1024 -> 2048 with a 1x1 projection shortcut, then two 2048-channel
        # blocks.
        layer4 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))),
            Bottleneck(2048, 512), Bottleneck(2048, 512))
        layer4.apply(weights_init_kaiming)  #difference with liaoxingyu
        self.backbone = nn.Sequential(resnet.conv1, resnet.bn1, resnet.relu,
                                      resnet.maxpool, resnet.layer1,
                                      resnet.layer2, resnet.layer3, layer4)
        # Global branch: avg-pool -> BN-neck (bias frozen) -> linear head.
        self.gap = nn.AdaptiveAvgPool2d(1)
        self.bn = nn.BatchNorm1d(2048)
        self.bn.bias.requires_grad_(False)  #b=0
        self.bn.apply(weights_init_kaiming)
        self.classifier = nn.Linear(2048, num_classes, bias=False)
        self.classifier.apply(weights_init_classifier)

        # Local branch: max-pool -> BN-neck (bias frozen) -> linear head.
        self.gmp = nn.AdaptiveMaxPool2d(1)
        self.local_bn = nn.BatchNorm1d(2048)
        self.local_bn.bias.requires_grad_(False)
        self.local_bn.apply(weights_init_kaiming)
        self.local_classifier = nn.Linear(2048, num_classes, bias=False)
        self.local_classifier.apply(weights_init_classifier)
Exemplo n.º 4
0
    def __init__(self, num_classes, args):
        """MGN-style trunk: shared stem up to the first layer3 block, then
        three branches (p1..p3), each the remaining layer3 blocks plus a
        rebuilt res_conv5.

        NOTE(review): ``num_classes`` is accepted but unused in this
        constructor -- presumably consumed by heads defined elsewhere in
        the class; confirm.
        """
        super(MGN, self).__init__()

        self.args = args
        resnet = resnet50(pretrained=False)

        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
            resnet.layer2,
            resnet.layer3[0]
        )

        # Remaining layer3 blocks, shared template for all three branches.
        res_conv4 = nn.Sequential(*resnet.layer3[1:])
        # res_conv5 rebuilt with a 1x1 projection shortcut.
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512))

        # Each branch gets an independent copy of the weights.
        self.p1 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))
        self.p2 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))
        self.p3 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))
        print('Model Structure:')
        print(self)
Exemplo n.º 5
0
    def __init__(self, num_classes, width_ratio=0.5, height_ratio=0.5):
        """Two-backbone BFE variant: both ResNet-50 copies get the layer4
        downsampling stride removed; followed by global-branch Bottlenecks
        and PBN_modify / Conv_BN heads.

        NOTE(review): width_ratio/height_ratio are accepted but unused in
        this constructor -- presumably for a batch-drop step elsewhere;
        confirm against forward().
        """
        super(BFE, self).__init__()
        resnet = resnet50(pretrained=True)
        resnet.fc = nn.Sequential()  # drop the ImageNet classifier head
        self.model = resnet
        # Force stride 1 in layer4 to keep a larger final feature map.
        self.model.layer4[0].downsample[0].stride = (1, 1)
        self.model.layer4[0].conv2.stride = (1, 1)

        # Second, independently-weighted backbone.
        sub_resnet = resnet50(pretrained=True)
        sub_resnet.fc = nn.Sequential()
        self.sub_model = sub_resnet
        self.sub_model.layer4[0].downsample[0].stride = (1, 1)
        self.sub_model.layer4[0].conv2.stride = (1, 1)

        # global branch
        self.bottleneck_g1 = Bottleneck(2048, 512)
        self.bottleneck_g1_1 = Bottleneck(1024, 256)
        self.bottleneck_g2 = Bottleneck(2048, 512)
        self.bottleneck_g2_1 = Bottleneck(1024, 256)
        self.PBN1 = PBN_modify(2048, num_classes, num_reduction=512)  # "come here" (translated marker)
        self.PBN1_1 = PBN_modify(1024, num_classes, num_reduction=256)  # "come here" (translated marker)

        self.PBN2 = PBN_modify(2048, num_classes, num_reduction=512)
        self.PBN2_1 = PBN_modify(1024, num_classes, num_reduction=256)
        self.convbn = Conv_BN(1536, num_classes, num_reduction=512)
Exemplo n.º 6
0
    def __init__(self, num_classes, bins=[1, 2, 3, 6]):
        """PPM-augmented re-ID model: ResNet-50 backbone with a rebuilt
        layer4 (pretrained weights copied in), a pyramid pooling module and
        seven REFC classifier heads.

        NOTE(review): the mutable default ``bins`` is shared across calls;
        it is only read in this constructor, so that is benign here.
        """
        super(Spark2, self).__init__()
        resnet = resnet50(pretrained=True)
        # layer4 rebuilt with a 1x1 projection shortcut, then loaded with
        # the pretrained layer4 weights.
        layer4 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        layer4.load_state_dict(resnet.layer4.state_dict())
        self.backbone = nn.Sequential(resnet.conv1, resnet.bn1, resnet.relu,
                                      resnet.maxpool, resnet.layer1,
                                      resnet.layer2, resnet.layer3, layer4)
        # Pyramid pooling: 2048 in, 2048 // len(bins) channels per bin level.
        self.ppm = PPM(2048, 2048 // len(bins), bins)
        self.branch1 = nn.Conv2d(4096, 2048, 1, bias=False)
        self.branch2 = nn.Conv2d(4096, 2048, 1, bias=False)

        self.maxp = nn.MaxPool2d(kernel_size=(9, 6))  # 2*1
        self.avgp = nn.AvgPool2d(kernel_size=(9, 6))  # 2*1
        self.maxpool = nn.MaxPool2d(kernel_size=(18, 18))  # 2*1
        self.avgpool = nn.AvgPool2d(kernel_size=(18, 18))  # 2*1

        # total 3+2+3+4 branch for classificaiton
        self.refc1 = REFC(2048, 512, num_classes)
        self.refc2 = REFC(2048, 256, num_classes)
        self.refc3 = REFC(2048, 256, num_classes)
        self.refc4 = REFC(2048, 256, num_classes)
        self.refc5 = REFC(2048, 256, num_classes)
        self.refc6 = REFC(2048, 256, num_classes)
        self.refc7 = REFC(2048, 256, num_classes)
Exemplo n.º 7
0
 def __init__(self, num_classes,bins=[1,2,3,6]):
     """Plain baseline: ResNet-50 with a rebuilt layer4 (pretrained weights
     copied in), adaptive max-pool, BN-neck and a bias-free classifier.

     NOTE(review): ``bins`` is accepted but unused in this constructor --
     presumably a leftover from a PPM variant; the shared mutable default
     is benign since it is never touched.
     """
     super(Baseline, self).__init__()
     resnet = resnet50(pretrained=True)
     # layer4 rebuilt with a 1x1 projection shortcut, pretrained weights
     # loaded afterwards.
     layer4 = nn.Sequential(
         Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
         Bottleneck(2048, 512),
         Bottleneck(2048, 512))
     layer4.load_state_dict(resnet.layer4.state_dict())
     self.backbone = nn.Sequential(
         resnet.conv1,
         resnet.bn1,
         resnet.relu,
         resnet.maxpool,
         resnet.layer1,
         resnet.layer2,
         resnet.layer3,
         layer4
     )
     
     #self.maxpool = nn.MaxPool2d(kernel_size=(18, 18)) 
     self.maxpool = nn.AdaptiveMaxPool2d(1)
     # BN-neck initialized to weight ~ N(1, 0.02), bias = 0.
     self.bn = nn.BatchNorm1d(2048)
     nn.init.normal_(self.bn.weight, mean=1., std=0.02)
     nn.init.constant_(self.bn.bias, 0.)
     self.fc = nn.Linear(2048, num_classes,bias=False)
     nn.init.kaiming_normal_(self.fc.weight, mode='fan_out')
Exemplo n.º 8
0
    def __init__(self, num_classes, width_ratio=0.5, height_ratio=0.5):
        """BFE with a single backbone (layer4 stride removed) and four PBN
        heads: one layer3 branch, one global branch and two part branches.

        NOTE(review): width_ratio/height_ratio are accepted but unused in
        this constructor -- presumably for a batch-drop step elsewhere;
        confirm against forward().
        """
        super(BFE, self).__init__()
        model = resnet50(pretrained=True)
        model.fc = nn.Sequential()  # drop the ImageNet classifier head
        self.model = model
        # Force stride 1 in layer4 to keep a larger final feature map.
        self.model.layer4[0].downsample[0].stride = (1, 1)
        self.model.layer4[0].conv2.stride = (1, 1)

        # layer3 branch
        self.bottleneck1 = Bottleneck(1024, 256)
        self.PBN1 = PBN(1024,
                        num_classes,
                        do_reduction=True,
                        num_reduction=512)

        # global branch
        self.PBN2 = PBN(2048,
                        num_classes,
                        do_reduction=True,
                        num_reduction=512)  # "come here" (translated marker)

        # part1 branch
        self.bottleneck2 = Bottleneck(2048, 512)
        self.PBN3 = PBN(2048,
                        num_classes,
                        do_reduction=True,
                        num_reduction=512)

        # part2 branch
        self.bottleneck3 = Bottleneck(2048, 512)
        self.PBN4 = PBN(2048,
                        num_classes,
                        do_reduction=True,
                        num_reduction=512)
Exemplo n.º 9
0
    def __init__(self, n_class=1):
        """Encoder-decoder net: pretrained ResNet-50 encoder, x2 upsample
        blocks interleaved with Bottleneck residuals, and a 1x1 output conv
        producing ``n_class`` channels at input resolution.
        """
        super(QuadriNetFancy, self).__init__()
        features = resnet50(pretrained=True)

        self.conv1 = features.conv1
        # NOTE(review): bn1/relu/maxpool are created fresh rather than taken
        # from ``features``, so the pretrained bn1 weights/statistics are
        # discarded -- confirm this is intentional.
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = features.layer1
        self.layer2 = features.layer2
        self.layer3 = features.layer3
        self.layer4 = features.layer4

        #x2 upsamples + residual block evry 2 upsamples
        self.upsample1 = Upsample2Bloc(2048, 1024)  #1/32->1/16
        self.upsample2 = Upsample2Bloc(1024, 512)  #1/16->1/8
        self.residual1 = Bottleneck(512, 128)

        self.upsample3 = Upsample2Bloc(512, 256)  #1/8 -> 1/4
        self.upsample4 = Upsample2Bloc(256, 64)  #1/4 -> 1/2
        self.residual2 = Bottleneck(64, 16)

        #single x2 upsample + residual block
        self.upsample5 = Upsample2Bloc(64, 32)  #1/2 -> 1
        self.residual3 = Bottleneck(32, 8)

        # Per-pixel class scores.
        self.out = nn.Conv2d(32, n_class, 1)
Exemplo n.º 10
0
    def __init__(self, num_classes,bins=[1,2,3,6]):
        """Multi-pooling re-ID model: ResNet-50 stages with a rebuilt layer4
        (pretrained weights copied in), three adaptive max-pools and nine
        REFC classifier heads.

        NOTE(review): ``bins`` is accepted but unused in this constructor --
        presumably a leftover from a PPM variant; the shared mutable default
        is benign since it is never touched.
        """
        super(Sparker, self).__init__()
        resnet = resnet50(pretrained=True)
        self.layer1 = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
        ) 
        self.layer2 = resnet.layer2
        self.layer3 = resnet.layer3
        # layer4 rebuilt with a 1x1 projection shortcut, then loaded with
        # the pretrained layer4 weights.
        self.layer4 = nn.Sequential(
            Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        self.layer4.load_state_dict(resnet.layer4.state_dict())

        self.maxp1 = nn.AdaptiveMaxPool2d((2,1))  # 2*1
        self.maxp2 = nn.AdaptiveMaxPool2d((1,2))  # 2*1 
        self.maxpool = nn.AdaptiveMaxPool2d(1)  # 2*1

        self.refc1 = REFC(2048, 512, num_classes)
        self.refc2 = REFC(512, 256, num_classes)
        self.refc3 = REFC(512, 256, num_classes)
        self.refc4 = REFC(1024, 256, num_classes)
        self.refc5 = REFC(1024, 256, num_classes)
        self.refc6 = REFC(2048, 256, num_classes)
        self.refc7 = REFC(2048, 256, num_classes)
        self.refc8 = REFC(2048, 256, num_classes)
        self.refc9 = REFC(2048, 256, num_classes)
Exemplo n.º 11
0
    def __init__(self, num_classes, width_ratio=0.5, height_ratio=0.5):
        """Strong baseline with a batch-drop local branch: ResNet-50 with a
        rebuilt layer4 (pretrained weights copied in), global BN-neck head,
        and a local Bottleneck + max-pool + BN-neck head fed through
        BatchDrop(height_ratio, width_ratio).
        """
        super(StrongBaseline, self).__init__()
        resnet = resnet50(True)
        # layer4 rebuilt with a 1x1 projection shortcut.
        layer4 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        layer4.load_state_dict(resnet.layer4.state_dict())  #use resnet weight
        self.backbone = nn.Sequential(resnet.conv1, resnet.bn1, resnet.relu,
                                      resnet.maxpool, resnet.layer1,
                                      resnet.layer2, resnet.layer3, layer4)
        # Global branch: avg-pool -> BN-neck (bias frozen) -> linear head.
        self.gap = nn.AdaptiveAvgPool2d(1)
        self.bn = nn.BatchNorm1d(2048)
        self.bn.bias.requires_grad_(False)  #b=0
        self.bn.apply(weights_init_kaiming
                      )  #weights_init_kaiming difference with liaoxingyu
        self.classifier = nn.Linear(2048, num_classes, bias=False)  #bias=False
        self.classifier.apply(weights_init_classifier)

        # Local branch: extra Bottleneck (left at its default init),
        # max-pool -> BN-neck (bias frozen) -> linear head.
        self.bottleneck = Bottleneck(2048,
                                     512)  # local branch only; not re-initialized
        self.gmp = nn.AdaptiveMaxPool2d(1)
        self.local_bn = nn.BatchNorm1d(2048)
        self.local_bn.bias.requires_grad_(False)
        self.local_bn.apply(weights_init_kaiming)
        self.local_classifier = nn.Linear(2048, num_classes, bias=False)
        self.local_classifier.apply(weights_init_classifier)
        self.drop = BatchDrop(height_ratio, width_ratio)
Exemplo n.º 12
0
 def __init__(self,num_classes,num_local=4):
     """Attention model: ResNet-50 trunk up to layer3, a rebuilt conv5 stage
     (pretrained layer4 weights copied in), and ``num_local`` 256-dim local
     reduction + classifier pairs.
     """
     super(Attention, self).__init__()
     self.num_local = num_local
     resnet = resnet50(pretrained=True)
     self.backbone = nn.Sequential(
         resnet.conv1,
         resnet.bn1,
         resnet.relu,
         resnet.maxpool,
         resnet.layer1,
         resnet.layer2,
         resnet.layer3
     )
     # conv5 rebuilt with a 1x1 projection shortcut, then loaded with the
     # pretrained layer4 weights.
     self.conv5 = nn.Sequential(
         Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
         Bottleneck(2048, 512),
         Bottleneck(2048, 512))
     self.conv5.load_state_dict(resnet.layer4.state_dict())        
     # Per-part 1x1 reductions 2048 -> 256.
     self.local_list = nn.ModuleList()
     for i in range(num_local):
         self.local_list.append(nn.Sequential(nn.Conv2d(2048,256,1,bias=False),
                                  nn.BatchNorm2d(256),
                                  nn.ReLU(inplace=True)))#relu
     # Per-part bias-free classifiers, weight ~ N(0, 0.001).
     self.local_fcs = nn.ModuleList()
     for _ in range(num_local):
         fc = nn.Linear(256,num_classes,bias=False)
         nn.init.normal_(fc.weight,std=0.001)
         self.local_fcs.append(fc) 
     self.avgpool = nn.AvgPool2d(kernel_size=(24,24))
     self.maxpool = nn.MaxPool2d(kernel_size=(24,24))    
Exemplo n.º 13
0
    def __init__(self, num_classes=0, width_ratio=0.5, height_ratio=0.5):
        super(BagReID_RESNET, self).__init__()

        resnet = resnet50(pretrained=True)
        layer4 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       stride=1,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024,
                                     2048,
                                     kernel_size=1,
                                     stride=1,
                                     bias=False),
                           nn.BatchNorm2d(2048),
                       )),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512),
        )
        layer4.load_state_dict(resnet.layer4.state_dict())

        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,  # res_conv2
            resnet.layer2,  # res_conv3
            resnet.layer3,  # res_conv4
            layer4)

        # global branch
        self.global_avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.global_bn = nn.BatchNorm1d(cfg.MODEL.GLOBAL_FEATS, affine=False)
        self.global_softmax = nn.Linear(cfg.MODEL.GLOBAL_FEATS,
                                        num_classes,
                                        bias=False)
        self.global_softmax.apply(weights_init_kaiming)
        # self.global_reduction = nn.Sequential(
        #     nn.Conv2d(2048, cfg.MODEL.GLOBAL_FEATS, 1),
        #     nn.BatchNorm2d(cfg.MODEL.GLOBAL_FEATS),
        #     nn.ReLU(True)
        # )
        # self.global_reduction.apply(weights_init_kaiming)

        # part branch
        self.part = Bottleneck(2048, 512)
        self.part_maxpool = nn.AdaptiveMaxPool2d((1, 1))
        self.batch_drop = BatchDrop(height_ratio, width_ratio)
        self.part_reduction = nn.Sequential(
            nn.Linear(2048, cfg.MODEL.PART_FEATS, True),
            nn.BatchNorm1d(cfg.MODEL.PART_FEATS), nn.ReLU(True))

        self.part_reduction.apply(weights_init_kaiming)
        self.part_bn = nn.BatchNorm1d(cfg.MODEL.PART_FEATS, affine=False)
        self.part_softmax = nn.Linear(cfg.MODEL.PART_FEATS,
                                      num_classes,
                                      bias=False)
        self.part_softmax.apply(weights_init_kaiming)
    def __init__(self, num_classes=751):
        """MGN: shared stem, three branches (one global, two part), fixed
        multi-granularity max-pools and eight 256-dim softmax heads.

        Args:
            num_classes: output size of every identity classifier head.
                FIX: the original signature took no arguments yet referenced
                an undefined ``num_classes`` below (NameError at
                construction time); it is now an optional parameter.  The
                default of 751 matches Market-1501 -- adjust per dataset.
        """
        super(MGN, self).__init__()

        feats = 256
        resnet = resnet50(pretrained=True)

        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
            resnet.layer2,
            resnet.layer3[0],
        )

        # Remaining layer3 blocks, shared template for all three branches.
        res_conv4 = nn.Sequential(*resnet.layer3[1:])

        # Global branch keeps the original (strided) layer4.
        res_g_conv5 = resnet.layer4

        # Part branches use a stride-1 rebuild of layer4 with the pretrained
        # weights copied in.
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        res_p_conv5.load_state_dict(resnet.layer4.state_dict())

        self.p1 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_g_conv5))
        self.p2 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))
        self.p3 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))

        self.maxpool_zg_p1 = nn.MaxPool2d(kernel_size=(12, 4))
        self.maxpool_zg_p2 = nn.MaxPool2d(kernel_size=(24, 8))
        self.maxpool_zg_p3 = nn.MaxPool2d(kernel_size=(24, 8))
        self.maxpool_zp2 = nn.MaxPool2d(kernel_size=(12, 8))
        self.maxpool_zp3 = nn.MaxPool2d(kernel_size=(8, 8))

        # Shared-shape 1x1 reduction 2048 -> feats before each classifier.
        self.reduction = nn.Sequential(nn.Conv2d(2048, feats, 1, bias=False), nn.BatchNorm2d(feats), nn.ReLU())

        self._init_reduction(self.reduction)

        # Global-feature classifiers (one per branch).
        self.fc_id_2048_0 = nn.Linear(feats, num_classes)
        self.fc_id_2048_1 = nn.Linear(feats, num_classes)
        self.fc_id_2048_2 = nn.Linear(feats, num_classes)

        # Part-feature classifiers.
        self.fc_id_256_1_0 = nn.Linear(feats, num_classes)
        self.fc_id_256_1_1 = nn.Linear(feats, num_classes)
        self.fc_id_256_2_0 = nn.Linear(feats, num_classes)
        self.fc_id_256_2_1 = nn.Linear(feats, num_classes)
        self.fc_id_256_2_2 = nn.Linear(feats, num_classes)

        self._init_fc(self.fc_id_2048_0)
        self._init_fc(self.fc_id_2048_1)
        self._init_fc(self.fc_id_2048_2)

        self._init_fc(self.fc_id_256_1_0)
        self._init_fc(self.fc_id_256_1_1)
        self._init_fc(self.fc_id_256_2_0)
        self._init_fc(self.fc_id_256_2_1)
        self._init_fc(self.fc_id_256_2_2)
Exemplo n.º 15
0
    def __init__(self):
        """MGN with a PTL module: configurable ResNet-50/101 backbone
        (via ``opt.backbone``), three branches and eight softmax heads.

        Reads ``opt.classn`` for the classifier size.
        """
        super(MGN_PTL, self).__init__()
        num_classes = opt.classn
        feats = 256
        self.val = False
        self.ptl = PTL()
        self.ptl.init_param()
        if opt.backbone == 'resnet50':
            self.backbone = resnet50(pretrained=True)
        elif opt.backbone == 'resnet101':
            self.backbone = resnet101(pretrained=True)
        # Remaining layer3 blocks, shared template for all three branches.
        res_conv4 = nn.Sequential(*self.backbone.layer3[1:])
        # Global branch keeps the original (strided) layer4.
        res_g_conv5 = self.backbone.layer4
        # Part branches use a rebuilt layer4 with a 1x1 projection shortcut
        # and the pretrained layer4 weights copied in.
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        res_p_conv5.load_state_dict(self.backbone.layer4.state_dict())

        self.p1 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_g_conv5))
        self.p2 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))
        self.p3 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))

        self.maxpool_zg_p1 = nn.MaxPool2d(kernel_size=(12, 4))
        self.maxpool_zg_p2 = nn.MaxPool2d(kernel_size=(24, 8))
        self.maxpool_zg_p3 = nn.MaxPool2d(kernel_size=(24, 8))
        self.maxpool_zp2 = nn.MaxPool2d(kernel_size=(12, 8))
        self.maxpool_zp3 = nn.MaxPool2d(kernel_size=(8, 8))

        # 1x1 reduction; input is 2048 backbone channels + 128 extra
        # (presumably the PTL features -- confirm against forward()).
        self.reduction = nn.Sequential(
            nn.Conv2d(2048 + 128, feats, 1, bias=True), nn.BatchNorm2d(feats),
            nn.ReLU())

        self._init_reduction(self.reduction)

        self.fc_id_2048_0 = nn.Linear(feats, num_classes)
        self.fc_id_2048_1 = nn.Linear(feats, num_classes)
        self.fc_id_2048_2 = nn.Linear(feats, num_classes)
        self.fc_id_256_1_0 = nn.Linear(feats, num_classes)
        self.fc_id_256_1_1 = nn.Linear(feats, num_classes)
        self.fc_id_256_2_0 = nn.Linear(feats, num_classes)
        self.fc_id_256_2_1 = nn.Linear(feats, num_classes)
        self.fc_id_256_2_2 = nn.Linear(feats, num_classes)

        self._init_fc(self.fc_id_2048_0)
        self._init_fc(self.fc_id_2048_1)
        self._init_fc(self.fc_id_2048_2)
        self._init_fc(self.fc_id_256_1_0)
        self._init_fc(self.fc_id_256_1_1)
        self._init_fc(self.fc_id_256_2_0)
        self._init_fc(self.fc_id_256_2_1)
        self._init_fc(self.fc_id_256_2_2)
Exemplo n.º 16
0
    def __init__(self, num_classes, fc_dims=None, loss=None, dropout_p=None,  **kwargs):
        """ResNet-50 with PC modules after layer2/layer3, two copies of a
        rebuilt layer4, PAM attention, batch-drop, and two 2048-dim BN +
        classifier heads.

        NOTE(review): ``fc_dims`` and ``dropout_p`` are accepted but unused
        in this constructor -- presumably kept for signature compatibility
        with sibling models; confirm.
        """
        super(ResNet, self).__init__()
        
        resnet_ = resnet50(pretrained=True)
        
        self.loss = loss
        
        self.layer0 = nn.Sequential(
            resnet_.conv1,
            resnet_.bn1,
            resnet_.relu,
            resnet_.maxpool)
        self.layer1 = resnet_.layer1
        self.layer2 = resnet_.layer2

        self.pc1 = PC_Module(512)

        self.layer3 = resnet_.layer3
 
        self.pc2 = PC_Module(1024)
        # layer4 rebuilt with a 1x1 projection shortcut and loaded with the
        # pretrained weights; two independent copies are kept below.
        layer4 = nn.Sequential(
            Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        layer4.load_state_dict(resnet_.layer4.state_dict())
        
        self.layer40 = nn.Sequential(copy.deepcopy(layer4))
        self.layer41 = nn.Sequential(copy.deepcopy(layer4))

        self.pam_module1 = PAM_Module(2048)
        self.pam_module2 = PAM_Module(2048)

        
        self.batch_drop = BatchDrop()
        
        self.res_part1 = Bottleneck(2048, 512) 
        self.res_part2 = Bottleneck(2048, 512)  
                
        self.global_avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.global_maxpool = nn.AdaptiveMaxPool2d((1, 1))

        self.bn1 = nn.BatchNorm1d(2048)
        self.bn2 = nn.BatchNorm1d(2048)

        self.classifier1 = nn.Linear(2048, num_classes)
        self.classifier2 = nn.Linear(2048, num_classes)
              
        # Manual init: BN to identity (weight 1, bias 0); classifier weights
        # ~ N(0, 0.01) with zero bias.
        nn.init.constant_(self.bn1.weight, 1.0)
        nn.init.constant_(self.bn1.bias, 0.0)
        nn.init.constant_(self.bn2.weight, 1.0)
        nn.init.constant_(self.bn2.bias, 0.0)
        nn.init.normal_(self.classifier1.weight, 0, 0.01)
        if self.classifier1.bias is not None:
            nn.init.constant_(self.classifier1.bias, 0)
        nn.init.normal_(self.classifier2.weight, 0, 0.01)
        if self.classifier2.bias is not None:
            nn.init.constant_(self.classifier2.bias, 0)
Exemplo n.º 17
0
    def __init__(self, num_classes, width_ratio=0.5, height_ratio=0.5):
        """BFE with CutMix batch-drop: ResNet-50 trunk to layer3, a rebuilt
        stride-1 res_conv5 (pretrained weights copied in), one global stage
        and two cutmix-batch-drop stages.
        """
        super(BFE, self).__init__()
        resnet = resnet50(pretrained=True)
        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,  # res_conv2
            resnet.layer2,  # res_conv3
            resnet.layer3,  # res_conv4
        )
        # res_conv5 rebuilt at stride 1, then loaded with the pretrained
        # layer4 weights.
        self.res_part = nn.Sequential(
            Bottleneck(1024,
                       512,
                       stride=1,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024,
                                     2048,
                                     kernel_size=1,
                                     stride=1,
                                     bias=False),
                           nn.BatchNorm2d(2048),
                       )),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512),
        )
        self.res_part.load_state_dict(resnet.layer4.state_dict())
        # Template reduction 2048 -> 512; deep-copied for the global branch.
        reduction = nn.Sequential(nn.Conv2d(2048, 512, 1), nn.BatchNorm2d(512),
                                  nn.ReLU())
        # stage 1
        self.global_avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.global_softmax = nn.Linear(512, num_classes)
        self.global_softmax.apply(weights_init_kaiming)
        self.global_reduction = copy.deepcopy(reduction)
        self.global_reduction.apply(weights_init_kaiming)

        # stage 2
        self.res_part2 = Bottleneck(2048, 512)
        self.part_maxpool = nn.AdaptiveMaxPool2d((1, 1))

        # stage 2
        self.cutmix_batch_drop1 = CutMixBatchDrop(height_ratio, width_ratio)
        # NOTE(review): the third positional arg of nn.Linear is ``bias``;
        # passing 1 (truthy) equals the default bias=True, so this is
        # harmless but looks like a Conv2d-style copy-paste.
        self.reduction1 = nn.Sequential(nn.Linear(2048, 512, 1),
                                        nn.BatchNorm1d(512), nn.ReLU())
        self.reduction1.apply(weights_init_kaiming)
        self.softmax1 = nn.Linear(512, num_classes)
        self.softmax1.apply(weights_init_kaiming)

        # stage 3
        self.cutmix_batch_drop2 = CutMixBatchDrop(height_ratio, width_ratio)
        self.reduction2 = nn.Sequential(nn.Linear(2048, 512, 1),
                                        nn.BatchNorm1d(512), nn.ReLU())
        self.reduction2.apply(weights_init_kaiming)
        self.softmax2 = nn.Linear(512, num_classes)
        self.softmax2.apply(weights_init_kaiming)
Exemplo n.º 18
0
    def __init__(self,num_classes):
        """Two-branch model with horizontal (h) and vertical (v) strip
        pooling and per-strip ReductionFc classifier heads.
        """
        super(Fighter, self).__init__()

        resnet = resnet50(pretrained=True)

        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
            resnet.layer2,
            resnet.layer3[0],
        )

        # Remaining layer3 blocks, shared template for both branches.
        res_conv4 = nn.Sequential(*resnet.layer3[1:])

        # res_conv5 rebuilt with a 1x1 projection shortcut, then loaded with
        # the pretrained layer4 weights.
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        res_p_conv5.load_state_dict(resnet.layer4.state_dict())

        self.p1 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))
        self.p2 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))

        # Global pool plus 4-strip horizontal and vertical pools.
        self.maxpool_g = nn.MaxPool2d(kernel_size=(16, 16))
        
        self.maxpool_h = nn.MaxPool2d(kernel_size=(4, 16))
        self.maxpool_v = nn.MaxPool2d(kernel_size=(16, 4))

        #total 3+2+3+4 branch for classificaiton
        self.fc_g_1 = ReductionFc(2048,256,num_classes)
        self.fc_g_2 = ReductionFc(2048,256,num_classes)
        
        self.fc_h_1 = ReductionFc(2048,256,num_classes)
        self.fc_h_2 = ReductionFc(2048,256,num_classes)
        self.fc_h_3 = ReductionFc(2048,256,num_classes)
        self.fc_h_4 = ReductionFc(2048,256,num_classes)

        self.fc_hc_1 = ReductionFc(512,256,num_classes)
        self.fc_hc_2 = ReductionFc(512,256,num_classes)
        self.fc_hc_3 = ReductionFc(512,256,num_classes)
        self.fc_hc_4 = ReductionFc(512,256,num_classes)
        
        self.fc_v_1 = ReductionFc(2048,256,num_classes)
        self.fc_v_2 = ReductionFc(2048,256,num_classes)
        self.fc_v_3 = ReductionFc(2048,256,num_classes)
        self.fc_v_4 = ReductionFc(2048,256,num_classes)
        
        self.fc_vc_1 = ReductionFc(512,256,num_classes)
        self.fc_vc_2 = ReductionFc(512,256,num_classes)
        self.fc_vc_3 = ReductionFc(512,256,num_classes)
        self.fc_vc_4 = ReductionFc(512,256,num_classes)
Exemplo n.º 19
0
    def __init__(self, num_classes):
        """Multi-pooling model: ResNet-50 stem + one branch with a rebuilt
        res_conv5, four PoolBlock pyramids and many ReductionFc heads.
        """
        super(SparkPower, self).__init__()

        resnet = resnet50(pretrained=True)
        #modify the first conv
        #resnet.conv1 = nn.Conv2d(4, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
            resnet.layer2,
            resnet.layer3[0],
        )

        # Remaining layer3 blocks plus a rebuilt res_conv5 (pretrained
        # weights copied in) form the single branch p1.
        res_conv4 = nn.Sequential(*resnet.layer3[1:])
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        res_p_conv5.load_state_dict(resnet.layer4.state_dict())

        self.p1 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))

        self.PB1 = PoolBlock(size=(9, 4), stride=(3, 2), dim=2)  #6*3
        self.PB2 = PoolBlock(size=(18, 5), stride=(3, 1))  #3*4
        self.PB3 = PoolBlock(size=(8, 8), stride=(4, 8))  #5*1
        self.PB4 = PoolBlock(size=(12, 4), stride=(6, 2), dim=23)  #3*3->1
        # NOTE(review): dim=23 above looks like a possible typo (other
        # blocks use small dims like 2) -- confirm against PoolBlock.

        self.avgpool = nn.AvgPool2d(kernel_size=(24, 8))
        self.maxpool = nn.MaxPool2d(kernel_size=(24, 8))

        self.p1_1 = ReductionFc(2048, 256, num_classes)
        self.p1_2 = ReductionFc(2048, 256, num_classes)
        self.p1_3 = ReductionFc(2048, 256, num_classes)
        self.p2_1 = ReductionFc(2048, 256, num_classes)
        self.p2_2 = ReductionFc(2048, 256, num_classes)
        self.p2_3 = ReductionFc(2048, 256, num_classes)
        self.p3_1 = ReductionFc(2048, 256, num_classes)
        self.p3_2 = ReductionFc(2048, 256, num_classes)
        self.p3_3 = ReductionFc(2048, 256, num_classes)
        self.p3_4 = ReductionFc(2048, 256, num_classes)
        self.p3_5 = ReductionFc(2048, 256, num_classes)
        self.p4_1 = ReductionFc(2048, 256, num_classes)

        self.p_g1 = ReductionFc(638, 256, num_classes)
        self.p_g2 = ReductionFc(638, 256, num_classes)
        self.p_g3 = ReductionFc(638, 256, num_classes)
        self.p_g4 = ReductionFc(638, 256, num_classes)
Exemplo n.º 20
0
    def __init__(self, in_channels, out_channels, bilinear=True):
        """Decoder stage whose conv path is three Bottleneck residual blocks.

        The first block projects in_channels -> out_channels via a 1x1
        conv + BN shortcut; the remaining two operate at out_channels.
        """
        super().__init__(in_channels, out_channels, bilinear)

        planes = out_channels // 4
        # 1x1 projection so the first residual shortcut matches out_channels.
        skip_proj = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, 1, bias=False),
            nn.BatchNorm2d(out_channels),
        )
        self.conv = nn.Sequential(
            Bottleneck(in_channels, planes, downsample=skip_proj),
            Bottleneck(out_channels, planes),
            Bottleneck(out_channels, planes),
        )
Exemplo n.º 21
0
    def __init__(self, channels, h_ratio=0.33, w_ratio=1., double_bottleneck = False):
        """Batch feature erasing head: a Bottleneck refinement followed by
        two batch-drop variants (basic and top).

        Args:
            channels: input channel count fed to the Bottleneck(s).
            h_ratio: fraction of the height to erase.
            w_ratio: fraction of the width to erase (basic drop only).
            double_bottleneck: stack two Bottlenecks instead of one.
        """
        super(BatchFeatureErase_Top, self).__init__()
        if double_bottleneck:
            refinement = nn.Sequential(Bottleneck(channels, 512),
                                       Bottleneck(channels, 512))
        else:
            refinement = Bottleneck(channels, 512)
        self.drop_batch_bottleneck = refinement
        self.drop_batch_drop_basic = BatchDrop(h_ratio, w_ratio)
        self.drop_batch_drop_top = BatchDropTop(h_ratio)
Exemplo n.º 22
0
    def __init__(self, feats=256):
        """Multiple Granularity Network (MGN) built on pretrained ResNet-50.

        Args:
            feats: channel width of each reduced embedding (default 256).
        """
        super(MGN, self).__init__()

        # ImageNet-pretrained backbone supplies all weights used below.
        resnet = resnet50(pretrained=True)

        # Shared stem up to and including the first block of layer3.
        # NOTE(review): 'backone' (sic) is kept as-is — renaming the
        # attribute would break existing checkpoints and callers.
        self.backone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
            resnet.layer2,
            resnet.layer3[0],
        )

        # Remainder of layer3, duplicated into each branch below.
        res_conv4 = nn.Sequential(*resnet.layer3[1:])

        # Global branch keeps the stock layer4.
        res_g_conv5 = resnet.layer4

        # Part branches use a rebuild of layer4 without an explicit stride
        # (assumes Bottleneck's default stride is 1, keeping a larger map
        # for stripe splitting — TODO confirm); the 1x1 conv + BN downsample
        # projects 1024 -> 2048 for the residual add.
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        # Valid only because the Sequential mirrors layer4's module layout.
        res_p_conv5.load_state_dict(resnet.layer4.state_dict())

        # Three parameter-independent branches (deep copies share nothing).
        self.p1 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_g_conv5))
        self.p2 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))
        self.p3 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))

        # Global pooling per branch, plus 2-part / 3-part stripe pooling.
        self.pool_zg_p1 = PartPool(part_num=1, method='avg')
        self.pool_zg_p2 = PartPool(part_num=1, method='avg')
        self.pool_zg_p3 = PartPool(part_num=1, method='avg')
        self.pool_zp2 = PartPool(part_num=2, method='avg')
        self.pool_zp3 = PartPool(part_num=3, method='avg')

        # 2048 -> feats reduction; one independent copy per pooled feature.
        reduction = nn.Sequential(nn.Conv2d(2048, feats, 1, bias=False),
                                  nn.BatchNorm2d(feats), nn.ReLU())

        self._init_reduction(reduction)
        self.reduction_0 = copy.deepcopy(reduction)
        self.reduction_1 = copy.deepcopy(reduction)
        self.reduction_2 = copy.deepcopy(reduction)
        self.reduction_3 = copy.deepcopy(reduction)
        self.reduction_4 = copy.deepcopy(reduction)
        self.reduction_5 = copy.deepcopy(reduction)
        self.reduction_6 = copy.deepcopy(reduction)
        self.reduction_7 = copy.deepcopy(reduction)
Exemplo n.º 23
0
    def __init__(self, args):
        """Two-branch baseline model on a pretrained ResNet-50.

        Args:
            args: namespace providing ``num_classes``, ``feats`` and
                ``pool`` ('max' or 'avg').

        Raises:
            Exception: if ``args.pool`` is neither 'max' nor 'avg'.
        """
        super(Base, self).__init__()
        num_classes = args.num_classes
        feats = args.feats
        resnet = resnet50(pretrained=True)

        # Backbone: shared stem up to the first block of layer3.
        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
            resnet.layer2,
            resnet.layer3[0],
        )
        # Remainder of layer3, duplicated into both branches below.
        res_conv4 = nn.Sequential(*resnet.layer3[1:])
        # Rebuild of layer4 whose residual shortcut is a 1x1 conv + BN
        # projecting 1024 -> 2048; weights are copied from the real layer4
        # (works because the Sequential mirrors layer4's module layout).
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        res_p_conv5.load_state_dict(resnet.layer4.state_dict())
        # Two parameter-independent branches (deep copies).
        self.p1 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))
        self.p2 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))

        # Pooling layers (kernel 24x8 assumes that feature-map size —
        # TODO confirm input resolution).
        if args.pool == 'max':
            pool2d = nn.MaxPool2d
        elif args.pool == 'avg':
            pool2d = nn.AvgPool2d
        else:
            raise Exception()
        self.pool_p1 = pool2d(kernel_size=(24, 8))
        self.pool_p2 = pool2d(kernel_size=(24, 8))

        # 1x1 conv for dimensionality reduction: 2048 -> args.feats.
        reduction = nn.Sequential(nn.Conv2d(2048, args.feats, 1, bias=False),
                                  nn.BatchNorm2d(args.feats), nn.ReLU())
        self._init_reduction(reduction)

        self.reduction_1 = copy.deepcopy(reduction)
        self.reduction_2 = copy.deepcopy(reduction)

        # Fully connected classifier heads, one per branch.
        self.fc_1 = nn.Linear(feats, num_classes)
        self.fc_2 = nn.Linear(feats, num_classes)
        self._init_fc(self.fc_1)
        self._init_fc(self.fc_2)
Exemplo n.º 24
0
    def __init__(self, inplanes, planes, num_layers):
        """Bottleneck U-ResNet stage: one projecting Bottleneck, a stack of
        plain Bottlenecks, and a transposed (upsampling) Bottleneck.

        Args:
            inplanes: input channel count of the first block.
            planes: base width; expanded width is 4 * planes.
            num_layers: total layer budget; the middle stack holds
                num_layers - 2 blocks.
        """
        super(BottleneckUResNetLayer, self).__init__()
        expanded = 4 * planes
        projection = nn.Sequential(
            nn.Conv2d(inplanes, expanded, kernel_size=1, bias=False),
            nn.BatchNorm2d(expanded),
        )
        self.conv_1 = Bottleneck(inplanes, planes, downsample=projection)

        self.layers = nn.Sequential(
            *(Bottleneck(expanded, planes) for _ in range(num_layers - 2)))

        self.upsample = TransposedBottleneck(expanded, planes, stride=2)
 def __init__(self, in_features, num_classes, pretrained=False):
     """RoI feature extractor: a two-layer MLP head plus a conv stack.

     Args:
         in_features: representation size produced by the MLP head.
         num_classes: not referenced in this constructor — presumably
             consumed by a subclass or a later head; TODO confirm.
         pretrained: not referenced in this constructor.
     """
     super(RoIFeatureExtractor_new, self).__init__()
     # 1280 channels over a 7x7 RoI, flattened into the MLP head.
     self.fc_head = TwoMLPHead(in_channels=1280 * 7 * 7,
                               representation_size=in_features)
     # NOTE(review): if BasicBlock/Bottleneck follow torchvision's channel
     # expansions (1 and 4 respectively), these in/out widths do not chain
     # (e.g. Bottleneck(5120, 5120) outputs 20480, not the 1280 the next
     # BasicBlock expects). Verify these are custom block classes.
     layers = [
         BasicBlock(256 * 5, 1024 * 5),
         Bottleneck(1024 * 5, 1024 * 5),
         BasicBlock(256 * 5, 1024 * 5),
         Bottleneck(1024 * 5, 1024 * 5),
         BasicBlock(256 * 5, 1024 * 5),
         Bottleneck(1024 * 5, 1024 * 5)
     ]
     self.conv_head = nn.Sequential(*layers)
Exemplo n.º 26
0
    def __init__(self, features, bilinear: bool = True, v2=False):
        """U-Net style decoder wrapped around a ResNet feature extractor.

        Args:
            features: encoder module; its ``bottleneck`` attribute selects
                the channel plan (BasicBlock => light widths, else wide).
            bilinear: if True, decoder widths are halved (factor = 2).
            v2: enable the v2 variant — Bottleneck bottom, per-skip 1x1
                smoothing convs, and the Upv2 up-block.
        """
        super(ResUNet, self).__init__()
        self.features = features
        self.name = 'ResUNet'
        self.v2 = v2

        # BasicBlock encoders expose narrower stage widths than
        # Bottleneck encoders.
        is_light = self.features.bottleneck == vrn.BasicBlock
        channels = [64, 64, 128, 256, 512] if is_light else [64, 256, 512, 1024, 2048]
        self.base_channel_size = channels[0]
        factor = 2 if bilinear else 1

        # Stem: two 3x3 conv-BN-ReLU layers on a single-channel input.
        self.top = nn.Sequential(
            nn.Conv2d(1, channels[0], 3, padding=1, bias=False),
            nn.BatchNorm2d(channels[0]),
            nn.ReLU(inplace=True),
            nn.Conv2d(channels[0], channels[0], 3, padding=1, bias=False),
            nn.BatchNorm2d(channels[0]),
            nn.ReLU(inplace=True),
        )

        if v2:
            # 1x1 convs smoothing each skip connection before fusion.
            self.smooth4 = nn.Conv2d(channels[3], channels[3], kernel_size=1)
            self.smooth3 = nn.Conv2d(channels[2], channels[2], kernel_size=1)
            self.smooth2 = nn.Conv2d(channels[1], channels[1], kernel_size=1)
            self.smooth1 = nn.Conv2d(channels[0], channels[0], kernel_size=1)
            # Residual bottleneck stack at the lowest resolution.
            self.bottom = nn.Sequential(
                Bottleneck(channels[4], channels[4] // 4),
                Bottleneck(channels[4], channels[4] // 4),
                Bottleneck(channels[4], channels[4] // 4)
            )
        else:
            # v1 keeps a plain 1x1 conv + ReLU bottom.
            self.bottom = nn.Sequential(
                nn.Conv2d(channels[4], channels[4], 1),
                nn.ReLU(inplace=True),
            )

        if v2:
            Up = Upv2
        else:
            Up = Upv1
        # Decoder: each Up takes (upsampled + skip) channels in.
        #                                                                            up + skip, out
        self.up1 = Up(channels[4] + channels[3], channels[4] // factor, bilinear)  # 2048 + 1024, 1024
        self.up2 = Up(channels[3] + channels[2], channels[3] // factor, bilinear)  # 1024 + 512, 512
        self.up3 = Up(channels[2] + channels[1], channels[2] // factor, bilinear)  # 512 + 256, 256
        self.up4 = Up(channels[1] + channels[0], channels[1] // factor, bilinear)  # 256 + 64, 128
        self.up5 = Up(channels[1] // factor + channels[0], channels[0], bilinear)  # 256 + 64, 64
        # Final channel squeeze followed by a 2x bilinear upsample.
        self.last_up = nn.Sequential(
            nn.Conv2d(channels[1] // factor, channels[0], kernel_size=1),
            nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True)
        )
        # Pads inputs so spatial dims are multiples of 32.
        self.pad_to = PadToX(32)
Exemplo n.º 27
0
    def __init__(self, num_classes=CATEGORIES, width_ratio=0.5, height_ratio=0.5, model_path=None):
        """Batch Feature Erasing (BFE) network on a ResNet-50 backbone.

        Args:
            num_classes: identity-class count for both softmax heads.
            width_ratio: fraction of feature-map width erased by BatchDrop.
            height_ratio: fraction of feature-map height erased by BatchDrop.
            model_path: optional checkpoint; matching keys are loaded,
                non-matching keys are silently kept from the fresh model.
        """
        super(BFE, self).__init__()
        resnet = resnet50(pretrained=True)
        # Shared trunk up to and including res_conv4.
        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,  # res_conv2
            resnet.layer2,  # res_conv3
            resnet.layer3,  # res_conv4
        )
        # Stride-1 rebuild of layer4 (keeps a larger feature map); the
        # 1x1 conv + BN downsample projects 1024 -> 2048 for the residual.
        self.res_part = nn.Sequential(
            Bottleneck(1024, 512, stride=1, downsample=nn.Sequential(
                nn.Conv2d(1024, 2048, kernel_size=1, stride=1, bias=False),
                nn.BatchNorm2d(2048),
            )),
            Bottleneck(2048, 512),
            Bottleneck(2048, 512),
        )
        # Valid because the Sequential mirrors layer4's module layout.
        self.res_part.load_state_dict(resnet.layer4.state_dict())
        reduction = nn.Sequential(
            nn.Conv2d(2048, 512, 1),
            nn.BatchNorm2d(512),
            nn.ReLU()
        )
        # global branch
        self.global_avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.global_softmax = nn.Linear(512, num_classes)
        self.global_softmax.apply(weights_init_kaiming)
        self.global_reduction = copy.deepcopy(reduction)
        self.global_reduction.apply(weights_init_kaiming)

        # part branch
        self.res_part2 = Bottleneck(2048, 512)

        self.part_maxpool = nn.AdaptiveMaxPool2d((1, 1))
        self.batch_crop = BatchDrop(height_ratio, width_ratio)
        # Fix: the original passed a stray third positional arg (1) to
        # nn.Linear, which lands in its boolean `bias` parameter; bias
        # defaults to True, so dropping the 1 is behavior-preserving.
        self.reduction = nn.Sequential(
            nn.Linear(2048, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU()
        )
        self.reduction.apply(weights_init_kaiming)
        self.softmax = nn.Linear(1024, num_classes)
        self.softmax.apply(weights_init_kaiming)
        state = load_model(model_path)
        if state:
            # Partial load: keys absent from the checkpoint keep their
            # freshly initialized weights instead of failing a strict load.
            new_state = self.state_dict()
            new_state.update({k: v for k, v in state.items() if k in new_state})
            self.load_state_dict(new_state)
Exemplo n.º 28
0
    def __init__(self, num_classes):
        """Multiple Granularity Network (MGN) on pretrained ResNet-50.

        Args:
            num_classes: identity-class count for every ReductionFc head.
        """
        super(MGN, self).__init__()

        # ImageNet-pretrained backbone supplies all weights used below.
        resnet = resnet50(pretrained=True)

        # Shared stem up to and including the first block of layer3.
        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
            resnet.layer2,
            resnet.layer3[0],
        )

        # Remainder of layer3, duplicated into each branch below.
        res_conv4 = nn.Sequential(*resnet.layer3[1:])

        # Global branch keeps the stock layer4.
        res_g_conv5 = resnet.layer4

        # Part branches rebuild layer4 without an explicit stride (assumes
        # Bottleneck's default stride is 1 — TODO confirm); the 1x1 conv +
        # BN downsample projects 1024 -> 2048 for the residual add.
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        # Valid because the Sequential mirrors layer4's module layout.
        res_p_conv5.load_state_dict(resnet.layer4.state_dict())

        # Three parameter-independent branches (deep copies).
        self.p1 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_g_conv5))
        self.p2 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))
        self.p3 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))

        # Global pooling (p1 map is half-sized: its layer4 keeps stride 2)
        # plus 2-part and 3-part horizontal stripe pooling.
        self.maxpool_zg_p1 = nn.MaxPool2d(kernel_size=(12, 4))
        self.maxpool_zg_p2 = nn.MaxPool2d(kernel_size=(24, 8))
        self.maxpool_zg_p3 = nn.MaxPool2d(kernel_size=(24, 8))
        self.maxpool_zp2 = nn.MaxPool2d(kernel_size=(12, 8))
        self.maxpool_zp3 = nn.MaxPool2d(kernel_size=(8, 8))

        # Global heads (part=False) and per-stripe heads.
        self.fc_g1 = ReductionFc(2048, 256, num_classes, part=False)
        self.fc_g2 = ReductionFc(2048, 256, num_classes, part=False)
        self.fc_g3 = ReductionFc(2048, 256, num_classes, part=False)

        self.fc_p1_1 = ReductionFc(2048, 256, num_classes)
        self.fc_p1_2 = ReductionFc(2048, 256, num_classes)
        self.fc_p2_1 = ReductionFc(2048, 256, num_classes)
        self.fc_p2_2 = ReductionFc(2048, 256, num_classes)
        self.fc_p2_3 = ReductionFc(2048, 256, num_classes)
Exemplo n.º 29
0
    def __init__(self, num_classes):
        """Single-branch pyramid-pooling model on pretrained ResNet-50.

        Args:
            num_classes: identity-class count for every ReductionFc head.
        """
        super(SparkPCA, self).__init__()

        # Shared stem up to and including the first block of layer3.
        resnet = resnet50(pretrained=True)
        self.backbone = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
            resnet.layer1,
            resnet.layer2,
            resnet.layer3[0],
        )

        # Remainder of layer3 plus a rebuild of layer4 whose residual
        # shortcut is a 1x1 conv + BN projecting 1024 -> 2048; weights
        # copied from the real layer4 (the Sequential mirrors its layout).
        res_conv4 = nn.Sequential(*resnet.layer3[1:])
        res_p_conv5 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        res_p_conv5.load_state_dict(resnet.layer4.state_dict())

        self.p1 = nn.Sequential(copy.deepcopy(res_conv4),
                                copy.deepcopy(res_p_conv5))

        # Multi-scale pooling blocks; trailing comments give the expected
        # output grid of each block.
        self.PB1 = PoolBlock(size=(8, 2), stride=(8, 2))  #3*4
        self.PB2 = PoolBlock(size=(6, 6), stride=(6, 2))  #4*2
        self.PB3 = PoolBlock(size=(8, 8), stride=(8, 8))  #3*1

        # Whole-map pooling (kernel 24x8 assumes that feature-map size).
        self.avgpool = nn.AvgPool2d(kernel_size=(24, 8))
        self.maxpool = nn.MaxPool2d(kernel_size=(24, 8))

        # One reduction + classifier head per pooled cell.
        self.p1_1 = ReductionFc(2048, 256, num_classes)
        self.p1_2 = ReductionFc(2048, 256, num_classes)
        self.p1_3 = ReductionFc(2048, 256, num_classes)
        self.p2_1 = ReductionFc(2048, 256, num_classes)
        self.p2_2 = ReductionFc(2048, 256, num_classes)
        self.p2_3 = ReductionFc(2048, 256, num_classes)
        self.p2_4 = ReductionFc(2048, 256, num_classes)
        self.p3_1 = ReductionFc(2048, 256, num_classes)
        self.p3_2 = ReductionFc(2048, 256, num_classes)
        self.p3_3 = ReductionFc(2048, 256, num_classes)

        # NOTE(review): 638 input width presumably matches a concatenated
        # feature built in forward() — verify against the caller.
        self.p_g1 = ReductionFc(638, 256, num_classes)
        self.p_g2 = ReductionFc(638, 256, num_classes)
        self.p_g3 = ReductionFc(638, 256, num_classes)
        self.p_g4 = ReductionFc(638, 256, num_classes)
Exemplo n.º 30
0
    def __init__(self, model_path=None, K=1):
        """MDNet-style network with K domain-specific binary branches.

        Args:
            model_path: optional checkpoint — '.pth' (this project's
                format) or '.mat' (original MDNet weights).
            K: number of training domains, i.e. parallel binary heads.

        Raises:
            RuntimeError: if model_path has an unsupported extension.
        """
        super(MDNet, self).__init__()
        self.K = K

        # Shared feature extractor: three conv stages plus fc4/fc5.
        self.layers = nn.Sequential(OrderedDict([
                ('conv1', nn.Sequential(nn.Conv2d(3, 96, kernel_size=7, stride=2),
                                        nn.ReLU(),
                                        LRN(),
                                        nn.MaxPool2d(kernel_size=3, stride=2))),
                ('conv2', nn.Sequential(nn.Conv2d(96, 256, kernel_size=5, stride=2),
                                        nn.ReLU(),
                                        LRN(),
                                        nn.MaxPool2d(kernel_size=3, stride=2))),
                ('conv3', nn.Sequential(nn.Conv2d(256, 512, kernel_size=3, stride=1),
                                        nn.ReLU())),
                ('fc4',   nn.Sequential(nn.Dropout(0.5),
                                        nn.Linear(512 * 3 * 3, 512),
                                        nn.ReLU())),
                ('fc5',   nn.Sequential(nn.Dropout(0.5),
                                        nn.Linear(512, 512),
                                        nn.ReLU()))]))
        # One binary (target/background) head per domain.
        self.branches = nn.ModuleList([nn.Sequential(nn.Dropout(0.5),
                                                     nn.Linear(512, 2)) for _ in range(K)])
        # Second head set fed by the 1024-dim reduced residual features.
        self.branches2 = nn.ModuleList([nn.Sequential(nn.Dropout(0.5),
                                                      nn.Linear(1024, 2)) for _ in range(K)])
        # Residual refinement: 96 -> 2048 with a 1x1 projection shortcut.
        self.res_part2 = nn.Sequential(OrderedDict([
             ('conv4', nn.Sequential(Bottleneck(96, 512, downsample=nn.Sequential(
                 nn.Conv2d(96, 2048, kernel_size=1, stride=1, bias=False),
                 nn.BatchNorm2d(2048))),
                 Bottleneck(2048, 512)))]))
        self.batchdrop = BatchDrop(0.5, 0.2)
        self.part_maxpool = nn.Sequential(OrderedDict([('conv5', nn.AdaptiveMaxPool2d((1, 1)))]))
        # Fix: the original passed a stray third positional arg (1) to
        # nn.Linear, which lands in its boolean `bias` parameter; bias
        # defaults to True, so dropping the 1 is behavior-preserving.
        self.reduction = nn.Sequential(OrderedDict([('fc7', nn.Sequential(
             nn.Linear(2048, 1024),
             nn.BatchNorm1d(1024),
             nn.ReLU()
         ))]))

        if model_path is not None:
            ext = os.path.splitext(model_path)[1]
            if ext == '.pth':
                self.load_model(model_path)
            elif ext == '.mat':
                self.load_mat_model(model_path)
            else:
                # Fix: corrected 'Unkown' typo in the error message.
                raise RuntimeError("Unknown model format: %s" % (model_path))
        self.build_param_dict()