def __init__(self, num_classes, last_stride, model_path, neck, neck_feat, model_name, pretrain_choice, cfg):
    """Build the reduced-embedding baseline.

    Pipeline: backbone -> global pooling -> embedding head (reduce to
    ``cfg.MODEL.EMBEDDING_DIM``) -> BNNeck -> margin/linear classifier.
    """
    super(Baseline_reduce, self).__init__()

    # Backbone; regnet variants expose their own output width.
    self.base = build_backbone(model_name, last_stride)
    if 'regnet' in model_name:
        self.in_planes = self.base.in_planes
    if pretrain_choice == 'imagenet':
        self.base.load_param(model_path)
        print('Loading pretrained ImageNet model......')

    # Global pooling: generalized-mean or plain adaptive average.
    if cfg.MODEL.POOLING_METHOD == 'GeM':
        print('using GeM pooling')
        self.gap = GeM()
    else:
        self.gap = nn.AdaptiveAvgPool2d(1)

    self.num_classes = num_classes
    self.feature_dim = cfg.MODEL.EMBEDDING_DIM
    # Embedding head projects backbone features down to feature_dim.
    self.reduce = build_embedding_head(cfg.MODEL.EMBEDDING_HEAD,
                                       self.in_planes, self.feature_dim,
                                       cfg.MODEL.DROPOUT_PROB)

    self.ID_LOSS_TYPE = cfg.MODEL.ID_LOSS_TYPE
    # BNNeck on the reduced embedding; bias frozen so BN performs no shift.
    self.bottleneck = nn.BatchNorm1d(self.feature_dim)
    self.bottleneck.bias.requires_grad_(False)  # no shift

    # All margin-based heads share the (dim, classes, s, m) signature;
    # anything else falls back to a plain bias-free linear classifier.
    margin_heads = {
        'arcface': Arcface,
        'cosface': Cosface,
        'amsoftmax': AMSoftmax,
        'circle': CircleLoss,
    }
    head_cls = margin_heads.get(self.ID_LOSS_TYPE)
    if head_cls is not None:
        print('using {}'.format(self.ID_LOSS_TYPE))
        self.classifier = head_cls(self.feature_dim, self.num_classes,
                                   s=cfg.SOLVER.COSINE_SCALE,
                                   m=cfg.SOLVER.COSINE_MARGIN)
    else:
        self.classifier = nn.Linear(self.feature_dim, self.num_classes, bias=False)

    self.bottleneck.apply(weights_init_kaiming)
    self.classifier.apply(weights_init_classifier)
def __init__(self, num_classes, last_stride, model_path, neck, neck_feat, model_name, pretrain_choice, cfg):
    """Two-head variant: delegate full setup to the parent baseline, then
    attach two parallel global-pooling branches (GeM and plain average)."""
    super(Baseline_2_Head, self).__init__(num_classes, last_stride, model_path,
                                          neck, neck_feat, model_name,
                                          pretrain_choice, cfg)
    # Branch 1: generalized-mean pooling; branch 2: adaptive average pooling.
    self.gap_1 = GeM()
    self.gap_2 = nn.AdaptiveAvgPool2d(1)
def __init__(self, num_classes, last_stride, model_path, neck, neck_feat, model_name, pretrain_choice, cfg):
    """Baseline with a MultiHeads block: backbone -> pooling -> MultiHeads
    grouping -> BNNeck -> margin/linear classifier, plus a 1x1 conv that
    reduces 3072 channels to 2048."""
    super(Baseline, self).__init__()
    self.base = build_backbone(model_name, last_stride)
    # regnet backbones report their own output channel width.
    if 'regnet' in model_name:
        self.in_planes = self.base.in_planes
    # Skip ImageNet weight loading for these model names — presumably their
    # pretrained weights come from elsewhere (e.g. timm). TODO confirm.
    if pretrain_choice == 'imagenet' and model_name not in [
            'tf_efficientnet_l2_ns', 'seresnext50_32x4d', 'dm_nfnet_f0',
            'nf_resnet50', 'dm_nfnet_f1'
    ]:
        self.base.load_param(model_path)
        print('Loading pretrained ImageNet model......')
    # Global pooling: generalized-mean or plain adaptive average.
    if cfg.MODEL.POOLING_METHOD == 'GeM':
        print('using GeM pooling')
        self.gap = GeM()
    else:
        self.gap = nn.AdaptiveAvgPool2d(1)
    self.num_classes = num_classes
    self.neck = neck            # cfg.MODEL.NECK
    self.neck_feat = neck_feat  # cfg.TEST.NECK_FEAT
    self.ID_LOSS_TYPE = cfg.MODEL.ID_LOSS_TYPE
    # NOTE(review): feature_dim/backbone_fc_dim are hard-coded to 2048 and do
    # not track self.in_planes — verify for non-ResNet-width backbones.
    self.MultiHeads = MultiHeads(feature_dim=2048, groups=32, mode='S', backbone_fc_dim=2048)
    # BNNeck; bias frozen so BN performs no shift.
    self.bottleneck = nn.BatchNorm1d(self.in_planes)
    self.bottleneck.bias.requires_grad_(False)  # no shift
    #self.bottleneck = IBN(self.in_planes)
    # Margin-based classifier heads all share (in_planes, num_classes, s, m).
    if self.ID_LOSS_TYPE == 'arcface':
        print('using {}'.format(self.ID_LOSS_TYPE))
        self.classifier = Arcface(self.in_planes, self.num_classes,
                                  s=cfg.SOLVER.COSINE_SCALE,
                                  m=cfg.SOLVER.COSINE_MARGIN)
    elif self.ID_LOSS_TYPE == 'cosface':
        print('using {}'.format(self.ID_LOSS_TYPE))
        self.classifier = Cosface(self.in_planes, self.num_classes,
                                  s=cfg.SOLVER.COSINE_SCALE,
                                  m=cfg.SOLVER.COSINE_MARGIN)
    elif self.ID_LOSS_TYPE == 'amsoftmax':
        print('using {}'.format(self.ID_LOSS_TYPE))
        self.classifier = AMSoftmax(self.in_planes, self.num_classes,
                                    s=cfg.SOLVER.COSINE_SCALE,
                                    m=cfg.SOLVER.COSINE_MARGIN)
    elif self.ID_LOSS_TYPE == 'circle':
        print('using {}'.format(self.ID_LOSS_TYPE))
        self.classifier = CircleLoss(self.in_planes, self.num_classes,
                                     s=cfg.SOLVER.COSINE_SCALE,
                                     m=cfg.SOLVER.COSINE_MARGIN)
    else:
        # Default: plain bias-free linear classifier.
        self.classifier = nn.Linear(self.in_planes, self.num_classes, bias=False)
    self.bottleneck.apply(weights_init_kaiming)
    self.classifier.apply(weights_init_classifier)
    # NOTE(review): 3072 -> 2048 reduction; 3072 is hard-coded — presumably
    # the concatenated feature width from the forward pass. TODO confirm.
    self.reduce = nn.Sequential(
        nn.Conv2d(3072, 2048, kernel_size=1, bias=False))
def __init__(self, num_classes, last_stride, model_path, neck, neck_feat, model_name, pretrain_choice, cfg): super(Baseline, self).__init__() #测试时model_name=resnet50_ibn_a self.base = build_backbone( model_name, last_stride) # 创建resnet50ibna模型(是否这一步不加载预训练参数?) if 'regnet' in model_name: self.in_planes = self.base.in_planes if pretrain_choice == 'imagenet': # 测试时MODEL.PRETRAIN_CHOICE "('self')"不加载ImageNet的参数给resnet50ibna,后续在大main会加载reid训练好的参数 self.base.load_param(model_path) print('Loading pretrained ImageNet model......') if cfg.MODEL.POOLING_METHOD == 'GeM': # GeM pooling print('using GeM pooling') self.gap = GeM() # GeM pooling else: self.gap = nn.AdaptiveAvgPool2d(1) #普通avg pooling self.num_classes = num_classes #重要参数,大main创建loader时得到 self.neck = neck #cfg.MODEL.NECK 是啥? self.neck_feat = neck_feat #cfg.TEST.NECK_FEAT 是啥? self.ID_LOSS_TYPE = cfg.MODEL.ID_LOSS_TYPE # 决定self.classifier类型 是啥? self.bottleneck = nn.BatchNorm1d( self.in_planes) # Batch Norm(dim=2048) self.bottleneck.bias.requires_grad_( False) # no shift 不训练Batch Norm的bias #self.bottleneck = IBN(self.in_planes) if self.ID_LOSS_TYPE == 'arcface': print('using {}'.format(self.ID_LOSS_TYPE)) self.classifier = Arcface(self.in_planes, self.num_classes, s=cfg.SOLVER.COSINE_SCALE, m=cfg.SOLVER.COSINE_MARGIN) elif self.ID_LOSS_TYPE == 'cosface': print('using {}'.format(self.ID_LOSS_TYPE)) self.classifier = Cosface(self.in_planes, self.num_classes, s=cfg.SOLVER.COSINE_SCALE, m=cfg.SOLVER.COSINE_MARGIN) elif self.ID_LOSS_TYPE == 'amsoftmax': print('using {}'.format(self.ID_LOSS_TYPE)) self.classifier = AMSoftmax(self.in_planes, self.num_classes, s=cfg.SOLVER.COSINE_SCALE, m=cfg.SOLVER.COSINE_MARGIN) elif self.ID_LOSS_TYPE == 'circle': print('using {}'.format(self.ID_LOSS_TYPE)) self.classifier = CircleLoss(self.in_planes, self.num_classes, s=cfg.SOLVER.COSINE_SCALE, m=cfg.SOLVER.COSINE_MARGIN) else: self.classifier = nn.Linear(self.in_planes, self.num_classes, bias=False) 
self.bottleneck.apply(weights_init_kaiming) #初始化BN层? self.classifier.apply(weights_init_classifier) #初始化classifier? #self.att = SpatialAttention2d(2048, 512) '''大初始化结束'''