Example #1
    def __init__(self):
        super(coarse_extractor, self).__init__()
        # ResNet-50 backbone yielding 2048-d pooled features
        self.model_fc = model_no.Resnet50Fc()
        # bottleneck projects the 2048-d features down to 256-d
        self.bottleneck = nn.Linear(2048, 256)
        self.bottleneck.weight.data.normal_(0, 0.005)
        self.bottleneck.bias.data.fill_(0.1)
        self.bottleneck_layer = nn.Sequential(self.bottleneck, nn.ReLU(), nn.Dropout(0.5))
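Only the constructor appears in this example. A minimal self-contained sketch of how such an extractor could be completed and exercised, assuming a torchvision ResNet-50 stands in for model_no.Resnet50Fc (the FeatureExtractor name and the forward method are illustrative assumptions, not code from the project):

import torch
import torch.nn as nn
from torchvision import models

class FeatureExtractor(nn.Module):
    def __init__(self):
        super(FeatureExtractor, self).__init__()
        # assumption: torchvision ResNet-50 as the 2048-d backbone
        backbone = models.resnet50(weights=None)
        # drop the final fc layer, keep the pooled 2048-d features
        self.features = nn.Sequential(*list(backbone.children())[:-1])
        self.bottleneck = nn.Linear(2048, 256)
        self.bottleneck.weight.data.normal_(0, 0.005)
        self.bottleneck.bias.data.fill_(0.1)
        self.bottleneck_layer = nn.Sequential(self.bottleneck, nn.ReLU(), nn.Dropout(0.5))

    def forward(self, x):
        f = self.features(x).flatten(1)    # (N, 2048)
        return self.bottleneck_layer(f)    # (N, 256)

x = torch.randn(2, 3, 224, 224)
print(FeatureExtractor()(x).shape)  # torch.Size([2, 256])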
Example #2
    def __init__(self, feature_len):
        super(fine_net, self).__init__()
        self.model_fc = model_no.Resnet50Fc()
        # bottleneck projects the backbone features down to 256-d
        self.bottleneck_0 = nn.Linear(feature_len, 256)
        self.bottleneck_0.weight.data.normal_(0, 0.005)
        self.bottleneck_0.bias.data.fill_(0.1)
        self.bottleneck_layer = nn.Sequential(self.bottleneck_0, nn.ReLU(), nn.Dropout(0.5))
        # fine-grained label predictor over the 256-d bottleneck features
        self.classifier_layer = predictor(256, cate_all[0])
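The predictor head on the last line is defined elsewhere in the project. A plausible minimal stand-in, assuming it is a single linear classifier initialized the same way as the heads in Example #3 (this class body is an assumption, not the project's actual predictor):

import torch.nn as nn

class predictor(nn.Module):
    def __init__(self, feature_len, cate_num):
        super(predictor, self).__init__()
        # single linear head mapping features to class logits
        self.classifier = nn.Linear(feature_len, cate_num)
        self.classifier.weight.data.normal_(0, 0.01)
        self.classifier.bias.data.fill_(0.0)

    def forward(self, x):
        return self.classifier(x)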
Example #3
File: PAN.py Project: Jing--Li/PAN
    def __init__(self):
        super(coarse_net, self).__init__()
        self.model_fc = model_no.Resnet50Fc()

        # bottleneck projects the 2048-d backbone features down to 256-d
        self.bottleneck = nn.Linear(2048, 256)
        self.bottleneck.weight.data.normal_(0, 0.005)
        self.bottleneck.bias.data.fill_(0.1)
        self.bottleneck_layer = nn.Sequential(self.bottleneck, nn.ReLU(),
                                              nn.Dropout(0.5))

        # one linear head per coarse level of the label hierarchy
        self.classifier_layer_1 = nn.Linear(256, cate_all[1])
        self.classifier_layer_1.weight.data.normal_(0, 0.01)
        self.classifier_layer_1.bias.data.fill_(0.0)

        self.classifier_layer_2 = nn.Linear(256, cate_all[2])
        self.classifier_layer_2.weight.data.normal_(0, 0.01)
        self.classifier_layer_2.bias.data.fill_(0.0)

        self.classifier_layer_3 = nn.Linear(256, cate_all[3])
        self.classifier_layer_3.weight.data.normal_(0, 0.01)
        self.classifier_layer_3.bias.data.fill_(0.0)
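The excerpt stops at the constructor; a forward pass is not shown. One plausible sketch, assuming the 256-d bottleneck output is shared by the three coarse heads (the method body below is an assumption, not the project's code):

    def forward(self, x):
        features = self.model_fc(x)                   # (N, 2048) backbone features
        bottleneck = self.bottleneck_layer(features)  # (N, 256) shared representation
        out_1 = self.classifier_layer_1(bottleneck)   # logits for coarse level 1
        out_2 = self.classifier_layer_2(bottleneck)   # logits for coarse level 2
        out_3 = self.classifier_layer_3(bottleneck)   # logits for coarse level 3
        return out_1, out_2, out_3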
Example #4
        batch_size=4,
        shuffle=False,
        num_workers=args.num_workers)

    prep_dict_test = prep.image_test_10crop(resize_size=256, crop_size=224)
    # one loader per crop for 10-crop test-time evaluation
    for i in range(10):
        dataset_list = ImageList(open(dataset_test).readlines(),
                                 transform=prep_dict_test["val" + str(i)])
        dataset_loaders["val" + str(i)] = torch.utils.data.DataLoader(
            dataset_list, batch_size=4, shuffle=False, num_workers=6)

    # network construction
    feature_len = 2048
    # fine-grained feature extractor + fine-grained label predictor
    devices = list(range(torch.cuda.device_count()))
    model_fc = model_no.Resnet50Fc()
    model_fc = model_fc.to(device)
    model_fc = nn.DataParallel(model_fc, device_ids=devices)
    my_fine_net = fine_net(feature_len)
    my_fine_net = my_fine_net.to(device)
    my_fine_net.train(True)
    model_fc.train(True)

    # criterion and optimizer
    criterion = {
        "classifier": nn.CrossEntropyLoss(),
        "kl_loss": nn.KLDivLoss(size_average=False),
        "adversarial": nn.BCELoss()
    }

    optimizer_dict = [{
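The optimizer setup is cut off above. A typical completion, assuming the common transfer-learning convention of a smaller learning rate for the pretrained backbone than for the newly added layers (the parameter groups and learning rates here are assumptions, not the project's actual values):

import torch.optim as optim

optimizer_dict = [
    {"params": model_fc.parameters(), "lr": 0.0001},    # pretrained backbone: 10x smaller lr
    {"params": my_fine_net.parameters(), "lr": 0.001},  # newly initialized layers
]
optimizer = optim.SGD(optimizer_dict, lr=0.001, momentum=0.9, weight_decay=0.0005)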