Example #1
 print("load_net...")
 if args.model == 'mAlexNet':
     net = mAlexNet()
 elif args.model == 'AlexNet':
     net = AlexNet()
 elif args.model == "carnet":
     net=carNet()
 elif args.model=="googlenet":
     net=models.googlenet()
     num_fc = net.fc.in_features
     net.fc = nn.Linear(num_fc, 2)
 elif args.model=="vgg16":
     net=models.vgg16(pretrained=True)
     num_fc = net.classifier[6].in_features
     net.classifier[6] = torch.nn.Linear(num_fc, 2)
     for param in net.parameters():
         param.requires_grad = False
     # 但是参数全部固定了,也没法进行学习,所以我们不固定最后一层,即全连接层
     for param in net.classifier[6].parameters():
         param.requires_grad = True
 elif args.model=="Inception_v3":
     net=models.inception_v3()
     net.AuxLogits.fc=nn.Linear(768,2)
     net.fc = nn.Linear(2048, 2)
     net.aux_logits=False
     # net=net.cuda()
 elif args.model=="mobilenet_v3_small":
     net=models.mobilenet_v3_small()
     net.classifier[3]=nn.Linear(1024,2)
     # net.fc = nn.Linear(num_fc, 2)
 elif args.model=="mobilenet_v3_large":
Example #2
    setattr(dataloader['base'], 'total_item_len', len(base_set))

    # ---------------------------- model ------------------------------------
    if config.model_name == 'resnet18':
        model = ResNet18(config.code_length, classes, config.class_mask)
    elif config.model_name == 'alexnet':
        model = AlexNet(config.code_length, classes, config.class_mask)
    else:
        # Fail fast: with an unknown name, `model` would be unbound below.
        raise ValueError('undefined model: ' + config.model_name)

    model = nn.DataParallel(model)
    model.cuda()
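    # (DataParallel above splits each batch across all visible GPUs;
    # .cuda() moves the wrapped parameters to GPU memory before training.)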

    # ---------------------------- loss and opt ------------------------------------
    criterion = nn.CrossEntropyLoss()
    criterion_hash = nn.MSELoss()
    optimizer = optim.SGD(model.parameters(), lr=config.lr, momentum=0.9)
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1)
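    # SGD with momentum 0.9; StepLR multiplies the learning rate by 0.1
    # every 30 epochs (presumably stepped once per epoch inside train_model).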

    # ---------------------------- log and train ------------------------------------
    log_file = open(
        config.model_name + '_' + config.dataset + '_' +
        str(config.code_length) + '.log', 'a')
    log_file.write(str(config))
    log_file.write('\n')
    print('training start ...')
    train_model(model, dataloader, criterion, criterion_hash, optimizer,
                exp_lr_scheduler, config.epoch, config.code_length, classes,
                log_file)
    log_file.close()
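
train_model itself is outside this excerpt (it also receives criterion_hash, the code length and the class list for the hashing objective). Purely as an illustration of how the optimizer and StepLR scheduler created above are typically driven, and not the repository's actual implementation, a minimal epoch loop could look like the sketch below, assuming the model returns class logits.

def run_epochs(model, loader, criterion, optimizer, scheduler, num_epochs):
    # Hypothetical skeleton: classification loss only, no hashing objective.
    for epoch in range(num_epochs):
        model.train()
        for inputs, labels in loader:
            inputs, labels = inputs.cuda(), labels.cuda()
            optimizer.zero_grad()
            loss = criterion(model(inputs), labels)
            loss.backward()
            optimizer.step()
        # Step the LR schedule once per epoch, after the optimizer updates.
        scheduler.step()

run_epochs(model, dataloader['base'], criterion, optimizer,
           exp_lr_scheduler, config.epoch)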