Example #1
0
    return camera_id, labels


gallery_path = image_datasets['gallery'].imgs
query_path = image_datasets['query'].imgs

gallery_cam, gallery_label = get_id(gallery_path)
query_cam, query_label = get_id(query_path)

######################################################################
# Load the trained model weights for feature extraction.
print('-------test-----------')
# Pick the requested backbone; a PCB model overrides either choice.
backbone = ft_net_dense if opt.use_dense else ft_net
model_structure = backbone(opt.class_cnt)
if opt.PCB:
    model_structure = PCB(opt.class_cnt)

model = load_network(model_structure)

# Strip the final fc / classifier layers so the network outputs
# feature embeddings instead of class scores.
if opt.PCB:
    model = PCB_test(model)
else:
    model.model.fc = nn.Sequential()
    model.classifier = nn.Sequential()

# Switch to evaluation mode (fixes dropout and batch-norm statistics).
model = model.eval()
Example #2
0
def main():
    """Entry point: parse options, build a re-ID model, and fine-tune it.

    Loads the dataset, constructs a ResNet / DenseNet / PCB backbone,
    builds an SGD optimizer with a 10x smaller learning rate for the
    pretrained base than for the freshly initialised classifier heads,
    and runs training via ``train_model``.
    """
    opt = arg_parse()
    data_dir = opt.data_dir
    name = opt.name

    use_gpu = torch.cuda.is_available()
    dataloaders, dataset_sizes, class_names = load_data(data_dir, opt)

    ######################################################################
    # Finetuning the convnet
    # ----------------------
    # Load a pretrained model and reset the final fully connected layer.

    if opt.use_dense:
        model = ft_net_dense(len(class_names))
    else:
        model = ft_net(len(class_names))

    if opt.PCB:
        model = PCB(len(class_names))

    print(model)

    if use_gpu:
        model = model.cuda()

    criterion = nn.CrossEntropyLoss()

    # Collect the newly initialised "head" modules: they train at lr=0.1
    # while the pretrained backbone stays at lr=0.01.  PCB has one
    # classifier head per body part (6 parts) instead of a single one.
    if not opt.PCB:
        heads = [model.model.fc, model.classifier]
    else:
        heads = [model.model.fc]
        heads += [getattr(model, 'classifier%d' % i) for i in range(6)]

    head_param_ids = set()
    head_groups = []
    for head in heads:
        params = list(head.parameters())
        head_param_ids.update(id(p) for p in params)
        head_groups.append({'params': params, 'lr': 0.1})

    base_params = [p for p in model.parameters()
                   if id(p) not in head_param_ids]
    optimizer_ft = optim.SGD(
        [{'params': base_params, 'lr': 0.01}] + head_groups,
        weight_decay=5e-4,
        momentum=0.9,
        nesterov=True)

    # Decay LR by a factor of 0.1 every 40 epochs
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft,
                                           step_size=40,
                                           gamma=0.1)

    ######################################################################
    # Train and evaluate
    # ^^^^^^^^^^^^^^^^^^
    #
    # It should take around 1-2 hours on GPU.
    #
    dir_name = os.path.join('./model', name)
    # makedirs (not mkdir): also creates './model' itself on first run,
    # where plain os.mkdir would raise FileNotFoundError.
    os.makedirs(dir_name, exist_ok=True)

    # save opts
    with open('%s/opts.json' % dir_name, 'w') as fp:
        json.dump(vars(opt), fp, indent=1)

    model = train_model(model,
                        criterion,
                        optimizer_ft,
                        exp_lr_scheduler,
                        opt,
                        dataloaders,
                        dataset_sizes,
                        num_epochs=opt.num_epoch,
                        use_gpu=use_gpu)