Example #1
def test(img_path, anno_path):
    # load net
    num_classes = len(labelmap) + 1  # +1 for background
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    net.load_state_dict(
        torch.load(args.trained_model, map_location=torch.device('cpu')))
    net.eval()
    print('Finished loading model!')
    # load data
    dataset = SIXrayDetection(args.dataset_root,
                              args.dataset,
                              BaseTransform(300, dataset_mean),
                              SIXrayAnnotationTransform(),
                              test_set_path=(img_path, anno_path))
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.result_folder,
             net,
             args.cuda,
             dataset,
             BaseTransform(net.size, dataset_mean),
             args.top_k,
             300,
             thresh=args.confidence_threshold)
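These test scripts read their configuration from a module-level args namespace. A minimal argparse sketch covering the attributes used above is shown below; the flag names mirror those attributes, but the defaults are assumptions, not values from the original repository.

# Hedged sketch: command-line parsing for the attributes that test() reads
# from the global `args`. Defaults are placeholders.
import argparse

parser = argparse.ArgumentParser(description='SSD evaluation on SIXray')
parser.add_argument('--trained_model', default='weights/ssd300_SIXray.pth',
                    type=str, help='path to the trained .pth checkpoint')
parser.add_argument('--dataset_root', default='data/SIXray/', type=str)
parser.add_argument('--dataset', default='test', type=str)
parser.add_argument('--result_folder', default='results/', type=str)
parser.add_argument('--cuda', action='store_true',
                    help='evaluate on GPU if available')
parser.add_argument('--top_k', default=200, type=int)
parser.add_argument('--confidence_threshold', default=0.01, type=float)
args = parser.parse_args()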
Example #2
def test(img_path, anno_path):
    # load net
    num_classes = len(SIXray_CLASSES) + 1  # +1 background
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    net.load_state_dict(
        torch.load(args.trained_model, map_location=torch.device('cpu')))
    net.eval()
    # build the list of test image IDs from the annotation file names
    test_sets = []
    for anno_file in os.listdir(anno_path):
        test_sets.append(anno_file.split('.')[0])
    testset = SIXrayDetection(test_sets, None, SIXrayAnnotationTransform(),
                              image_path=img_path, anno_path=anno_path)
    test_net(args.save_folder, net, args.cuda, testset,
             BaseTransform(net.size, (104, 117, 123)),
             thresh=args.visual_threshold)
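A hedged usage sketch for this variant; the directory layout is hypothetical and only illustrates how the two paths are passed in.

# Hypothetical invocation of the test() function above; paths are assumptions.
if __name__ == '__main__':
    test(img_path='data/SIXray/test/images/',
         anno_path='data/SIXray/test/annotations/')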
Example #3
def test_voc():
    # load net
    num_classes = len(SIXray_CLASSES) + 1  # +1 background
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    net.load_state_dict(
        torch.load(args.trained_model, map_location=torch.device('cpu')))
    net.eval()
    print('Finished loading model!')
    # load data
    test_sets = "./data/sixray/test_1650.txt"
    testset = SIXrayDetection(test_sets, None, SIXrayAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder, net, args.cuda, testset,
             BaseTransform(net.size, (104, 117, 123)),
             thresh=args.visual_threshold)
Example #4
def test_voc():
    # load net
    num_classes = len(SIXray_CLASSES) + 1  # +1 background
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    net.load_state_dict(torch.load(args.trained_model))
    net.eval()
    print('Finished loading model!')
    # load data
    testset = SIXrayDetection(args.sixray_root, [('core_3000', 'val'),
                                                 ('coreless_3000', 'val')],
                              None, SIXrayAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder,
             net,
             args.cuda,
             testset,
             BaseTransform(net.size, (104, 117, 123)),
             thresh=args.visual_threshold)

Example #5
def evaluate_detections(box_list, output_dir, dataset):
    write_voc_results_file(box_list, dataset)
    do_python_eval(output_dir)


if __name__ == '__main__':
    # load net
    num_classes = len(labelmap) + 1  # +1 for background
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    net.load_state_dict(torch.load(args.trained_model))
    net.eval()
    print('Finished loading model!')
    # load data
    dataset = SIXrayDetection(args.sixray_root, args.imagesetfile,
                              BaseTransform(300, dataset_mean),
                              SIXrayAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder,
             net,
             args.cuda,
             dataset,
             BaseTransform(net.size, dataset_mean),
             args.top_k,
             300,
             thresh=args.confidence_threshold)
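The BaseTransform(size, mean) objects built above provide the SSD test-time preprocessing. A minimal sketch of the assumed behaviour (resize to a square input and subtract the per-channel mean, as in the ssd.pytorch BaseTransform) follows; the class name is changed to make clear it is an illustration, and the real implementation may differ in details such as channel order.

# Sketch of the assumed test-time preprocessing; not the repository's class.
import cv2
import numpy as np

class BaseTransformSketch:
    def __init__(self, size, mean):
        self.size = size
        self.mean = np.array(mean, dtype=np.float32)

    def __call__(self, image, boxes=None, labels=None):
        x = cv2.resize(image, (self.size, self.size)).astype(np.float32)
        x -= self.mean
        return x, boxes, labels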
Example #6
def train():
    cfg = xray
    dataset = SIXrayDetection(root=args.dataset_root, image_set='train.txt',
                              transform=SSDAugmentation(cfg['min_dim'],
                                                        MEANS))

    ssd_net = build_ssd('train', cfg['min_dim'], cfg['num_classes'])
    net = ssd_net

    if args.cuda:
        net = torch.nn.DataParallel(ssd_net)
        cudnn.benchmark = True

    if args.resume:
        print('Resuming training, loading {}...'.format(args.resume))
        ssd_net.load_weights(args.resume)
    else:
        vgg_weights = torch.load(args.save_folder + args.basenet)
        print('Loading base network...')
        ssd_net.vgg.load_state_dict(vgg_weights)

    if args.cuda:
        net = net.cuda()

    if not args.resume:
        print('Initializing weights...')
        # initialize newly added layers' weights with xavier method
        ssd_net.extras.apply(weights_init)
        ssd_net.loc.apply(weights_init)
        ssd_net.conf.apply(weights_init)

    optimizer = optim.SGD(net.parameters(), lr=args.lr, momentum=args.momentum,
                          weight_decay=args.weight_decay)
    scheduler = ExponentialLR(optimizer, gamma=args.gamma)
    criterion = MultiBoxLoss(cfg['num_classes'], 0.5, True, 0, True, 3, 0.5,
                             False, args.cuda)

    net.train()
    # loss counters
    loc_loss = 0
    conf_loss = 0
    print('Loading the dataset...')

    print('Training SSD on:', dataset.name)
    print('Using the specified args:')
    print(args)

    step_index = args.start_iter

    data_loader = data.DataLoader(dataset, args.batch_size,
                                  num_workers=args.num_workers,
                                  shuffle=True, collate_fn=detection_collate,
                                  pin_memory=True)
    # create batch iterator
    batch_iterator = iter(data_loader)
    for iteration in range(args.start_iter, cfg['max_iter']):
        # load train data
        try:
            images, targets = next(batch_iterator)
        except StopIteration:
            batch_iterator = iter(data_loader)
            images, targets = next(batch_iterator)

        if args.cuda:
            images = images.cuda()
            targets = [ann.cuda() for ann in targets]
        # forward
        t0 = time.time()
        out = net(images)
        # backprop
        optimizer.zero_grad()
        loss_l, loss_c = criterion(out, targets)
        loss = loss_l + loss_c
        loss.backward()
        optimizer.step()
        if iteration in cfg['lr_steps']:
            step_index += 1
            scheduler.step()
        t1 = time.time()
        loc_loss += loss_l.item()
        conf_loss += loss_c.item()

        if iteration % 10 == 0:
            print('timer: %.4f sec.' % (t1 - t0))
            print('iter ' + repr(iteration) + ' || Loss: %.4f ||' %
                  (loss.item()), end=' ')

        if iteration != 0 and iteration % 5000 == 0:
            print('Saving state, iter:', iteration)
            torch.save(ssd_net.state_dict(), 'weights/ssd300_SIXray_' +
                       repr(iteration) + '.pth')
    torch.save(ssd_net.state_dict(),
               args.save_folder + dataset.name + '.pth')
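train() applies weights_init to the extras, loc and conf heads, described in the code comment as Xavier initialization. A minimal sketch consistent with that call is given below; the actual helper is defined elsewhere in the repository and may differ.

# Assumed Xavier initialization helper for the newly added layers; the
# repository's own weights_init may not match this exactly.
import torch.nn as nn
import torch.nn.init as init

def weights_init(m):
    if isinstance(m, nn.Conv2d):
        init.xavier_uniform_(m.weight.data)
        if m.bias is not None:
            m.bias.data.zero_()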
Example #7
def evaluate_detections(box_list, output_dir, dataset):
    write_voc_results_file(box_list, dataset)
    do_python_eval(output_dir)


if __name__ == '__main__':
    # load net
    num_classes = len(labelmap) + 1  # +1 for background
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    net.load_state_dict(torch.load(args.trained_model))
    net.eval()
    print('Finished loading model!')
    # load data
    # test_sets = "./data/sixray/test_1650.txt"
    test_sets = imgsetpath
    dataset = SIXrayDetection(test_sets, BaseTransform(300, dataset_mean),
                              SIXrayAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder,
             net,
             args.cuda,
             dataset,
             BaseTransform(net.size, dataset_mean),
             args.top_k,
             300,
             thresh=args.confidence_threshold)
Example #8
def evaluate_detections(box_list, output_dir, dataset):
    write_voc_results_file(box_list, dataset)
    do_python_eval(output_dir)


if __name__ == '__main__':
    # load net
    num_classes = len(labelmap) + 1  # +1 for background
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    net.load_state_dict(
        torch.load(args.trained_model, map_location=torch.device('cpu')))
    net.eval()
    print('Finished loading model!')
    # load data
    dataset = SIXrayDetection(args.dataset_root, args.dataset,
                              BaseTransform(300, dataset_mean),
                              SIXrayAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder,
             net,
             args.cuda,
             dataset,
             BaseTransform(net.size, dataset_mean),
             args.top_k,
             300,
             thresh=args.confidence_threshold)
    # load net again for a second evaluation pass on test_imageSet
    num_classes = len(labelmap) + 1  # +1 for background
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    net.load_state_dict(torch.load(args.trained_model))
    net.eval()
    print('Finished loading model! ' + args.trained_model)
    # load data
    dataset = SIXrayDetection(args.sixray_root, test_imageSet,
                              BaseTransform(300, dataset_mean),
                              SIXrayAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder, net, args.cuda, dataset,
             BaseTransform(net.size, dataset_mean), args.top_k, 300,
             thresh=args.confidence_threshold)