Example #1
def main(args):
    # -----------------------------------------------------------------------------
    # Configuration
    # -----------------------------------------------------------------------------
    if args.im_size in [300, 512]:
        from model.detection.ssd_config import get_config
        cfg = get_config(args.im_size)
    else:
        print_error_message('{} image size not supported'.format(args.im_size))

    if args.dataset in ['voc', 'pascal']:
        from data_loader.detection.voc import VOC_CLASS_LIST
        num_classes = len(VOC_CLASS_LIST)
        object_names = VOC_CLASS_LIST

        cfg.conf_threshold = 0.4
    elif args.dataset == 'coco':
        from data_loader.detection.coco import COCO_CLASS_LIST
        num_classes = len(COCO_CLASS_LIST)
        object_names = COCO_CLASS_LIST

        cfg.conf_threshold = 0.3
    else:
        print_error_message('{} dataset not supported.'.format(args.dataset))
        exit(-1)

    cfg.NUM_CLASSES = num_classes

    # -----------------------------------------------------------------------------
    # Model
    # -----------------------------------------------------------------------------
    model = ssd(args, cfg)
    if args.weights_test:
        weight_dict = torch.load(args.weights_test, map_location='cpu')
        model.load_state_dict(weight_dict)
    else:
        print_error_message(
            "Please provide the location of the weight file via the weights_test argument"
        )

    num_gpus = torch.cuda.device_count()
    device = 'cuda' if num_gpus >= 1 else 'cpu'

    if num_gpus >= 1:
        # Wrap the model for (multi-)GPU inference and move it to the selected device
        model = torch.nn.DataParallel(model)
        model = model.to(device)
        if torch.backends.cudnn.is_available():
            import torch.backends.cudnn as cudnn
            # Let cuDNN auto-tune convolution algorithms for the fixed input size
            cudnn.benchmark = True
            cudnn.deterministic = True
    predictor = BoxPredictor(cfg=cfg, device=device)

    if args.live:
        main_live(predictor=predictor, model=model, object_names=object_names)
    else:
        if not os.path.isdir(args.save_dir):
            os.makedirs(args.save_dir)
        main_images(predictor=predictor,
                    model=model,
                    object_names=object_names,
                    in_dir=args.im_dir,
                    out_dir=args.save_dir)
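
The attributes read by main() above (args.im_size, args.dataset, args.weights_test, args.live, args.im_dir, args.save_dir) imply a command-line entry point roughly like the sketch below. The flag names and defaults are assumptions inferred from those attribute names, not the project's actual parser.

import argparse

def build_demo_argparser():
    # Hypothetical parser; every flag below is inferred from the attributes
    # accessed in main() and may differ from the real project's CLI.
    parser = argparse.ArgumentParser(description='SSD detection demo (sketch)')
    parser.add_argument('--im-size', type=int, default=300, choices=[300, 512])
    parser.add_argument('--dataset', default='voc', choices=['voc', 'pascal', 'coco'])
    parser.add_argument('--weights-test', required=True, help='path to trained checkpoint')
    parser.add_argument('--live', action='store_true', help='run on a live camera feed')
    parser.add_argument('--im-dir', default='./images', help='input image directory')
    parser.add_argument('--save-dir', default='./results', help='output directory')
    return parser

if __name__ == '__main__':
    main(build_demo_argparser().parse_args())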
Example #2
def main(args):
    # -----------------------------------------------------------------------------
    # Configuration
    # -----------------------------------------------------------------------------
    if args.im_size in [300, 512]:
        from model.detection.ssd_config import get_config
        cfg = get_config(args.im_size)
    else:
        print_error_message('{} image size not supported'.format(args.im_size))

    if args.dataset in ['voc', 'pascal']:
        from data_loader.detection.voc import VOC_CLASS_LIST
        num_classes = len(VOC_CLASS_LIST)
    elif args.dataset == 'coco':
        from data_loader.detection.coco import COCO_CLASS_LIST
        num_classes = len(COCO_CLASS_LIST)
    else:
        print_error_message('{} dataset not supported.'.format(args.dataset))
        exit(-1)

    cfg.NUM_CLASSES = num_classes

    # -----------------------------------------------------------------------------
    # Model
    # -----------------------------------------------------------------------------
    model = ssd(args, cfg)

    if args.weights_test:
        weight_dict = torch.load(args.weights_test, map_location='cpu')
        model.load_state_dict(weight_dict)

    num_params = model_parameters(model)
    flops = compute_flops(model,
                          input=torch.Tensor(1, 3, cfg.image_size,
                                             cfg.image_size))
    print_info_message(
        'FLOPs for an input of size {}x{}: {:.2f} million'.format(
            cfg.image_size, cfg.image_size, flops))
    print_info_message('Network Parameters: {:.2f} million'.format(num_params))

    num_gpus = torch.cuda.device_count()
    device = 'cuda' if num_gpus >= 1 else 'cpu'

    if num_gpus >= 1:
        model = torch.nn.DataParallel(model)
        model = model.to(device)
        if torch.backends.cudnn.is_available():
            import torch.backends.cudnn as cudnn
            cudnn.benchmark = True
            cudnn.deterministic = True

    # -----------------------------------------------------------------------------
    # Dataset
    # -----------------------------------------------------------------------------
    if args.dataset in ['voc', 'pascal']:
        from data_loader.detection.voc import VOCDataset, VOC_CLASS_LIST
        dataset_class = VOCDataset(root_dir=args.data_path,
                                   transform=None,
                                   is_training=False,
                                   split="VOC2007")
        class_names = VOC_CLASS_LIST
    else:
        from data_loader.detection.coco import COCOObjectDetection, COCO_CLASS_LIST
        dataset_class = COCOObjectDetection(root_dir=args.data_path,
                                            transform=None,
                                            is_training=False)
        class_names = COCO_CLASS_LIST

    # -----------------------------------------------------------------------------
    # Evaluate
    # -----------------------------------------------------------------------------
    predictor = BoxPredictor(cfg=cfg, device=device)
    predictions = eval(model=model, dataset=dataset_class, predictor=predictor)

    result_info = evaluate(dataset=dataset_class,
                           predictions=predictions,
                           output_dir=None,
                           dataset_name=args.dataset)

    # -----------------------------------------------------------------------------
    # Results
    # -----------------------------------------------------------------------------
    if args.dataset in ['voc', 'pascal']:
        mAP = result_info['map']
        ap = result_info['ap']
        for i, c_name in enumerate(class_names):
            if i == 0:  # skip the background class
                continue
            print_info_message('{}: {}'.format(c_name, ap[i]))

        print_info_message('* mAP: {}'.format(mAP))
    elif args.dataset == 'coco':
        print_info_message('AP_IoU=0.50:0.95: {}'.format(result_info.stats[0]))
        print_info_message('AP_IoU=0.50: {}'.format(result_info.stats[1]))
        print_info_message('AP_IoU=0.75: {}'.format(result_info.stats[2]))
    else:
        print_error_message('{} not supported'.format(args.dataset))

    print_log_message('Done')
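
model_parameters and compute_flops in the second example are project helpers. Since the log messages report both figures in millions, a minimal stand-in for the parameter count, written in plain PyTorch and offered only as an assumption about what model_parameters computes, might look like this.

import torch.nn as nn

def count_parameters_millions(model: nn.Module) -> float:
    # Sum the element counts of all trainable tensors and report in millions.
    return sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6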