Example 1
    args = parser.parse_args()

    # Decide once whether CUDA is both requested and available, then derive
    # the device and the matching tensor type from that single flag (the
    # original evaluated the condition twice, risking divergence).
    use_cuda = args.use_gpu and torch.cuda.is_available()
    device = torch.device('cuda' if use_cuda else 'cpu')
    FloatTensor = torch.cuda.FloatTensor if use_cuda else torch.FloatTensor

    # Extract human-readable class labels from the names file.
    classes = utils.load_classes(args.class_path)

    # Set up model on the selected device.
    model = Yolo().to(device)

    if args.weights_path is not None:
        # Load pretrained darknet-format weights.
        model.load_darknet_weights(args.weights_path)

    model.eval()  # Inference only: freezes dropout / batch-norm statistics.

    # Create the output directory; exist_ok=True avoids the
    # check-then-create race of `if not exists: makedirs`.
    os.makedirs(args.output_path, exist_ok=True)
    if not os.path.exists(args.image_folder):
        print('No file or directory with the name {}'.format(
Example 2
    # Select CUDA only when it is both requested and actually available.
    device = torch.device(
        'cuda' if args.use_gpu and torch.cuda.is_available() else 'cpu')

    # Create the output/checkpoint directory up front; exist_ok=True avoids
    # the check-then-create race of `if not exists: makedirs`.
    os.makedirs(args.output_path, exist_ok=True)

    # Initiate model (num_classes=20 — presumably PASCAL VOC's 20 classes,
    # matching the VOCDetection dataset below).
    model = Yolo(num_classes=20).to(device)

    # If specified we start from a checkpoint: `.pth` files hold a PyTorch
    # state dict, anything else is treated as darknet-format weights.
    # map_location keeps GPU-saved checkpoints loadable on a CPU-only host.
    if args.pretrained_weights:
        if args.pretrained_weights.endswith('.pth'):
            model.load_state_dict(
                torch.load(args.pretrained_weights, map_location=device))
        else:
            model.load_darknet_weights(args.pretrained_weights)

    # Get dataloaders: shuffle the training set each epoch, keep validation
    # order fixed; the dataset supplies its own collate_fn (presumably to
    # batch variable-length target lists — verify against VOCDetection).
    train_dataset = VOCDetection(args.train_path, args.img_size)
    train_loader = DataLoader(train_dataset,
                              batch_size=args.batch_size,
                              shuffle=True,
                              num_workers=args.num_workers,
                              collate_fn=train_dataset.collate_fn)
    val_dataset = VOCDetection(args.val_path, args.img_size)
    val_loader = DataLoader(val_dataset,
                            batch_size=args.batch_size,
                            shuffle=False,
                            num_workers=args.num_workers,
                            collate_fn=val_dataset.collate_fn)