Example #1
    ###############################################################################
    # Load train data
    ###############################################################################
    PPT = [cfg.PROJ.TRAIN_PPT, (cfg.PROJ.TRAIN_PPT + cfg.PROJ.EVAL_PPT)]

    print(f"{gct()} : Loading traning data")
    train_data = DataLoader(
        HpatchDataset(
            data_type="train",
            PPT=PPT,
            use_all=cfg.PROJ.TRAIN_ALL,
            csv_file=cfg[cfg.PROJ.TRAIN]["csv"],
            root_dir=cfg[cfg.PROJ.TRAIN]["root"],
            transform=transforms.Compose([
                Grayscale(),
                Normalize(mean=cfg[cfg.PROJ.TRAIN]["MEAN"],
                          std=cfg[cfg.PROJ.TRAIN]["STD"]),
                LargerRescale((960, 1280)),
                RandomCrop((720, 960)),
                Rescale((240, 320)),
                ToTensor(),
            ]),
        ),
        batch_size=cfg.TRAIN.BATCH_SIZE,
        shuffle=True,
        num_workers=0,
    )

    ###############################################################################
    # Load evaluation data
    ###############################################################################

Example #2

        root_dir += 'EFDataset'
        seq = 'ef'
        a = True
    else:
        print(f'cannot find {args.data}')
        exit(-1)

    mean = cfg[seq]["MEAN"]
    std = cfg[seq]["STD"]
    data_loader = DataLoader(
        HpatchDataset(
            data_type="test",
            PPT=[0.8, 0.9],
            use_all=a,
            csv_file=csv_file,
            root_dir=root_dir,
            transform=transforms.Compose([
                Grayscale(),
                Normalize(mean=mean, std=std),
                Rescale((960, 1280)),
                Rescale((480, 640)),
                ToTensor(),
            ]),
        ),
        batch_size=1,
        shuffle=False,
        num_workers=0,
    )

    useful_list = []
    repeat_list = []
    with torch.no_grad():
        for i_batch, sample_batched in enumerate(data_loader, 1):
            im1_data, im1_info, homo12, im2_data, im2_info, homo21, im1_raw, im2_raw = parse_batch(
                sample_batched, device)  # snippet is cut off here; these arguments are assumed
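
The snippet ends at the `parse_batch` call. As a rough, hedged sketch of what such a helper typically does (the real implementation is not shown here, so the batch keys and the `device` handling are assumptions), it might look like this:

    # Hypothetical parse_batch-style helper, not part of the original snippet:
    # move each field of the sampled batch onto the target device as float tensors.
    import torch

    def parse_batch_sketch(batch, device):
        keys = ("im1", "im1_info", "homo12",
                "im2", "im2_info", "homo21",
                "im1_raw", "im2_raw")
        return tuple(batch[k].to(device, dtype=torch.float) for k in keys)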
Example #3
        a = True
    else:
        print(f'cannot find {args.data}')
        exit(-1)

    mean = cfg[seq]["MEAN"]
    std = cfg[seq]["STD"]
    data_loader = DataLoader(
        HpatchDataset(
            data_type="test",
            PPT=0.9,
            use_all=a,
            csv_file=csv_file,
            root_dir=root_dir,
            transform=transforms.Compose(
                [
                    Grayscale(),
                    Normalize(mean=mean, std=std),
                    Rescale((960, 1280)),
                    Rescale((240, 320)),
                    ToTensor()
                ]
            ),
        ),
        batch_size=1,
        shuffle=False,
        num_workers=0
    )

    useful_list = []
    repeat_list = []
    for i_batch, sample_batched in enumerate(data_loader, 1):
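
Example #3's loop body is also cut off. Assuming each iteration appends one per-pair score to `repeat_list` and `useful_list` (the metric names are guesses based on the list names), a summary step after the loop might look like this:

    # Hedged sketch, not part of the original snippet: report the mean of the
    # per-pair scores collected by the (truncated) evaluation loop above.
    import numpy as np

    if repeat_list:
        print(f"mean repeatability over {len(repeat_list)} pairs: {np.mean(repeat_list):.4f}")
    if useful_list:
        print(f"mean useful-point score over {len(useful_list)} pairs: {np.mean(useful_list):.4f}")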