# Example 1
    def __init__(self, dataset="nyu"):
        """Set up the VNL metric-depth model for inference on one dataset.

        Builds the test-time argument namespace, merges the dataset config,
        loads the pretrained checkpoint, and wraps the model for GPU use.

        Args:
            dataset: Which pretrained variant to load, ``"nyu"`` or
                ``"kitti"``.

        Raises:
            ValueError: If ``dataset`` is not one of the supported names.
                (Previously an unknown name fell through both branches and
                crashed later with a ``NameError`` on ``test_args``.)
        """
        print("VNL init")
        self.dataset = dataset

        # Only these three settings differ between the datasets; everything
        # else in the Namespace below is shared.
        if dataset == "nyu":
            cfg_file = 'lib/configs/resnext101_32x4d_nyudv2_class'
            args_dataset = 'any'
            args_ckpt = './nyu_rawdata.pth'
            ckpt_path = './vnl/nyu_rawdata.pth'
        elif dataset == "kitti":
            cfg_file = 'lib/configs/resnext101_32x4d_kitti_class'
            args_dataset = 'kitti'
            args_ckpt = './kitti_eigen.pth'
            ckpt_path = './vnl/kitti_eigen.pth'
        else:
            raise ValueError(
                "Unsupported dataset %r; expected 'nyu' or 'kitti'" % (dataset,))

        # thread/batchsize are set to 1 directly (the original built them as
        # 4/2 and immediately overrode them before merge_cfg_from_file ran,
        # so the merged values are identical).
        test_args = Namespace(
            batchsize=1,
            cfg_file=cfg_file,
            dataroot='./',
            dataset=args_dataset,
            epoch=30,
            load_ckpt=args_ckpt,
            phase='test',
            phase_anno='test',
            results_dir='./evaluation',
            resume=False,
            start_epoch=0,
            start_step=0,
            thread=1,
            use_tfboard=False)

        merge_cfg_from_file(test_args)

        self.model = MetricDepthModel()
        self.model.eval()

        # NOTE(review): the checkpoint is loaded from ./vnl/ here while
        # test_args.load_ckpt points at ./ — confirm which path is canonical.
        load_ckpt(ckpt_path, self.model)

        self.model.cuda()
        self.model = torch.nn.DataParallel(self.model)

def get_num_lines(file_path):
    """Return the number of lines in the file at *file_path*.

    Counts via a read-only memory map, so large files are scanned without
    loading them into memory. Unlike the original, this closes both the
    file handle and the map (they previously leaked), works on read-only
    files (the ``"r+"`` open required write permission), and returns 0 for
    an empty file instead of raising from ``mmap`` (which cannot map a
    zero-length file).
    """
    with open(file_path, "rb") as fp:
        if os.fstat(fp.fileno()).st_size == 0:
            return 0  # mmap raises ValueError on empty files
        with mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ) as buf:
            # readline() returns b"" at EOF, terminating the iterator.
            return sum(1 for _ in iter(buf.readline, b""))


if __name__ == '__main__':
    # Build test-time options; force single-threaded, single-image eval
    # before the config merge so the merged values reflect the overrides.
    test_args = TestOptions().parse()
    test_args.thread = 1
    test_args.batchsize = 1
    merge_cfg_from_file(test_args)

    # Load the depth model in inference mode.
    model = MetricDepthModel()
    model.eval()

    # Load a checkpoint only when one was supplied on the command line.
    if test_args.load_ckpt:
        load_ckpt(test_args, model)
    model.cuda()
    model = torch.nn.DataParallel(model)

    # Output directory for predictions. makedirs(exist_ok=True) replaces
    # the original exists()/mkdir pair: no check-then-create race, and it
    # also creates missing intermediate directories.
    out_dir = os.path.join(test_args.dataroot, 'VNL_Monocular')
    os.makedirs(out_dir, exist_ok=True)
    # Training args, parsed and echoed for the log.
    train_opt = TrainOptions()
    train_args = train_opt.parse()
    train_opt.print_options(train_args)

    # Validation args: evaluate one image at a time in the main process.
    val_opt = ValOptions()
    val_args = val_opt.parse()
    val_args.batchsize = 1
    val_args.thread = 0
    val_opt.print_options(val_args)

    train_dataloader = CustomerDataLoader(train_args)
    train_datasize = len(train_dataloader)
    gpu_num = torch.cuda.device_count()
    # Consistency fix: every other call site in this file passes the parsed
    # args namespace to merge_cfg_from_file; the original line here passed
    # (train_datasize, gpu_num), which does not match that signature.
    merge_cfg_from_file(train_args)

    val_dataloader = CustomerDataLoader(val_args)
    val_datasize = len(val_dataloader)

    # Dump the merged configuration.
    print_configs(cfg)

    # TensorBoard logger (optional dependency, imported lazily on demand).
    if train_args.use_tfboard:
        from tensorboardX import SummaryWriter
        tblogger = SummaryWriter(cfg.TRAIN.LOG_DIR)

    # training status for logging
    training_stats = TrainingStats(
        train_args, cfg.TRAIN.LOG_INTERVAL,
# Example 4
    # Training args, parsed and echoed for the log.
    train_opt = TrainOptions()
    train_args = train_opt.parse()
    train_opt.print_options(train_args)

    # Validation args: batchsize=1 / thread=0 force single-image evaluation
    # in the main process (no worker subprocesses).
    val_opt = ValOptions()
    val_args = val_opt.parse()
    val_args.batchsize = 1
    val_args.thread = 0
    val_opt.print_options(val_args)

    # Dataloaders for both splits; sizes are kept for later bookkeeping.
    train_dataloader = CustomerDataLoader(train_args)
    train_datasize = len(train_dataloader)
    gpu_num = torch.cuda.device_count()
    # Merge the file-based config selected by the parsed training args.
    merge_cfg_from_file(train_args)

    val_dataloader = CustomerDataLoader(val_args)
    val_datasize = len(val_dataloader)

    # Dump the merged configuration.
    print_configs(cfg)

    # TensorBoard logger (optional dependency, imported lazily on demand).
    if train_args.use_tfboard:
        from tensorboardX import SummaryWriter
        tblogger = SummaryWriter(cfg.TRAIN.LOG_DIR)

    # training status for logging
    training_stats = TrainingStats(
        train_args, cfg.TRAIN.LOG_INTERVAL,