def buildDataset(args):
    """Build the train/val DataLoaders for the dataset named by ``args.dataList``.

    The Dataset backend is chosen by substring match on ``args.dataList``
    ('suncg' / 'scannet' / 'matterport'). Both splits are created with the
    same flags and wrapped in shuffled, drop-last DataLoaders using the
    project's concatenating collate function.

    Args:
        args: options namespace; reads dataList, single_view, featurelearning,
            representation, dynamicWeighting, snumclass, batch_size,
            num_workers.

    Returns:
        (train_loader, val_loader): a pair of ``torch.utils.data.DataLoader``.

    Raises:
        Exception: if ``args.dataList`` matches no known dataset.
    """
    def worker_init_fn(worker_id):
        # Give each loader worker a distinct numpy seed derived from the
        # parent process's RNG state. NOTE(review): this reseeds workers the
        # same way every epoch — confirm that is intended.
        np.random.seed(np.random.get_state()[1][0] + worker_id)

    if 'suncg' in args.dataList:
        from datasets.SUNCG import SUNCG as Dataset
    elif 'scannet' in args.dataList:
        from datasets.ScanNet import ScanNet as Dataset
    elif 'matterport' in args.dataList:
        from datasets.Matterport3D import Matterport3D as Dataset
    else:
        raise Exception("unknown dataset!")

    # Dataset flags shared verbatim by the train and test splits.
    common = dict(
        rotate=False, rgbd=True, hmap=False, segm=True, normal=True,
        list_=f"./data/dataList/{args.dataList}.npy",
        singleView=args.single_view,
        denseCorres=args.featurelearning,
        reproj=True,
        representation=args.representation,
        dynamicWeighting=args.dynamicWeighting,
        snumclass=args.snumclass,
    )
    # The train split additionally disables AuthenticdepthMap (as in the
    # original call); the val split never passed that flag.
    train_dataset = Dataset('train', config.nViews, AuthenticdepthMap=False,
                            meta=False, **common)
    val_dataset = Dataset('test', nViews=config.nViews, meta=False, **common)

    # The original code omitted num_workers when args.num_workers == 1,
    # which is equivalent to num_workers=0 (load in the main process) —
    # preserve that behaviour while collapsing the duplicated branch.
    workers = 0 if args.num_workers == 1 else args.num_workers
    loader_kwargs = dict(
        batch_size=args.batch_size,
        shuffle=True,
        num_workers=workers,
        drop_last=True,
        collate_fn=util.collate_fn_cat,
        worker_init_fn=worker_init_fn,
    )
    train_loader = DataLoader(train_dataset, **loader_kwargs)
    val_loader = DataLoader(val_dataset, **loader_kwargs)
    return train_loader, val_loader
# NOTE(review): truncated fragment — the enclosing function's header precedes
# this chunk and the final DataLoader(...) call is cut off mid-argument-list,
# so the span is left byte-identical rather than restructured. From what is
# visible it: packs per-dataset parameters via opts(...), ensures the
# "./data/relativePoseModule/" cache directory exists, loads cached matching
# primitives from final_{dataset}_rlevel_{rlevel}.npy when present, and
# otherwise builds the per-dataset validation Dataset (SUNCG / Matterport3D /
# ScanNet, chosen by substring match on args.dataset) plus a batch-size-1,
# unshuffled DataLoader to regenerate them.
# NOTE(review): the formatting was collapsed onto one physical line by
# extraction; on this single line the embedded "# cache the matching
# primitives" comment would comment out everything after it — restore the
# original line breaks before running.
args.para = opts(para_val[:, 0], para_val[:, 1], para_val[:, 2], para_val[:, 3]) if not os.path.exists("./data/relativePoseModule/"): os.makedirs("./data/relativePoseModule/") # cache the matching primitives primitive_file = f"./data/relativePoseModule/final_{args.dataset}_rlevel_{args.rlevel}.npy" if os.path.exists(primitive_file): primitives = np.load(primitive_file) else: if 'suncg' in args.dataset: from datasets.SUNCG import SUNCG as Dataset dataset_name = 'suncg' val_dataset = Dataset(args.split, nViews=2,meta=False,rotate=False,rgbd=True,hmap=False,segm=True,normal=True,\ list_=f"./data/dataList/suncg.npy",singleView=0) elif 'matterport' in args.dataset: from datasets.Matterport3D import Matterport3D as Dataset dataset_name = 'matterport' val_dataset = Dataset(args.split, nViews=2,meta=False,rotate=False,rgbd=True,hmap=False,segm=True,normal=True,\ list_=f"./data/dataList/matterport.npy",singleView=0) elif 'scannet' in args.dataset: from datasets.ScanNet import ScanNet as Dataset dataset_name = 'scannet' val_dataset = Dataset(args.split, nViews=2,meta=False,rotate=False,rgbd=True,hmap=False,segm=True,normal=True,\ list_=f"./data/dataList/scannet.npy",singleView=0,fullsize_rgbdn=True,\ representation=args.representation) loader = DataLoader(val_dataset, batch_size=1, shuffle=False,
def getLoader(args):
    """Build a batch-size-1 test-split DataLoader for ``args.dataList``.

    The Dataset backend is chosen by substring match on ``args.dataList``
    ('suncg' / 'matterport' / 'scannet').

    Args:
        args: options namespace; reads dataList, entrySplit, representation,
            debug.

    Returns:
        (dataset_name, loader): the backend's short name and an unshuffled,
        drop-last DataLoader over the test split.

    Raises:
        Exception: if ``args.dataList`` matches no known dataset. (The
        original fell through and later crashed with NameError on the
        unbound val_dataset; buildDataset already raises explicitly, so
        this now matches that behaviour.)
    """
    testOption = 'test'

    # Dispatch first so an unknown dataset fails fast, before any other
    # work; 'extra' holds backend-specific keyword arguments.
    if 'suncg' in args.dataList:
        from datasets.SUNCG import SUNCG as Dataset
        dataset_name = 'suncg'
        extra = {}
    elif 'matterport' in args.dataList:
        from datasets.Matterport3D import Matterport3D as Dataset
        dataset_name = 'matterport'
        extra = {}
    elif 'scannet' in args.dataList:
        from datasets.ScanNet import ScanNet as Dataset
        dataset_name = 'scannet'
        # ScanNet additionally gets full-size rgbdn frames and the chosen
        # rotation representation (as in the original call).
        extra = dict(fullsize_rgbdn=True, representation=args.representation)
    else:
        # Message kept identical to buildDataset for consistency.
        raise Exception("unknown dataset!")

    val_dataset = Dataset(testOption, nViews=config.nViews, meta=False,
                          rotate=False, rgbd=True, hmap=False, segm=True,
                          normal=True,
                          list_=f"./data/dataList/{args.dataList}.npy",
                          singleView=0, entrySplit=args.entrySplit, **extra)

    # The original debug branch omitted num_workers, which is equivalent to
    # num_workers=0 (load in the main process); otherwise one worker is used.
    workers = 0 if args.debug else 1
    loader = DataLoader(val_dataset, batch_size=1, shuffle=False,
                        num_workers=workers, drop_last=True,
                        collate_fn=util.collate_fn_cat,
                        worker_init_fn=util.worker_init_fn)
    return dataset_name, loader